diff --git a/.editorconfig b/.editorconfig index 49f34bd03..4a9719c4b 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,5 +1,38 @@ -; 4-column tab indentation -[*.cs] +# Editor Config for foundationdb-dotnet-client + +# root +root = true + + +# Global defaults +[*] +end_of_line = crlf +insert_final_newline = true indent_style = tab indent_size = 4 +[*.cs] +csharp_indent_block_contents = true +csharp_indent_braces = false +csharp_indent_case_contents = false +csharp_indent_labels = one_less_than_current +csharp_indent_switch_labels = true +csharp_new_line_before_members_in_anonymous_types = true +csharp_new_line_before_members_in_object_initializers = true +csharp_prefer_braces = true +csharp_preserve_single_line_statements = true +csharp_space_after_cast = true +csharp_space_after_comma = true +csharp_space_after_keywords_in_control_flow_statements = true +csharp_space_between_method_call_name_and_opening_parenthesis = false +csharp_style_conditional_delegate_call = true:suggestion +csharp_style_pattern_matching_over_as_with_null_check = true:suggestion +csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion +csharp_style_throw_expression = true:suggestion +dotnet_sort_system_directives_first = true:suggestion +dotnet_style_coalesce_expression = true:suggestion +dotnet_style_explicit_tuple_names = true:warning +dotnet_style_null_propagation = true:suggestion +dotnet_style_qualification_for_method = false:suggestion + +csharp_style_inlined_variable_declaration = true:suggestion diff --git a/.gitignore b/.gitignore index 9623924d5..5f7062a2a 100644 --- a/.gitignore +++ b/.gitignore @@ -27,7 +27,7 @@ _ReSharper*/ *.vssscc $tf*/ -nuget/ +.nuget/ packages/ *.ide/ .vs/ @@ -37,4 +37,10 @@ fdb_c.dll # Build tools and artifacts /build/tools/ /build/output/ +/build/.fake +# JetBrains +.idea + +# OSX +.DS_Store diff --git a/.nuget/NuGet.Config b/.nuget/NuGet.Config deleted file mode 100644 index 67f8ea046..000000000 --- a/.nuget/NuGet.Config +++ /dev/null 
@@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/.nuget/NuGet.exe b/.nuget/NuGet.exe deleted file mode 100755 index 8dd7e45ae..000000000 Binary files a/.nuget/NuGet.exe and /dev/null differ diff --git a/.nuget/NuGet.targets b/.nuget/NuGet.targets deleted file mode 100644 index 3f8c37b22..000000000 --- a/.nuget/NuGet.targets +++ /dev/null @@ -1,144 +0,0 @@ - - - - $(MSBuildProjectDirectory)\..\ - - - false - - - false - - - true - - - false - - - - - - - - - - - $([System.IO.Path]::Combine($(SolutionDir), ".nuget")) - - - - - $(SolutionDir).nuget - - - - $(MSBuildProjectDirectory)\packages.$(MSBuildProjectName.Replace(' ', '_')).config - $(MSBuildProjectDirectory)\packages.$(MSBuildProjectName).config - - - - $(MSBuildProjectDirectory)\packages.config - $(PackagesProjectConfig) - - - - - $(NuGetToolsPath)\NuGet.exe - @(PackageSource) - - "$(NuGetExePath)" - mono --runtime=v4.0.30319 "$(NuGetExePath)" - - $(TargetDir.Trim('\\')) - - -RequireConsent - -NonInteractive - - "$(SolutionDir) " - "$(SolutionDir)" - - - $(NuGetCommand) install "$(PackagesConfig)" -source "$(PackageSources)" $(NonInteractiveSwitch) $(RequireConsentSwitch) -solutionDir $(PaddedSolutionDir) - $(NuGetCommand) pack "$(ProjectPath)" -Properties "Configuration=$(Configuration);Platform=$(Platform)" $(NonInteractiveSwitch) -OutputDirectory "$(PackageOutputDir)" -symbols - - - - RestorePackages; - $(BuildDependsOn); - - - - - $(BuildDependsOn); - BuildPackage; - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/Common/VersionInfo.cs b/Common/VersionInfo.cs index 5262018c7..8c8f4ef16 100644 --- a/Common/VersionInfo.cs +++ b/Common/VersionInfo.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -27,15 +27,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY #endregion using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; [assembly: AssemblyCompany("Doxense")] [assembly: AssemblyProduct("FoundationDB.Client")] -[assembly: AssemblyCopyright("Copyright Doxense SAS 2013-2015")] +[assembly: AssemblyCopyright("Copyright Doxense SAS 2013-2018")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] -[assembly: AssemblyVersion("0.9.9.0")] //note: Only change this when doing breaking API changes! -[assembly: AssemblyFileVersion("0.9.8.0")] //note: change this to "x.y.revision.build_step" in a build step of your favorite C.I. build server -[assembly: AssemblyInformationalVersion("0.9.9-pre")] //note: this is the version seen by NuGet, add "-alpha"/"-beta"/"-rc1" at the end to create pre-release packages +[assembly: AssemblyVersion("5.1.0.0")] //note: Only change this when doing breaking API changes! +[assembly: AssemblyFileVersion("5.1.0.0")] //note: change this to "x.y.revision.build_step" in a build step of your favorite C.I. 
build server +[assembly: AssemblyInformationalVersion("0.5.1-alpha1")] //note: this is the version seen by NuGet, add "-alpha"/"-beta"/"-rc1" at the end to create pre-release packages diff --git a/FdbBurner/App.config b/FdbBurner/App.config index 9c05822ff..bae5d6d81 100644 --- a/FdbBurner/App.config +++ b/FdbBurner/App.config @@ -1,6 +1,6 @@ - + - + - \ No newline at end of file + diff --git a/FdbBurner/FdbBurner.csproj b/FdbBurner/FdbBurner.csproj index e2cc1154e..3c13f4ee0 100644 --- a/FdbBurner/FdbBurner.csproj +++ b/FdbBurner/FdbBurner.csproj @@ -9,7 +9,7 @@ Properties FdbBurner FdbBurner - v4.5.1 + v4.6.1 512 true diff --git a/FdbBurner/Program.cs b/FdbBurner/Program.cs index 8e898300f..cc2a77af2 100644 --- a/FdbBurner/Program.cs +++ b/FdbBurner/Program.cs @@ -1,15 +1,15 @@ -using FoundationDB.Client; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; - + namespace FdbBurner { + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.Linq; + using System.Text; + using System.Threading; + using System.Threading.Tasks; + using FoundationDB.Client; public class Program { diff --git a/FdbShell/App.config b/FdbShell/App.config index 8e1564635..bae5d6d81 100644 --- a/FdbShell/App.config +++ b/FdbShell/App.config @@ -1,6 +1,6 @@ - + - + - \ No newline at end of file + diff --git a/FdbShell/Commands/BasicCommands.cs b/FdbShell/Commands/BasicCommands.cs index ecbeac4ab..4a340e7d1 100644 --- a/FdbShell/Commands/BasicCommands.cs +++ b/FdbShell/Commands/BasicCommands.cs @@ -1,19 +1,20 @@ -using FoundationDB.Client; -using FoundationDB.Layers.Directories; -using FoundationDB.Layers.Tuples; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.IO; -using System.Linq; -using System.Net; -using 
System.Text; -using System.Threading; -using System.Threading.Tasks; - + namespace FdbShell { + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.IO; + using System.Linq; + using System.Net; + using System.Text; + using System.Threading; + using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using FoundationDB.Client; + using FoundationDB.Layers.Directories; + public static class BasicCommands { @@ -29,7 +30,7 @@ public static async Task TryOpenCurrentDirectoryAsync(string[] pa { if (path != null && path.Length > 0) { - return await db.Directory.TryOpenAsync(path, cancellationToken: ct); + return await db.Directory.TryOpenAsync(path, ct: ct); } else { @@ -37,11 +38,11 @@ public static async Task TryOpenCurrentDirectoryAsync(string[] pa } } - public static async Task Dir(string[] path, IFdbTuple extras, DirectoryBrowseOptions options, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task Dir(string[] path, ITuple extras, DirectoryBrowseOptions options, IFdbDatabase db, TextWriter log, CancellationToken ct) { if (log == null) log = Console.Out; - log.WriteLine("# Listing {0}:", String.Join("/", path)); + log.WriteLine("# Listing {0}:", string.Join("/", path)); var parent = await TryOpenCurrentDirectoryAsync(path, db, ct); if (parent == null) @@ -52,7 +53,7 @@ public static async Task Dir(string[] path, IFdbTuple extras, DirectoryBrowseOpt if (parent.Layer.IsPresent) { - log.WriteLine("# Layer: {0}", parent.Layer.ToAsciiOrHexaString()); + log.WriteLine("# Layer: {0:P}", parent.Layer); } var folders = await Fdb.Directory.BrowseAsync(db, parent, ct); @@ -69,16 +70,16 @@ public static async Task Dir(string[] path, IFdbTuple extras, DirectoryBrowseOpt if (!(subfolder is FdbDirectoryPartition)) { long count = await Fdb.System.EstimateCountAsync(db, subfolder.Keys.ToRange(), ct); - log.WriteLine(" {0,-12} {1,-12} {3,9:N0} {2}", 
FdbKey.Dump(FdbSubspace.Copy(subfolder).Key), subfolder.Layer.IsNullOrEmpty ? "-" : ("<" + subfolder.Layer.ToUnicode() + ">"), name, count); + log.WriteLine(" {0,-12} {1,-12} {3,9:N0} {2}", FdbKey.Dump(subfolder.Copy().GetPrefix()), subfolder.Layer.IsNullOrEmpty ? "-" : ("<" + subfolder.Layer.ToUnicode() + ">"), name, count); } else { - log.WriteLine(" {0,-12} {1,-12} {3,9} {2}", FdbKey.Dump(FdbSubspace.Copy(subfolder).Key), subfolder.Layer.IsNullOrEmpty ? "-" : ("<" + subfolder.Layer.ToUnicode() + ">"), name, "-"); + log.WriteLine(" {0,-12} {1,-12} {3,9} {2}", FdbKey.Dump(subfolder.Copy().GetPrefix()), subfolder.Layer.IsNullOrEmpty ? "-" : ("<" + subfolder.Layer.ToUnicode() + ">"), name, "-"); } } else { - log.WriteLine(" {0,-12} {1,-12} {2}", FdbKey.Dump(FdbSubspace.Copy(subfolder).Key), subfolder.Layer.IsNullOrEmpty ? "-" : ("<" + subfolder.Layer.ToUnicode() + ">"), name); + log.WriteLine(" {0,-12} {1,-12} {2}", FdbKey.Dump(subfolder.Copy().GetPrefix()), subfolder.Layer.IsNullOrEmpty ? 
"-" : ("<" + subfolder.Layer.ToUnicode() + ">"), name); } } else @@ -96,7 +97,7 @@ public static async Task Dir(string[] path, IFdbTuple extras, DirectoryBrowseOpt } /// Creates a new directory - public static async Task CreateDirectory(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task CreateDirectory(string[] path, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { if (log == null) log = Console.Out; @@ -104,33 +105,33 @@ public static async Task CreateDirectory(string[] path, IFdbTuple extras, IFdbDa log.WriteLine("# Creating directory {0} with layer '{1}'", String.Join("/", path), layer); - var folder = await db.Directory.TryOpenAsync(path, cancellationToken: ct); + var folder = await db.Directory.TryOpenAsync(path, ct: ct); if (folder != null) { log.WriteLine("- Directory {0} already exists!", string.Join("/", path)); return; } - folder = await db.Directory.TryCreateAsync(path, Slice.FromString(layer), cancellationToken: ct); - log.WriteLine("- Created under {0} [{1}]", FdbKey.Dump(folder.Key), folder.Key.ToHexaString(' ')); + folder = await db.Directory.TryCreateAsync(path, Slice.FromString(layer), ct: ct); + log.WriteLine("- Created under {0} [{1}]", FdbKey.Dump(folder.GetPrefix()), folder.GetPrefix().ToHexaString(' ')); // look if there is already stuff under there - var stuff = await db.ReadAsync((tr) => tr.GetRange(folder.Keys.ToRange()).FirstOrDefaultAsync(), cancellationToken: ct); + var stuff = await db.ReadAsync((tr) => tr.GetRange(folder.Keys.ToRange()).FirstOrDefaultAsync(), ct: ct); if (stuff.Key.IsPresent) { log.WriteLine("CAUTION: There is already some data under {0} !"); - log.WriteLine(" {0} = {1}", FdbKey.Dump(stuff.Key), stuff.Value.ToAsciiOrHexaString()); + log.WriteLine(" {0} = {1:V}", FdbKey.Dump(stuff.Key), stuff.Value); } } /// Remove a directory and all its data - public static async Task RemoveDirectory(string[] path, IFdbTuple extras, IFdbDatabase db, 
TextWriter log, CancellationToken ct) + public static async Task RemoveDirectory(string[] path, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { if (log == null) log = Console.Out; string layer = extras.Count > 0 ? extras.Get(0) : null; - var folder = await db.Directory.TryOpenAsync(path, cancellationToken: ct); + var folder = await db.Directory.TryOpenAsync(path, ct: ct); if (folder == null) { log.WriteLine("# Directory {0} does not exist", string.Join("/", path)); @@ -153,16 +154,16 @@ public static async Task RemoveDirectory(string[] path, IFdbTuple extras, IFdbDa } /// Move/Rename a directory - public static async Task MoveDirectory(string[] srcPath, string[] dstPath, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task MoveDirectory(string[] srcPath, string[] dstPath, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { - var folder = await db.Directory.TryOpenAsync(srcPath, cancellationToken: ct); + var folder = await db.Directory.TryOpenAsync(srcPath, ct: ct); if (folder == null) { log.WriteLine("# Source directory {0} does not exist!", string.Join("/", srcPath)); return; } - folder = await db.Directory.TryOpenAsync(dstPath, cancellationToken: ct); + folder = await db.Directory.TryOpenAsync(dstPath, ct: ct); if (folder != null) { log.WriteLine("# Destination directory {0} already exists!", string.Join("/", dstPath)); @@ -173,7 +174,7 @@ public static async Task MoveDirectory(string[] srcPath, string[] dstPath, IFdbT Console.WriteLine("Moved {0} to {1}", string.Join("/", srcPath), string.Join("/", dstPath)); } - public static async Task ShowDirectoryLayer(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task ShowDirectoryLayer(string[] path, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { var dir = await BasicCommands.TryOpenCurrentDirectoryAsync(path, db, ct); if (dir == null) @@ -185,13 
+186,13 @@ public static async Task ShowDirectoryLayer(string[] path, IFdbTuple extras, IFd if (dir.Layer == FdbDirectoryPartition.LayerId) log.WriteLine("# Directory {0} is a partition", String.Join("/", path)); else if (dir.Layer.IsPresent) - log.WriteLine("# Directory {0} has layer {1}", String.Join("/", path), dir.Layer.ToAsciiOrHexaString()); + log.WriteLine("# Directory {0} has layer {1:P}", String.Join("/", path), dir.Layer); else log.WriteLine("# Directory {0} does not have a layer defined", String.Join("/", path)); } } - public static async Task ChangeDirectoryLayer(string[] path, string layer, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task ChangeDirectoryLayer(string[] path, string layer, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { var dir = await BasicCommands.TryOpenCurrentDirectoryAsync(path, db, ct); if (dir == null) @@ -201,12 +202,12 @@ public static async Task ChangeDirectoryLayer(string[] path, string layer, IFdbT else { dir = await db.ReadWriteAsync((tr) => dir.ChangeLayerAsync(tr, Slice.FromString(layer)), ct); - log.WriteLine("# Directory {0} layer changed to {1}", String.Join("/", path), dir.Layer.ToAsciiOrHexaString()); + log.WriteLine("# Directory {0} layer changed to {1:P}", String.Join("/", path), dir.Layer); } } /// Counts the number of keys inside a directory - public static async Task Count(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task Count(string[] path, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { // look if there is something under there var folder = (await TryOpenCurrentDirectoryAsync(path, db, ct)) as FdbDirectorySubspace; @@ -216,12 +217,12 @@ public static async Task Count(string[] path, IFdbTuple extras, IFdbDatabase db, return; } - var copy = FdbSubspace.Copy(folder); - log.WriteLine("# Counting keys under {0} ...", FdbKey.Dump(copy.Key)); + var copy = 
folder.Copy(); + log.WriteLine("# Counting keys under {0} ...", FdbKey.Dump(copy.GetPrefix())); - var progress = new Progress>((state) => + var progress = new Progress<(long Count, Slice Current)>((state) => { - log.Write("\r# Found {0:N0} keys...", state.Item1); + log.Write("\r# Found {0:N0} keys...", state.Count); }); long count = await Fdb.System.EstimateCountAsync(db, copy.ToRange(), progress, ct); @@ -229,7 +230,7 @@ public static async Task Count(string[] path, IFdbTuple extras, IFdbDatabase db, } /// Shows the first few keys of a directory - public static async Task Show(string[] path, IFdbTuple extras, bool reverse, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task Show(string[] path, ITuple extras, bool reverse, IFdbDatabase db, TextWriter log, CancellationToken ct) { int count = 20; if (extras.Count > 0) @@ -239,23 +240,23 @@ public static async Task Show(string[] path, IFdbTuple extras, bool reverse, IFd } // look if there is something under there - var folder = await db.Directory.TryOpenAsync(path, cancellationToken: ct); + var folder = await db.Directory.TryOpenAsync(path, ct: ct); if (folder != null) { - log.WriteLine("# Content of {0} [{1}]", FdbKey.Dump(folder.Key), folder.Key.ToHexaString(' ')); + log.WriteLine("# Content of {0} [{1}]", FdbKey.Dump(folder.GetPrefix()), folder.GetPrefix().ToHexaString(' ')); var keys = await db.QueryAsync((tr) => { var query = tr.GetRange(folder.Keys.ToRange()); return reverse ? 
query.Reverse().Take(count) : query.Take(count + 1); - }, cancellationToken: ct); + }, ct: ct); if (keys.Count > 0) { if (reverse) keys.Reverse(); foreach (var key in keys.Take(count)) { - log.WriteLine("...{0} = {1}", FdbKey.Dump(folder.ExtractKey(key.Key)), key.Value.ToAsciiOrHexaString()); + log.WriteLine("...{0} = {1:V}", FdbKey.Dump(folder.ExtractKey(key.Key)), key.Value); } if (!reverse && keys.Count == count + 1) { @@ -270,14 +271,14 @@ public static async Task Show(string[] path, IFdbTuple extras, bool reverse, IFd } /// Display a tree of a directory's children - public static async Task Tree(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task Tree(string[] path, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { if (log == null) log = Console.Out; log.WriteLine("# Tree of {0}:", String.Join("/", path)); FdbDirectorySubspace root = null; - if (path.Length > 0) root = await db.Directory.TryOpenAsync(path, cancellationToken: ct); + if (path.Length > 0) root = await db.Directory.TryOpenAsync(path, ct: ct); await TreeDirectoryWalk(root, new List(), db, log, ct); @@ -317,7 +318,7 @@ private static async Task TreeDirectoryWalk(FdbDirectorySubspace folder, List { var kvs = await Task.WhenAll( - tr.GetRange(FdbKeyRange.StartsWith(key)).FirstOrDefaultAsync(), - tr.GetRange(FdbKeyRange.StartsWith(key)).LastOrDefaultAsync() + tr.GetRange(KeyRange.StartsWith(key)).FirstOrDefaultAsync(), + tr.GetRange(KeyRange.StartsWith(key)).LastOrDefaultAsync() ); return new { Min = kvs[0].Key, Max = kvs[1].Key }; }, ct); if (bounds.Min.HasValue) { // folder is not empty - shards = await Fdb.System.GetChunksAsync(db, FdbKeyRange.StartsWith(key), ct); + shards = await Fdb.System.GetChunksAsync(db, KeyRange.StartsWith(key), ct); //TODO: we still need to check if the first and last shard really intersect the subspace // we need to check if the shards actually contain data @@ -453,14 +454,14 @@ private 
static string FormatSize(long size, CultureInfo ci = null) } /// Find the DCs, machines and processes in the cluster - public static async Task Topology(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task Topology(string[] path, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { var coords = await Fdb.System.GetCoordinatorsAsync(db, ct); log.WriteLine("[Cluster] {0}", coords.Id); var servers = await db.QueryAsync(tr => tr .WithReadAccessToSystemKeys() - .GetRange(FdbKeyRange.StartsWith(Fdb.System.ServerList)) + .GetRange(KeyRange.StartsWith(Fdb.System.ServerList)) .Select(kvp => new { // Offsets Size Type Name Description @@ -545,7 +546,7 @@ public static async Task Topology(string[] path, IFdbTuple extras, IFdbDatabase log.WriteLine(); } - public static async Task Shards(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task Shards(string[] path, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { var ranges = await Fdb.System.GetChunksAsync(db, FdbKey.MinValue, FdbKey.MaxValue, ct); Console.WriteLine("Found {0} shards in the whole cluster", ranges.Count); @@ -554,7 +555,7 @@ public static async Task Shards(string[] path, IFdbTuple extras, IFdbDatabase db var folder = (await TryOpenCurrentDirectoryAsync(path, db, ct)) as FdbDirectorySubspace; if (folder != null) { - var r = FdbKeyRange.StartsWith(FdbSubspace.Copy(folder).Key); + var r = KeyRange.StartsWith(folder.Copy().GetPrefix()); Console.WriteLine("Searching for shards that intersect with /{0} ...", String.Join("/", path)); ranges = await Fdb.System.GetChunksAsync(db, r, ct); Console.WriteLine("Found {0} ranges intersecting {1}:", ranges.Count, r); @@ -574,7 +575,7 @@ public static async Task Shards(string[] path, IFdbTuple extras, IFdbDatabase db //TODO: shards that intersect the current directory } - public static async Task Sampling(string[] 
path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) + public static async Task Sampling(string[] path, ITuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { double ratio = 0.1d; bool auto = true; @@ -586,16 +587,16 @@ public static async Task Sampling(string[] path, IFdbTuple extras, IFdbDatabase } var folder = await TryOpenCurrentDirectoryAsync(path, db, ct); - FdbKeyRange span; + KeyRange span; if (folder is FdbDirectorySubspace) { - span = FdbKeyRange.StartsWith(FdbSubspace.Copy(folder as FdbDirectorySubspace).Key); + span = KeyRange.StartsWith((folder as FdbDirectorySubspace).Copy().GetPrefix()); log.WriteLine("Reading list of shards for /{0} under {1} ...", String.Join("/", path), FdbKey.Dump(span.Begin)); } else { log.WriteLine("Reading list of shards for the whole cluster ..."); - span = FdbKeyRange.All; + span = KeyRange.All; } // dump keyServers @@ -603,7 +604,7 @@ public static async Task Sampling(string[] path, IFdbTuple extras, IFdbDatabase log.WriteLine("> Found {0:N0} shard(s)", ranges.Count); // take a sample - var samples = new List(); + var samples = new List(); if (ranges.Count <= 32) { // small enough to scan it all @@ -620,7 +621,7 @@ public static async Task Sampling(string[] path, IFdbTuple extras, IFdbDatabase if (sz < 32) sz = Math.Max(sz, Math.Min(32, ranges.Count)); } - var population = new List(ranges); + var population = new List(ranges); for (int i = 0; i < sz; i++) { int p = rnd.Next(population.Count); @@ -667,8 +668,8 @@ public static async Task Sampling(string[] path, IFdbTuple extras, IFdbDatabase long count = 0; int iter = 0; - var beginSelector = FdbKeySelector.FirstGreaterOrEqual(range.Begin); - var endSelector = FdbKeySelector.FirstGreaterOrEqual(range.End); + var beginSelector = KeySelector.FirstGreaterOrEqual(range.Begin); + var endSelector = KeySelector.FirstGreaterOrEqual(range.End); while (true) { FdbRangeChunk data = default(FdbRangeChunk); @@ -707,7 +708,7 @@ public static 
async Task Sampling(string[] path, IFdbTuple extras, IFdbDatabase if (!data.HasMore) break; - beginSelector = FdbKeySelector.FirstGreaterThan(data.Last.Key); + beginSelector = KeySelector.FirstGreaterThan(data.Last.Key); ++iter; } diff --git a/FdbShell/Commands/FdbCliCommands.cs b/FdbShell/Commands/FdbCliCommands.cs index eab5d23bb..a62fbda25 100644 --- a/FdbShell/Commands/FdbCliCommands.cs +++ b/FdbShell/Commands/FdbCliCommands.cs @@ -1,17 +1,13 @@ -using FoundationDB.Client; -using FoundationDB.Layers.Directories; -using FoundationDB.Layers.Tuples; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; - + namespace FdbShell { + using System; + using System.Diagnostics; + using System.IO; + using System.Text; + using System.Threading; + using System.Threading.Tasks; + public static class FdbCliCommands { diff --git a/FdbShell/FdbShell.csproj b/FdbShell/FdbShell.csproj index c065c6d3d..e626e7b5e 100644 --- a/FdbShell/FdbShell.csproj +++ b/FdbShell/FdbShell.csproj @@ -9,7 +9,7 @@ Properties FdbShell FdbShell - v4.5 + v4.6.1 512 ..\..\W2M\ true @@ -77,13 +77,6 @@ - - - - This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. - - - - \ No newline at end of file + + diff --git a/FoundationDB.Client/IFdbCluster.cs b/FoundationDB.Client/IFdbCluster.cs index 6c14420fa..3322e1320 100644 --- a/FoundationDB.Client/IFdbCluster.cs +++ b/FoundationDB.Client/IFdbCluster.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -34,10 +34,12 @@ namespace FoundationDB.Client using System.Threading.Tasks; /// Cluster connection context. 
+ [PublicAPI] public interface IFdbCluster : IDisposable { /// Path to the cluster file used by this connection, or null if the default cluster file is being used - string Path { [CanBeNull] get; } + [CanBeNull] + string Path { get; } /// Set an option on this cluster that does not take any parameter /// Option to set @@ -57,10 +59,10 @@ public interface IFdbCluster : IDisposable /// Name of the database. Must be 'DB' (as of Beta 2) /// Subspace of keys that will be accessed. /// If true, the database will only allow read operations. - /// Cancellation Token (optionnal) for the connect operation + /// Cancellation Token (optionnal) for the connect operation /// Task that will return an FdbDatabase, or an exception - Task OpenDatabaseAsync(string databaseName, IFdbSubspace subspace, bool readOnly, CancellationToken cancellationToken); - //REVIEW: we should return an IFdbDatabase instead ! + [ItemNotNull] + Task OpenDatabaseAsync(string databaseName, IKeySubspace subspace, bool readOnly, CancellationToken ct); } } diff --git a/FoundationDB.Client/IFdbDatabase.cs b/FoundationDB.Client/IFdbDatabase.cs index c3eca66c4..5cc046bb0 100644 --- a/FoundationDB.Client/IFdbDatabase.cs +++ b/FoundationDB.Client/IFdbDatabase.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -33,13 +33,16 @@ namespace FoundationDB.Client using System.Threading; /// Database connection context. 
- public interface IFdbDatabase : IFdbReadOnlyRetryable, IFdbRetryable, IFdbDynamicSubspace, IFdbKey, IDisposable + [PublicAPI] + public interface IFdbDatabase : IFdbRetryable, IDynamicKeySubspace, IDisposable { /// Name of the database - string Name { [NotNull] get; } + [NotNull] + string Name { get; } /// Cluster of the database - IFdbCluster Cluster { [NotNull] get; } + [NotNull] + IFdbCluster Cluster { get; } /// Returns a cancellation token that is linked with the lifetime of this database instance /// The token will be cancelled if the database instance is disposed @@ -47,10 +50,12 @@ public interface IFdbDatabase : IFdbReadOnlyRetryable, IFdbRetryable, IFdbDynami /// Returns the global namespace used by this database instance /// Makes a copy of the subspace tuple, so you should not call this property a lot. Use any of the Partition(..) methods to create a subspace of the database - IFdbDynamicSubspace GlobalSpace { [NotNull] get; } + [NotNull] + IDynamicKeySubspace GlobalSpace { get; } /// Directory partition of this database instance - FdbDatabasePartition Directory { [NotNull] get; } + [NotNull] + FdbDatabasePartition Directory { get; } /// If true, this database instance will only allow starting read-only transactions. bool IsReadOnly { get; } @@ -81,7 +86,7 @@ public interface IFdbDatabase : IFdbReadOnlyRetryable, IFdbRetryable, IFdbDynami /// Start a new transaction on this database, with the specified mode /// Mode of the transaction (read-only, read-write, ....) - /// Optional cancellation token that can abort all pending async operations started by this transaction. + /// Optional cancellation token that can abort all pending async operations started by this transaction. /// Existing parent context, if the transaction needs to be linked with a retry loop, or a parent transaction. If null, will create a new standalone context valid only for this transaction /// New transaction instance that can read from or write to the database. 
/// You MUST call Dispose() on the transaction when you are done with it. You SHOULD wrap it in a 'using' statement to ensure that it is disposed in all cases. @@ -93,7 +98,7 @@ public interface IFdbDatabase : IFdbReadOnlyRetryable, IFdbRetryable, IFdbDynami /// await tr.CommitAsync(); /// } [NotNull] - IFdbTransaction BeginTransaction(FdbTransactionMode mode, CancellationToken cancellationToken, FdbOperationContext context = null); + IFdbTransaction BeginTransaction(FdbTransactionMode mode, CancellationToken ct, FdbOperationContext context = null); } diff --git a/FoundationDB.Client/IFdbKey.cs b/FoundationDB.Client/IFdbKey.cs deleted file mode 100644 index 1a3f2c904..000000000 --- a/FoundationDB.Client/IFdbKey.cs +++ /dev/null @@ -1,40 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013, Doxense SARL -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client -{ - using System; - - /// Allows a layer or custom class to be used as keys in a FoundationDB database - public interface IFdbKey - { - /// Returns a binary representation of this instance, to be used as a complete key in the database - Slice ToFoundationDbKey(); - } - -} diff --git a/FoundationDB.Client/IFdbReadOnlyRetryable.cs b/FoundationDB.Client/IFdbReadOnlyRetryable.cs index d025ced09..32e1be41e 100644 --- a/FoundationDB.Client/IFdbReadOnlyRetryable.cs +++ b/FoundationDB.Client/IFdbReadOnlyRetryable.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -34,6 +34,7 @@ namespace FoundationDB.Client using System.Threading.Tasks; /// Transactional context that can execute, inside a retry loop, idempotent actions using read-only transactions. + [PublicAPI] public interface IFdbReadOnlyRetryable { #region Important Note: Differences with Python's @transactional and Java's TransactionContext @@ -64,29 +65,29 @@ public interface IFdbReadOnlyRetryable /// Runs a transactional lambda function inside a read-only transaction, which can be executed more than once if any retryable error occurs. /// Asynchronous handler that will be retried until it succeeds, or a non-recoverable error occurs. 
- /// Token used to cancel the operation + /// Token used to cancel the operation /// /// Since the handler can run more than once, and that there is no guarantee that the transaction commits once it returns, you MAY NOT mutate any global state (counters, cache, global dictionary) inside this lambda! /// You must wait for the Task to complete successfully before updating the global state of the application. /// - Task ReadAsync([NotNull, InstantHandle] Func asyncHandler, CancellationToken cancellationToken); + Task ReadAsync([NotNull, InstantHandle] Func asyncHandler, CancellationToken ct); /// Runs a transactional lambda function inside a read-only transaction, which can be executed more than once if any retryable error occurs. /// Asynchronous handler that will be retried until it succeeds, or a non-recoverable error occurs. - /// Token used to cancel the operation + /// Token used to cancel the operation /// /// Since the handler can run more than once, and that there is no guarantee that the transaction commits once it returns, you MAY NOT mutate any global state (counters, cache, global dictionary) inside this lambda! /// You must wait for the Task to complete successfully before updating the global state of the application. /// - Task ReadAsync([NotNull, InstantHandle] Func> asyncHandler, CancellationToken cancellationToken); + Task ReadAsync([NotNull, InstantHandle] Func> asyncHandler, CancellationToken ct); //REVIEW: should we keep these ? /// [EXPERIMENTAL] do not use yet!. - Task ReadAsync([NotNull, InstantHandle] Func asyncHandler, [InstantHandle] Action onDone, CancellationToken cancellationToken); + Task ReadAsync([NotNull, InstantHandle] Func asyncHandler, [InstantHandle] Action onDone, CancellationToken ct); /// [EXPERIMENTAL] do not use yet!. 
- Task ReadAsync([NotNull, InstantHandle] Func> asyncHandler, [InstantHandle] Action onDone, CancellationToken cancellationToken); + Task ReadAsync([NotNull, InstantHandle] Func> asyncHandler, [InstantHandle] Action onDone, CancellationToken ct); } diff --git a/FoundationDB.Client/IFdbReadOnlyTransaction.cs b/FoundationDB.Client/IFdbReadOnlyTransaction.cs index 19644ee81..ccc8c9218 100644 --- a/FoundationDB.Client/IFdbReadOnlyTransaction.cs +++ b/FoundationDB.Client/IFdbReadOnlyTransaction.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -35,6 +35,7 @@ namespace FoundationDB.Client using System.Threading.Tasks; /// Transaction that allows read operations + [PublicAPI] public interface IFdbReadOnlyTransaction : IDisposable { @@ -43,7 +44,8 @@ public interface IFdbReadOnlyTransaction : IDisposable int Id { get; } /// Context of this transaction. - FdbOperationContext Context { [NotNull] get; } + [NotNull] + FdbOperationContext Context { get; } /// Isolation Level of this transaction. FdbIsolationLevel IsolationLevel { get; } @@ -52,7 +54,8 @@ public interface IFdbReadOnlyTransaction : IDisposable bool IsSnapshot { get; } /// Return a Snapshotted version of this transaction, or the transaction itself it is already operating in Snapshot mode. - IFdbReadOnlyTransaction Snapshot { [NotNull] get; } + [NotNull] + IFdbReadOnlyTransaction Snapshot { get; } /// Cancellation Token linked to the life time of the transaction /// Will be triggered if the transaction is aborted or disposed @@ -76,17 +79,19 @@ public interface IFdbReadOnlyTransaction : IDisposable /// Reads several values from the database snapshot represented by the current transaction /// Keys to be looked up in the database /// Task that will return an array of values, or an exception. 
Each item in the array will contain the value of the key at the same index in , or Slice.Nil if that key does not exist. + [ItemNotNull] Task GetValuesAsync([NotNull] Slice[] keys); /// Resolves a key selector against the keys in the database snapshot represented by the current transaction. /// Key selector to resolve /// Task that will return the key matching the selector, or an exception - Task GetKeyAsync(FdbKeySelector selector); + Task GetKeyAsync(KeySelector selector); /// Resolves several key selectors against the keys in the database snapshot represented by the current transaction. /// Key selectors to resolve /// Task that will return an array of keys matching the selectors, or an exception - Task GetKeysAsync([NotNull] FdbKeySelector[] selectors); + [ItemNotNull] + Task GetKeysAsync([NotNull] KeySelector[] selectors); /// /// Reads all key-value pairs in the database snapshot represented by transaction (potentially limited by Limit, TargetBytes, or Mode) @@ -98,7 +103,7 @@ public interface IFdbReadOnlyTransaction : IDisposable /// Optionnal query options (Limit, TargetBytes, Mode, Reverse, ...) /// If streaming mode is FdbStreamingMode.Iterator, this parameter should start at 1 and be incremented by 1 for each successive call while reading this range. In all other cases it is ignored. /// - Task GetRangeAsync(FdbKeySelector beginInclusive, FdbKeySelector endExclusive, FdbRangeOptions options = null, int iteration = 0); + Task GetRangeAsync(KeySelector beginInclusive, KeySelector endExclusive, FdbRangeOptions options = null, int iteration = 0); /// /// Create a new range query that will read all key-value pairs in the database snapshot represented by the transaction @@ -107,11 +112,12 @@ public interface IFdbReadOnlyTransaction : IDisposable /// key selector defining the end of the range /// Optionnal query options (Limit, TargetBytes, Mode, Reverse, ...) 
/// Range query that, once executed, will return all the key-value pairs matching the providing selector pair - FdbRangeQuery> GetRange(FdbKeySelector beginInclusive, FdbKeySelector endExclusive, FdbRangeOptions options = null); + FdbRangeQuery> GetRange(KeySelector beginInclusive, KeySelector endExclusive, FdbRangeOptions options = null); /// Returns a list of public network addresses as strings, one for each of the storage servers responsible for storing and its associated value /// Name of the key whose location is to be queried. /// Task that will return an array of strings, or an exception + [ItemNotNull] Task GetAddressesForKeyAsync(Slice key); /// Returns this transaction snapshot read version. diff --git a/FoundationDB.Client/IFdbRetryable.cs b/FoundationDB.Client/IFdbRetryable.cs index c0e4c1dba..5efbcb297 100644 --- a/FoundationDB.Client/IFdbRetryable.cs +++ b/FoundationDB.Client/IFdbRetryable.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -34,64 +34,65 @@ namespace FoundationDB.Client using System.Threading.Tasks; /// Transactional context that can execute, inside a retry loop, idempotent actions using read and/or write transactions. + [PublicAPI] public interface IFdbRetryable : IFdbReadOnlyRetryable { // note: see IFdbReadOnlyRetryable for comments about the differences between the .NET binding and other binding regarding the design of Transactionals /// Run an idempotent transaction block inside a write-only transaction, which can be executed more than once if any retryable error occurs. /// Idempotent handler that should only call write methods on the transation, and may be retried until the transaction commits, or a non-recoverable error occurs. 
- /// Token used to cancel the operation + /// Token used to cancel the operation /// /// You do not need to commit the transaction inside the handler, it will be done automatically. /// Since the handler can run more than once, and that there is no guarantee that the transaction commits once it returns, you MAY NOT mutate any global state (counters, cache, global dictionary) inside this lambda! /// You must wait for the Task to complete successfully before updating the global state of the application. /// - Task WriteAsync([NotNull, InstantHandle] Action handler, CancellationToken cancellationToken); + Task WriteAsync([NotNull, InstantHandle] Action handler, CancellationToken ct); /// Run an idempotent transactional block inside a write-only transaction, which can be executed more than once if any retryable error occurs. /// Idempotent async handler that will be retried until the transaction commits, or a non-recoverable error occurs. - /// Token used to cancel the operation + /// Token used to cancel the operation /// /// You do not need to commit the transaction inside the handler, it will be done automatically. /// Since the handler can run more than once, and that there is no guarantee that the transaction commits once it returns, you MAY NOT mutate any global state (counters, cache, global dictionary) inside this lambda! /// You must wait for the Task to complete successfully before updating the global state of the application. /// - Task WriteAsync([NotNull, InstantHandle] Func handler, CancellationToken cancellationToken); + Task WriteAsync([NotNull, InstantHandle] Func handler, CancellationToken ct); /// Run an idempotent transactional block inside a read-write transaction, which can be executed more than once if any retryable error occurs. /// Idempotent asynchronous handler that will be retried until the transaction commits, or a non-recoverable error occurs. 
- /// Token used to cancel the operation + /// Token used to cancel the operation /// /// You do not need to commit the transaction inside the handler, it will be done automatically. /// Since the handler can run more than once, and that there is no guarantee that the transaction commits once it returns, you MAY NOT mutate any global state (counters, cache, global dictionary) inside this lambda! /// You must wait for the Task to complete successfully before updating the global state of the application. /// - Task ReadWriteAsync([NotNull, InstantHandle] Func asyncHandler, CancellationToken cancellationToken); + Task ReadWriteAsync([NotNull, InstantHandle] Func asyncHandler, CancellationToken ct); /// Run an idempotent transactional block that returns a value, inside a read-write transaction, which can be executed more than once if any retryable error occurs. /// Idempotent asynchronous lambda function that will be retried until the transaction commits, or a non-recoverable error occurs. The returned value of the last call will be the result of the operation. - /// Token used to cancel the operation - /// Result of the lambda function if the transaction committed sucessfully. + /// Token used to cancel the operation + /// Result of the lambda function if the transaction committed successfully. /// /// You do not need to commit the transaction inside the handler, it will be done automatically. /// Since the handler can run more than once, and that there is no guarantee that the transaction commits once it returns, you MAY NOT mutate any global state (counters, cache, global dictionary) inside this lambda! /// You must wait for the Task to complete successfully before updating the global state of the application. /// - Task ReadWriteAsync([NotNull, InstantHandle] Func> asyncHandler, CancellationToken cancellationToken); + Task ReadWriteAsync([NotNull, InstantHandle] Func> asyncHandler, CancellationToken ct); //REVIEW: should we keep these ? 
/// [EXPERIMENTAL] do not use yet!. - Task WriteAsync([NotNull, InstantHandle] Action handler, [NotNull, InstantHandle] Action onDone, CancellationToken cancellationToken); + Task WriteAsync([NotNull, InstantHandle] Action handler, [NotNull, InstantHandle] Action onDone, CancellationToken ct); /// [EXPERIMENTAL] do not use yet!. - Task WriteAsync([NotNull, InstantHandle] Func handler, [NotNull, InstantHandle] Action onDone, CancellationToken cancellationToken); + Task WriteAsync([NotNull, InstantHandle] Func handler, [NotNull, InstantHandle] Action onDone, CancellationToken ct); /// [EXPERIMENTAL] do not use yet!. - Task ReadWriteAsync([NotNull, InstantHandle] Func asyncHandler, [NotNull, InstantHandle] Action onDone, CancellationToken cancellationToken); + Task ReadWriteAsync([NotNull, InstantHandle] Func asyncHandler, [NotNull, InstantHandle] Action onDone, CancellationToken ct); /// [EXPERIMENTAL] do not use yet!. - Task ReadWriteAsync([NotNull, InstantHandle] Func> asyncHandler, [NotNull, InstantHandle] Action onDone, CancellationToken cancellationToken); + Task ReadWriteAsync([NotNull, InstantHandle] Func> asyncHandler, [NotNull, InstantHandle] Action onDone, CancellationToken ct); } } diff --git a/FoundationDB.Client/IFdbTransaction.cs b/FoundationDB.Client/IFdbTransaction.cs index 7e777eeb2..98f684dd9 100644 --- a/FoundationDB.Client/IFdbTransaction.cs +++ b/FoundationDB.Client/IFdbTransaction.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,11 +28,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client { + using JetBrains.Annotations; + using System; + using System.Threading; + using System.Threading.Tasks; /// Transaction that allows read and write operations + [PublicAPI] public interface IFdbTransaction : IFdbReadOnlyTransaction { /// Returns true if this transaction instance only allow read operations @@ -110,14 +112,40 @@ public interface IFdbTransaction : IFdbReadOnlyTransaction /// long GetCommittedVersion(); + + /// Returns the which was used by versionstamps operations in this transaction. + /// + /// The Task will be ready only after the successful completion of a call to on this transaction. + /// Read-only transactions do not modify the database when committed and will result in the Task completing with an error. + /// Keep in mind that a transaction which reads keys and then sets them to their current values may be optimized to a read-only transaction. + /// + Task GetVersionStampAsync(); + + /// Return a place-holder 80-bit VersionStamp, whose value is not yet known, but will be filled by the database at commit time. + /// This value can be used to generate temporary keys or values, for use with the or mutations + /// + /// The generated placeholder will use a random value that is unique per transaction (and changes at each retry). + /// If the key contains the exact 80-bit byte signature of this token, the corresponding location will be tagged and replaced with the actual VersionStamp at commit time. + /// If another part of the key contains (by random chance) the same exact byte sequence, then an error will be triggered, and hopefully the transaction will retry with another byte sequence. + /// + VersionStamp CreateVersionStamp(); + + /// Return a place-holder 96-bit VersionStamp with an attached user version, whose value is not yet known, but will be filled by the database at commit time.
+ /// This value can be used to generate temporary keys or values, for use with the or mutations + /// + /// The generated placeholder will use a random value that is unique per transaction (and changes at each retry). + /// If the key contains the exact 80-bit byte signature of this token, the corresponding location will be tagged and replaced with the actual VersionStamp at commit time. + /// If another part of the key contains (by random chance) the same exact byte sequence, then an error will be triggered, and hopefully the transaction will retry with another byte sequence. + /// + VersionStamp CreateVersionStamp(int userVersion); + /// /// Watch a key for any change in the database. /// /// Key to watch - /// CancellationToken used to abort the watch if the caller doesn't want to wait anymore. Note that you can manually cancel the watch by calling Cancel() on the returned FdbWatch instance + /// CancellationToken used to abort the watch if the caller doesn't want to wait anymore. Note that you can manually cancel the watch by calling Cancel() on the returned FdbWatch instance /// FdbWatch that can be awaited and will complete when the key has changed in the database, or cancellation occurs. You can call Cancel() at any time if you are not interested in watching the key anymore. You MUST always call Dispose() if the watch completes or is cancelled, to ensure that resources are released properly. /// You can directly await an FdbWatch, or obtain a Task<Slice> by reading the property.
- FdbWatch Watch(Slice key, CancellationToken cancellationToken); + FdbWatch Watch(Slice key, CancellationToken ct); } diff --git a/FoundationDB.Client/FdbKeyRange.cs b/FoundationDB.Client/KeyRange.cs similarity index 53% rename from FoundationDB.Client/FdbKeyRange.cs rename to FoundationDB.Client/KeyRange.cs index a66c45110..a4c72b0de 100644 --- a/FoundationDB.Client/FdbKeyRange.cs +++ b/FoundationDB.Client/KeyRange.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,254 +28,288 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client { - using FoundationDB.Client.Utils; using System; + using System.Collections.Generic; using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Represents a pair of keys defining the range 'Begin <= key > End' [DebuggerDisplay("Begin={Begin}, End={End}")] - public struct FdbKeyRange : IEquatable, IComparable + public readonly struct KeyRange : IEquatable, IComparable, IEquatable<(Slice Begin, Slice End)>, IComparable<(Slice Begin, Slice End)> { - /// Returns an empty pair of keys - public static FdbKeyRange Empty { get { return default(FdbKeyRange); } } - - /// Returns a range that contains all the keys in the database - public static FdbKeyRange All { get { return new FdbKeyRange(FdbKey.MinValue, FdbKey.MaxValue); } } /// Start of the range - public Slice Begin { get { return m_begin; } } - private Slice m_begin; //PERF: readonly struct + public readonly Slice Begin; /// End of the range - public Slice End { get { return m_end; } } - private Slice m_end; //PERF: readonly struct + public readonly Slice End; - /// - /// Create a new range of keys - /// + /// Create a new range of keys /// Start of range (usually included) /// End of range (usually 
excluded) - public FdbKeyRange(Slice begin, Slice end) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange(Slice begin, Slice end) { - m_begin = begin; - m_end = end; - - Contract.Ensures(m_begin <= m_end, "The range is inverted"); + this.Begin = begin; + this.End = end; + Contract.Ensures(this.Begin <= this.End, "The range is inverted"); } - public FdbKeyRange(IFdbKey begin, IFdbKey end) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static KeyRange Create(Slice a, Slice b) { - if (begin == null) throw new ArgumentNullException("begin"); - if (end == null) throw new ArgumentNullException("end"); - - m_begin = begin.ToFoundationDbKey(); - m_end = end.ToFoundationDbKey(); - - Contract.Ensures(m_begin <= m_end, "The range is inverted"); + return new KeyRange(a, b); } - public static FdbKeyRange Create(Slice a, Slice b) - { - return new FdbKeyRange(a, b); - } + /// Returns an empty pair of keys + public static readonly KeyRange Empty = default(KeyRange); + + /// Returns a range that contains all the keys in the database + public static KeyRange All => new KeyRange(FdbKey.MinValue, FdbKey.MaxValue); /// Create a range that will return all keys starting with : ('prefix' <= k < strinc('prefix')) - /// - /// - public static FdbKeyRange StartsWith(Slice prefix) + [Pure] + public static KeyRange StartsWith(Slice prefix) { - if (prefix.IsNull) throw Fdb.Errors.KeyCannotBeNull("prefix"); + if (prefix.Count == 0) + { + if (prefix.IsNull) throw Fdb.Errors.KeyCannotBeNull(nameof(prefix)); + return new KeyRange(Slice.Empty, FdbKey.MaxValue); + } // prefix => [ prefix, prefix + 1 ) - return new FdbKeyRange( + return new KeyRange( prefix, FdbKey.Increment(prefix) ); } - public static FdbKeyRange StartsWith(TKey prefix) - where TKey : IFdbKey - { - if (prefix == null) throw new ArgumentNullException("prefix"); - return StartsWith(prefix.ToFoundationDbKey()); - } - /// Create a range that selects all keys starting with , but not the prefix 
itself: ('prefix\x00' <= k < string('prefix') /// Key prefix (that will be excluded from the range) /// Range including all keys with the specified prefix. - public static FdbKeyRange PrefixedBy(Slice prefix) + [Pure] + public static KeyRange PrefixedBy(Slice prefix) { - if (prefix.IsNull) throw Fdb.Errors.KeyCannotBeNull("prefix"); + if (prefix.IsNull) throw Fdb.Errors.KeyCannotBeNull(nameof(prefix)); // prefix => [ prefix."\0", prefix + 1) - return new FdbKeyRange( + return new KeyRange( prefix + FdbKey.MinValue, FdbKey.Increment(prefix) ); } - public static FdbKeyRange PrefixedBy(TKey prefix) - where TKey : IFdbKey - { - if (prefix == null) throw new ArgumentNullException("prefix"); - return PrefixedBy(prefix.ToFoundationDbKey()); - } - /// Create a range that will only return itself ('key' <= k < 'key\x00') /// Key that will be returned by the range /// Range that only return the specified key. - public static FdbKeyRange FromKey(Slice key) + [Pure] + public static KeyRange FromKey(Slice key) { - if (key.IsNull) throw Fdb.Errors.KeyCannotBeNull(); - if (key.Count == 0) { // "" => [ "", "\x00" ) - return new FdbKeyRange(Slice.Empty, FdbKey.MinValue); + if (key.IsNull) throw Fdb.Errors.KeyCannotBeNull(); + return new KeyRange(Slice.Empty, FdbKey.MinValue); } // key => [ key, key + '\0' ) - return new FdbKeyRange( + return new KeyRange( key, key + FdbKey.MinValue ); } - public static FdbKeyRange FromKey(TKey key) - where TKey : IFdbKey + public override bool Equals(object obj) + { + if (obj is KeyRange range) return Equals(range); + if (obj is ValueTuple tuple) return Equals(tuple); + return false; + } + + public override int GetHashCode() { - if (key == null) throw new ArgumentNullException("key"); - return FromKey(key.ToFoundationDbKey()); + return HashCodes.Combine(this.Begin.GetHashCode(), this.End.GetHashCode()); } - public override bool Equals(object obj) + public bool Equals(KeyRange other) { - return (obj is FdbKeyRange) && Equals((FdbKeyRange)obj); + 
return this.Begin.Equals(other.Begin) && this.End.Equals(other.End); } - public override int GetHashCode() + public bool Equals((Slice Begin, Slice End) other) { - // ReSharper disable once NonReadonlyMemberInGetHashCode - int h1 = m_begin.GetHashCode(); - // ReSharper disable once NonReadonlyMemberInGetHashCode - int h2 = m_end.GetHashCode(); - return ((h1 << 5) + h1) ^ h2; + return this.Begin.Equals(other.Begin) && this.End.Equals(other.End); } - public bool Equals(FdbKeyRange other) + public static bool operator ==(KeyRange left, KeyRange right) { - return m_begin.Equals(other.m_begin) && m_end.Equals(other.m_end); + return left.Begin.Equals(right.Begin) && left.End.Equals(right.End); } - public static bool operator ==(FdbKeyRange left, FdbKeyRange right) + public static bool operator !=(KeyRange left, KeyRange right) { - return left.m_begin.Equals(right.m_begin) && left.m_end.Equals(right.m_end); + return !left.Begin.Equals(right.Begin) || !left.End.Equals(right.End); } - public static bool operator !=(FdbKeyRange left, FdbKeyRange right) + public int CompareTo(KeyRange other) { - return !left.m_begin.Equals(right.m_begin) || !left.m_end.Equals(right.m_end); + int c = this.Begin.CompareTo(other.Begin); + if (c == 0) c = this.End.CompareTo(other.End); + return c; } - public int CompareTo(FdbKeyRange other) + public int CompareTo((Slice Begin, Slice End) other) { - int c = m_begin.CompareTo(other.m_begin); - if (c == 0) c = m_end.CompareTo(other.m_end); + int c = this.Begin.CompareTo(other.Begin); + if (c == 0) c = this.End.CompareTo(other.End); return c; } + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator KeyRange((Slice Begin, Slice End) range) + { + return new KeyRange(range.Begin, range.End); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator (Slice Begin, Slice End)(KeyRange range) + { + return (range.Begin, range.End); + } + /// Combine another range with the current 
range, to produce a range that includes both (and all keys in between it the ranges are disjoint) /// Range to merge with the current range /// New range where the Begin key is the smallest bound and the End key is the largest bound of both ranges. /// If both range are disjoint, then the resulting range will also contain the keys in between. - public FdbKeyRange Merge(FdbKeyRange other) + [Pure] + public KeyRange Merge(KeyRange other) { - Slice begin = m_begin.CompareTo(other.m_begin) <= 0 ? m_begin : other.m_begin; - Slice end = m_end.CompareTo(other.m_end) >= 0 ? m_end : other.m_end; - return new FdbKeyRange(begin, end); + Slice begin = this.Begin.CompareTo(other.Begin) <= 0 ? this.Begin : other.Begin; + Slice end = this.End.CompareTo(other.End) >= 0 ? this.End : other.End; + return new KeyRange(begin, end); } /// Checks whether the current and the specified range are intersecting (i.e: there exists at at least one key that belongs to both ranges) /// Range that is being checked for interection /// True if the other range intersects the current range. /// Note that ranges [0, 1) and [1, 2) do not intersect, since the end is exclusive by default - public bool Intersects(FdbKeyRange other) + [Pure] + public bool Intersects(KeyRange other) { - int c = m_begin.CompareTo(other.m_begin); + int c = this.Begin.CompareTo(other.Begin); if (c == 0) { // share the same begin key return true; } - else if (c < 0) + if (c < 0) { // after us - return m_end.CompareTo(other.m_begin) > 0; - } - else - { // before us - return m_begin.CompareTo(other.m_end) < 0; + return this.End.CompareTo(other.Begin) > 0; } + // before us + return this.Begin.CompareTo(other.End) < 0; } /// Checks whether the current and the specified range are disjoint (i.e: there exists at least one key between both ranges) /// /// /// Note that ranges [0, 1) and [1, 2) are not disjoint because, even though they do not intersect, they are both contiguous. 
- public bool Disjoint(FdbKeyRange other) + [Pure] + public bool Disjoint(KeyRange other) { - int c = m_begin.CompareTo(other.m_begin); + int c = this.Begin.CompareTo(other.Begin); if (c == 0) { // share the same begin key return false; } - else if (c < 0) + if (c < 0) { // after us - return m_end.CompareTo(other.m_begin) < 0; - } - else - { // before us - return m_begin.CompareTo(other.m_end) > 0; + return this.End.CompareTo(other.Begin) < 0; } + // before us + return this.Begin.CompareTo(other.End) > 0; } /// Returns true, if the key is contained in the range /// /// + [Pure] public bool Contains(Slice key) { - return key.CompareTo(m_begin) >= 0 && key.CompareTo(m_end) < 0; - } - - public bool Contains(TKey key) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - return Contains(key.ToFoundationDbKey()); + return key.CompareTo(this.Begin) >= 0 && key.CompareTo(this.End) < 0; } /// Test if is contained inside the range /// Key that will be compared with the the range's bounds /// If true, the End bound is inclusive, otherwise it is exclusive /// -1 if key is less than the lower bound of the range ( < Begin), +1 if the key is greater or equal to the higher bound of the range ( >= End) or 0 if it is inside the range (Begin <= < End) + [Pure] public int Test(Slice key, bool endIncluded = false) { // note: if the range is empty (Begin = End = Slice.Empty) then it should return 0 - if (m_begin.IsPresent && key.CompareTo(m_begin) < 0) return -1; - if (m_end.IsPresent && key.CompareTo(m_end) >= (endIncluded ? 1 : 0)) return +1; + if (this.Begin.IsPresent && key.CompareTo(this.Begin) < 0) return -1; + if (this.End.IsPresent && key.CompareTo(this.End) >= (endIncluded ? 
1 : 0)) return +1; return 0; } - public int Test(TKey key, bool endIncluded = false) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - return Test(key.ToFoundationDbKey(), endIncluded); - } - /// Returns a printable version of the range public override string ToString() { - return "{" + FdbKey.PrettyPrint(m_begin, FdbKey.PrettyPrintMode.Begin) + ", " + FdbKey.PrettyPrint(m_end, FdbKey.PrettyPrintMode.End) + "}"; + return "{" + FdbKey.PrettyPrint(this.Begin, FdbKey.PrettyPrintMode.Begin) + ", " + FdbKey.PrettyPrint(this.End, FdbKey.PrettyPrintMode.End) + "}"; } + [DebuggerDisplay("Mode={m_mode}")] + public sealed class Comparer : IComparer, IEqualityComparer + { + private const int BOTH = 0; + private const int BEGIN = 1; + private const int END = 2; + + public static readonly Comparer Default = new Comparer(BOTH); + public static readonly Comparer Begin = new Comparer(BEGIN); + public static readonly Comparer End = new Comparer(END); + + private readonly int m_mode; + + private Comparer(int mode) + { + Contract.Requires(mode >= BOTH && mode <= END); + m_mode = mode; + } + + public int Compare(KeyRange x, KeyRange y) + { + switch (m_mode) + { + case BEGIN: return x.Begin.CompareTo(y.Begin); + case END: return x.End.CompareTo(y.End); + default: return x.CompareTo(y); + } + } + + public bool Equals(KeyRange x, KeyRange y) + { + switch(m_mode) + { + case BEGIN: return x.Begin.Equals(y.Begin); + case END: return x.End.Equals(y.End); + default: return x.Equals(y); + } + } + + public int GetHashCode(KeyRange obj) + { + switch(m_mode) + { + case BEGIN: return obj.Begin.GetHashCode(); + case END: return obj.End.GetHashCode(); + default: return obj.GetHashCode(); + } + } + } + + } } diff --git a/FoundationDB.Client/FdbKeySelector.cs b/FoundationDB.Client/KeySelector.cs similarity index 56% rename from FoundationDB.Client/FdbKeySelector.cs rename to FoundationDB.Client/KeySelector.cs index 4c260c918..c66564373 100644 --- 
a/FoundationDB.Client/FdbKeySelector.cs +++ b/FoundationDB.Client/KeySelector.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -34,15 +34,12 @@ namespace FoundationDB.Client using JetBrains.Annotations; /// Defines a selector for a key in the database - [DebuggerDisplay("{ToString()}")] - public struct FdbKeySelector : IEquatable + [DebuggerDisplay("{ToString(),nq}")] + public readonly struct KeySelector : IEquatable { - /// Empty key selector - public static readonly FdbKeySelector None = default(FdbKeySelector); /// Key of the selector - public Slice Key { get { return m_key; } } - private Slice m_key; //PERF: readonly struct + public readonly Slice Key; /// If true, the selected key can be equal to . public readonly bool OrEqual; @@ -51,158 +48,120 @@ public struct FdbKeySelector : IEquatable public readonly int Offset; /// Creates a new selector - public FdbKeySelector(Slice key, bool orEqual, int offset) + public KeySelector(Slice key, bool orEqual, int offset) { - m_key = key; + Key = key; this.OrEqual = orEqual; this.Offset = offset; } - /// Creates a new selector - public FdbKeySelector(IFdbKey key, bool orEqual, int offset) - { - if (key == null) throw new ArgumentNullException("key"); - m_key = key.ToFoundationDbKey(); - this.OrEqual = orEqual; - this.Offset = offset; - } - - /// Returns a displayable representation of the key selector - [Pure] - public string PrettyPrint(FdbKey.PrettyPrintMode mode) - { - var sb = new StringBuilder(); - int offset = this.Offset; - if (offset < 1) - { - sb.Append(this.OrEqual ? "lLE{" : "lLT{"); - } - else - { - --offset; - sb.Append(this.OrEqual ? 
"fGT{" : "fGE{"); - } - sb.Append(FdbKey.PrettyPrint(m_key, mode)); - sb.Append("}"); - - if (offset > 0) - sb.Append(" + ").Append(offset); - else if (offset < 0) - sb.Append(" - ").Append(-offset); - - return sb.ToString(); - } - - /// Converts the value of the current object into its equivalent string representation - public override string ToString() - { - return PrettyPrint(FdbKey.PrettyPrintMode.Single); - } + /// Empty key selector + public static readonly KeySelector None = default(KeySelector); - public bool Equals(FdbKeySelector other) + public bool Equals(KeySelector other) { - return this.Offset == other.Offset && this.OrEqual == other.OrEqual && m_key.Equals(other.m_key); + return this.Offset == other.Offset && this.OrEqual == other.OrEqual && Key.Equals(other.Key); } public override bool Equals(object obj) { - return obj is FdbKeySelector && Equals((FdbKeySelector)obj); + return obj is KeySelector selector && Equals(selector); } public override int GetHashCode() { // ReSharper disable once NonReadonlyMemberInGetHashCode - return m_key.GetHashCode() ^ this.Offset ^ (this.OrEqual ? 0 : -1); + return Key.GetHashCode() ^ this.Offset ^ (this.OrEqual ? 
0 : -1); } /// Creates a key selector that will select the last key that is less than - public static FdbKeySelector LastLessThan(Slice key) + public static KeySelector LastLessThan(Slice key) { // #define FDB_KEYSEL_LAST_LESS_THAN(k, l) k, l, 0, 0 - return new FdbKeySelector(key, false, 0); + return new KeySelector(key, false, 0); } /// Creates a key selector that will select the last key that is less than or equal to - public static FdbKeySelector LastLessOrEqual(Slice key) + public static KeySelector LastLessOrEqual(Slice key) { // #define FDB_KEYSEL_LAST_LESS_OR_EQUAL(k, l) k, l, 1, 0 - return new FdbKeySelector(key, true, 0); + return new KeySelector(key, true, 0); } /// Creates a key selector that will select the first key that is greater than - public static FdbKeySelector FirstGreaterThan(Slice key) + public static KeySelector FirstGreaterThan(Slice key) { // #define FDB_KEYSEL_FIRST_GREATER_THAN(k, l) k, l, 1, 1 - return new FdbKeySelector(key, true, 1); + return new KeySelector(key, true, 1); } /// Creates a key selector that will select the first key that is greater than or equal to - public static FdbKeySelector FirstGreaterOrEqual(Slice key) + public static KeySelector FirstGreaterOrEqual(Slice key) { // #define FDB_KEYSEL_FIRST_GREATER_OR_EQUAL(k, l) k, l, 0, 1 - return new FdbKeySelector(key, false, 1); - } - - /// Creates a key selector that will select the last key that is less than - public static FdbKeySelector LastLessThan(TKey key) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - return LastLessThan(key.ToFoundationDbKey()); - } - - /// Creates a key selector that will select the last key that is less than or equal to - public static FdbKeySelector LastLessOrEqual(TKey key) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - return LastLessOrEqual(key.ToFoundationDbKey()); - } - - /// Creates a key selector that will select the first key that is greater than - public 
static FdbKeySelector FirstGreaterThan(TKey key) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - return FirstGreaterThan(key.ToFoundationDbKey()); - } - - /// Creates a key selector that will select the first key that is greater than or equal to - public static FdbKeySelector FirstGreaterOrEqual(TKey key) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - return FirstGreaterOrEqual(key.ToFoundationDbKey()); + return new KeySelector(key, false, 1); } /// Add a value to the selector's offset /// ex: fGE('abc') /// ex: 7 /// fGE('abc')+7 - public static FdbKeySelector operator +(FdbKeySelector selector, int offset) + public static KeySelector operator +(KeySelector selector, int offset) { - return new FdbKeySelector(selector.m_key, selector.OrEqual, selector.Offset + offset); + return new KeySelector(selector.Key, selector.OrEqual, selector.Offset + offset); } /// Substract a value to the selector's offset /// ex: fGE('abc') /// ex: 7 /// fGE('abc')-7 - public static FdbKeySelector operator -(FdbKeySelector selector, int offset) + public static KeySelector operator -(KeySelector selector, int offset) { - return new FdbKeySelector(selector.m_key, selector.OrEqual, selector.Offset - offset); + return new KeySelector(selector.Key, selector.OrEqual, selector.Offset - offset); } - public static bool operator ==(FdbKeySelector left, FdbKeySelector right) + public static bool operator ==(KeySelector left, KeySelector right) { return left.Equals(right); } - public static bool operator !=(FdbKeySelector left, FdbKeySelector right) + public static bool operator !=(KeySelector left, KeySelector right) { return !left.Equals(right); } + /// Converts the value of the current object into its equivalent string representation + public override string ToString() + { + return PrettyPrint(FdbKey.PrettyPrintMode.Single); + } + + /// Returns a displayable representation of the key selector + [Pure] + public string 
PrettyPrint(FdbKey.PrettyPrintMode mode) + { + var sb = new StringBuilder(); + int offset = this.Offset; + if (offset < 1) + { + sb.Append(this.OrEqual ? "lLE{" : "lLT{"); + } + else + { + --offset; + sb.Append(this.OrEqual ? "fGT{" : "fGE{"); + } + sb.Append(FdbKey.PrettyPrint(Key, mode)); + sb.Append("}"); + + if (offset > 0) + sb.Append(" + ").Append(offset); + else if (offset < 0) + sb.Append(" - ").Append(-offset); + + return sb.ToString(); + } + } } diff --git a/FoundationDB.Client/FdbKeySelectorPair.cs b/FoundationDB.Client/KeySelectorPair.cs similarity index 54% rename from FoundationDB.Client/FdbKeySelectorPair.cs rename to FoundationDB.Client/KeySelectorPair.cs index 0b9b3bb78..68f881de5 100644 --- a/FoundationDB.Client/FdbKeySelectorPair.cs +++ b/FoundationDB.Client/KeySelectorPair.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -33,87 +33,65 @@ namespace FoundationDB.Client /// Represents of pair of key selectors that range 'GetKey(Begin) <= key < GetKey(End)' [DebuggerDisplay("[ToString()]")] - public struct FdbKeySelectorPair + public readonly struct KeySelectorPair { /// Start of the range - public FdbKeySelector Begin { get { return m_begin; } } - private FdbKeySelector m_begin; //PERF: readonly struct + public readonly KeySelector Begin; /// End of the range - public FdbKeySelector End { get { return m_end; } } - private FdbKeySelector m_end; //PERF: readonly struct + public readonly KeySelector End; /// Create a new pair of key selectors /// Selector for key from which to start iterating /// Selector for key where to stop iterating - public FdbKeySelectorPair(FdbKeySelector beginInclusive, FdbKeySelector endExclusive) + public KeySelectorPair(KeySelector beginInclusive, KeySelector endExclusive) { - m_begin = beginInclusive; - m_end = endExclusive; + this.Begin = beginInclusive; + this.End 
= endExclusive; } /// Factory method for a pair of key selectors - public static FdbKeySelectorPair Create(FdbKeySelector beginInclusive, FdbKeySelector endExclusive) + public static KeySelectorPair Create(KeySelector beginInclusive, KeySelector endExclusive) { - return new FdbKeySelectorPair( + return new KeySelectorPair( beginInclusive, endExclusive ); } /// Create a new pair of key selectors using FIRST_GREATER_OR_EQUAL on both keys - public static FdbKeySelectorPair Create(Slice begin, Slice end) + public static KeySelectorPair Create(Slice begin, Slice end) { - return new FdbKeySelectorPair( - FdbKeySelector.FirstGreaterOrEqual(begin), - FdbKeySelector.FirstGreaterOrEqual(end) + return new KeySelectorPair( + KeySelector.FirstGreaterOrEqual(begin), + KeySelector.FirstGreaterOrEqual(end) ); } /// Create a new pair of key selectors using FIRST_GREATER_OR_EQUAL on both keys - public static FdbKeySelectorPair Create(TKey begin, TKey end) - where TKey : IFdbKey + public static KeySelectorPair Create(KeyRange range) { - if (begin == null) throw new ArgumentNullException("begin"); - if (end == null) throw new ArgumentNullException("end"); - return new FdbKeySelectorPair( - FdbKeySelector.FirstGreaterOrEqual(begin.ToFoundationDbKey()), - FdbKeySelector.FirstGreaterOrEqual(end.ToFoundationDbKey()) - ); - } - - /// Create a new pair of key selectors using FIRST_GREATER_OR_EQUAL on both keys - public static FdbKeySelectorPair Create(FdbKeyRange range) - { - return new FdbKeySelectorPair( - FdbKeySelector.FirstGreaterOrEqual(range.Begin), - FdbKeySelector.FirstGreaterOrEqual(range.End) + return new KeySelectorPair( + KeySelector.FirstGreaterOrEqual(range.Begin), + KeySelector.FirstGreaterOrEqual(range.End) ); } /// Create a new pair of key selectors that will select all the keys that start with the specified prefix - public static FdbKeySelectorPair StartsWith(Slice prefix) + public static KeySelectorPair StartsWith(Slice prefix) { - var range = 
FdbKeyRange.StartsWith(prefix); + var range = KeyRange.StartsWith(prefix); - return new FdbKeySelectorPair( - FdbKeySelector.FirstGreaterOrEqual(range.Begin), - FdbKeySelector.FirstGreaterOrEqual(range.End) + return new KeySelectorPair( + KeySelector.FirstGreaterOrEqual(range.Begin), + KeySelector.FirstGreaterOrEqual(range.End) ); } - /// Create a new pair of key selectors that will select all the keys that start with the specified prefix - public static FdbKeySelectorPair StartsWith(TKey prefix) - where TKey : IFdbKey - { - if (prefix == null) throw new ArgumentNullException("prefix"); - return StartsWith(prefix.ToFoundationDbKey()); - } - /// Returns a printable version of the pair of key selectors public override string ToString() { - return "[ " + m_begin.PrettyPrint(FdbKey.PrettyPrintMode.Begin) + ", " + m_end.PrettyPrint(FdbKey.PrettyPrintMode.End) + " )"; + return "[ " + this.Begin.PrettyPrint(FdbKey.PrettyPrintMode.Begin) + ", " + this.End.PrettyPrint(FdbKey.PrettyPrintMode.End) + " )"; } } diff --git a/FoundationDB.Client/Layers/Directories/FdbDirectoryExtensions.cs b/FoundationDB.Client/Layers/Directories/FdbDirectoryExtensions.cs index 2aea8bb71..5d08ffb13 100644 --- a/FoundationDB.Client/Layers/Directories/FdbDirectoryExtensions.cs +++ b/FoundationDB.Client/Layers/Directories/FdbDirectoryExtensions.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -45,52 +45,52 @@ public static class FdbDirectoryExtensions /// If the directory does not exist, it is created (creating parent directories if necessary). /// If layer is specified, it is checked against the layer of an existing directory or set as the layer of a new directory. 
/// - public static Task CreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, CancellationToken cancellationToken) + public static Task CreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, path, Slice.Nil), cancellationToken); + return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, path, Slice.Nil), ct); } /// Opens the directory with the given . /// If the directory does not exist, it is created (creating parent directories if necessary). /// If layer is specified, it is checked against the layer of an existing directory or set as the layer of a new directory. 
/// - public static Task CreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken cancellationToken) + public static Task CreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, path, layer), cancellationToken); + return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, path, layer), ct); } /// Opens the directory with the given . /// If the directory does not exist, it is created (creating parent directories if necessary). /// If layer is specified, it is checked against the layer of an existing directory or set as the layer of a new directory. 
/// - public static Task CreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task CreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, new [] { name }, Slice.Nil), cancellationToken); + return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, new [] { name }, Slice.Nil), ct); } /// Opens the directory with the given . /// If the directory does not exist, it is created (creating parent directories if necessary). /// If layer is specified, it is checked against the layer of an existing directory or set as the layer of a new directory. 
/// - public static Task CreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, Slice layer, CancellationToken cancellationToken) + public static Task CreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, new[] { name }, layer), cancellationToken); + return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, new[] { name }, layer), ct); } /// Opens the directory with the given . @@ -99,9 +99,9 @@ public static Task CreateOrOpenAsync([NotNull] this IFdbDi /// public static Task CreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbTransaction trans, [NotNull] string name, Slice layer = default(Slice)) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.CreateOrOpenAsync(trans, new[] { name }, layer); } @@ -118,9 +118,9 @@ public static Task CreateOrOpenAsync([NotNull] this IFdbDi /// public static Task TryCreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, bool readOnly, Slice layer = default(Slice)) { - if 
(directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); if (readOnly) return directory.TryOpenAsync(trans, path, layer); @@ -140,7 +140,7 @@ public static Task CreateOrOpenAsync([NotNull] this IFdbDi /// public static Task TryCreateOrOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbTransaction trans, [NotNull] string name, bool readOnly, Slice layer = default(Slice)) { - if (name == null) throw new ArgumentNullException("name"); + if (name == null) throw new ArgumentNullException(nameof(name)); return TryCreateOrOpenAsync(directory, trans, new[] { name }, readOnly, layer); } @@ -152,51 +152,51 @@ public static Task CreateOrOpenAsync([NotNull] this IFdbDi /// Creates a directory with the given (creating parent directories if necessary). /// An error is raised if the given directory already exists. 
/// - public static Task CreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, CancellationToken cancellationToken) + public static Task CreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadWriteAsync((tr) => directory.CreateAsync(tr, path, Slice.Nil), cancellationToken); + return db.ReadWriteAsync((tr) => directory.CreateAsync(tr, path, Slice.Nil), ct); } /// Creates a directory with the given (creating parent directories if necessary). /// An error is raised if the given directory already exists. /// If is specified, it is recorded with the directory and will be checked by future calls to open. 
/// - public static Task CreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken cancellationToken) + public static Task CreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadWriteAsync((tr) => directory.CreateAsync(tr, path, layer), cancellationToken); + return db.ReadWriteAsync((tr) => directory.CreateAsync(tr, path, layer), ct); } /// Creates a directory with the given . /// An error is raised if the given directory already exists. /// - public static Task CreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task CreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadWriteAsync((tr) => directory.CreateAsync(tr, new [] { name }, Slice.Nil), cancellationToken); + return db.ReadWriteAsync((tr) => directory.CreateAsync(tr, new [] { name }, Slice.Nil), ct); } /// Creates a directory with the given . 
/// An error is raised if the given directory already exists. /// If is specified, it is recorded with the directory and will be checked by future calls to open. /// - public static Task CreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, Slice layer, CancellationToken cancellationToken) + public static Task CreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadWriteAsync((tr) => directory.CreateAsync(tr, new[] { name }, layer), cancellationToken); + return db.ReadWriteAsync((tr) => directory.CreateAsync(tr, new[] { name }, layer), ct); } /// Creates a directory with the given . @@ -205,53 +205,53 @@ public static Task CreateAsync([NotNull] this IFdbDirector /// public static Task CreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbTransaction trans, [NotNull] string name, Slice layer = default(Slice)) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.CreateAsync(trans, new[] { name }, layer); } /// Attempts to create a directory with the given (creating parent directories if necessary). 
- public static Task TryCreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, CancellationToken cancellationToken) + public static Task TryCreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadWriteAsync((tr) => directory.TryCreateAsync(tr, path, Slice.Nil), cancellationToken); + return db.ReadWriteAsync((tr) => directory.TryCreateAsync(tr, path, Slice.Nil), ct); } /// Attempts to create a directory with the given (creating parent directories if necessary). /// If is specified, it is recorded with the directory and will be checked by future calls to open. 
/// - public static Task TryCreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken cancellationToken) + public static Task TryCreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadWriteAsync((tr) => directory.TryCreateAsync(tr, path, layer), cancellationToken); + return db.ReadWriteAsync((tr) => directory.TryCreateAsync(tr, path, layer), ct); } /// Attempts to create a directory with the given . - public static Task TryCreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task TryCreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); - return db.ReadWriteAsync((tr) => directory.TryCreateAsync(tr, new [] { name }, Slice.Nil), cancellationToken); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); + return db.ReadWriteAsync((tr) => directory.TryCreateAsync(tr, new [] { name }, Slice.Nil), ct); } /// Attempts to create a directory with the given . 
/// If is specified, it is recorded with the directory and will be checked by future calls to open. /// - public static Task TryCreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, Slice layer, CancellationToken cancellationToken) + public static Task TryCreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); - return db.ReadWriteAsync((tr) => directory.TryCreateAsync(tr, new[] { name }, layer), cancellationToken); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); + return db.ReadWriteAsync((tr) => directory.TryCreateAsync(tr, new[] { name }, layer), ct); } /// Attempts to create a directory with the given . @@ -259,9 +259,9 @@ public static Task TryCreateAsync([NotNull] this IFdbDirec /// public static Task TryCreateAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbTransaction trans, [NotNull] string name, Slice layer = default(Slice)) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.TryCreateAsync(trans, new[] { name }, layer); } @@ -273,49 +273,49 @@ public static Task TryCreateAsync([NotNull] this IFdbDirec /// Opens the directory with the given . /// An error is raised if the directory does not exist. 
/// - public static Task OpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, CancellationToken cancellationToken) + public static Task OpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadAsync((tr) => directory.OpenAsync(tr, path, Slice.Nil), cancellationToken); + return db.ReadAsync((tr) => directory.OpenAsync(tr, path, Slice.Nil), ct); } /// Opens the directory with the given . /// An error is raised if the directory does not exist, or if a layer is specified and a different layer was specified when the directory was created. 
/// - public static Task OpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken cancellationToken) + public static Task OpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadAsync((tr) => directory.OpenAsync(tr, path, layer), cancellationToken); + return db.ReadAsync((tr) => directory.OpenAsync(tr, path, layer), ct); } /// Opens the sub-directory with the given . /// An error is raised if the directory does not exist. /// - public static Task OpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task OpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadAsync((tr) => directory.OpenAsync(tr, new[] { name }, Slice.Nil), cancellationToken); + return db.ReadAsync((tr) => directory.OpenAsync(tr, new[] { name }, Slice.Nil), ct); } /// Opens the sub-directory with the given . 
/// An error is raised if the directory does not exist, or if a layer is specified and a different layer was specified when the directory was created. /// - public static Task OpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, Slice layer, CancellationToken cancellationToken) + public static Task OpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadAsync((tr) => directory.OpenAsync(tr, new[] { name }, layer), cancellationToken); + return db.ReadAsync((tr) => directory.OpenAsync(tr, new[] { name }, layer), ct); } /// Opens the sub-directory with the given . @@ -323,59 +323,59 @@ public static Task OpenAsync([NotNull] this IFdbDirectory /// public static Task OpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyTransaction trans, [NotNull] string name, Slice layer = default(Slice)) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.OpenAsync(trans, new[] { name }, layer); } /// Attempts to open the directory with the given . 
- public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] IEnumerable path, CancellationToken cancellationToken) + public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadAsync((tr) => directory.TryOpenAsync(tr, path, Slice.Nil), cancellationToken); + return db.ReadAsync((tr) => directory.TryOpenAsync(tr, path, Slice.Nil), ct); } /// Attempts to open the directory with the given . - public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken cancellationToken) + public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadAsync((tr) => directory.TryOpenAsync(tr, path, layer), cancellationToken); + return db.ReadAsync((tr) => directory.TryOpenAsync(tr, path, layer), ct); } /// Attempts to open the directory with the given . 
- public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadAsync((tr) => directory.TryOpenAsync(tr, new[] { name }, Slice.Nil), cancellationToken); + return db.ReadAsync((tr) => directory.TryOpenAsync(tr, new[] { name }, Slice.Nil), ct); } /// Attempts to open the directory with the given . - public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, Slice layer, CancellationToken cancellationToken) + public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, Slice layer, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadAsync((tr) => directory.TryOpenAsync(tr, new[] { name }, layer), cancellationToken); + return db.ReadAsync((tr) => directory.TryOpenAsync(tr, new[] { name }, layer), ct); } /// Attempts to open the directory with the given . 
public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyTransaction trans, [NotNull] string name) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.TryOpenAsync(trans, new[] { name }, Slice.Nil); } @@ -383,9 +383,9 @@ public static Task TryOpenAsync([NotNull] this IFdbDirecto /// Attempts to open the directory with the given . public static Task TryOpenAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyTransaction trans, [NotNull] string name, Slice layer) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.TryOpenAsync(trans, new[] { name }, layer); } @@ -398,27 +398,27 @@ public static Task TryOpenAsync([NotNull] this IFdbDirecto /// There is no effect on the physical prefix of the given directory, or on clients that already have the directory open. /// An error is raised if the old directory does not exist, a directory already exists at `new_path`, or the parent directory of `new_path` does not exist. 
/// - public static Task MoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath, CancellationToken cancellationToken) + public static Task MoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (oldPath == null) throw new ArgumentNullException("oldPath"); - if (newPath == null) throw new ArgumentNullException("newPath"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (oldPath == null) throw new ArgumentNullException(nameof(oldPath)); + if (newPath == null) throw new ArgumentNullException(nameof(newPath)); - return db.ReadWriteAsync((tr) => directory.MoveAsync(tr, oldPath, newPath), cancellationToken); + return db.ReadWriteAsync((tr) => directory.MoveAsync(tr, oldPath, newPath), ct); } /// Attempts to move the directory found at to . /// There is no effect on the physical prefix of the given directory, or on clients that already have the directory open. 
/// - public static Task TryMoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath, CancellationToken cancellationToken) + public static Task TryMoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (oldPath == null) throw new ArgumentNullException("oldPath"); - if (newPath == null) throw new ArgumentNullException("newPath"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (oldPath == null) throw new ArgumentNullException(nameof(oldPath)); + if (newPath == null) throw new ArgumentNullException(nameof(newPath)); - return db.ReadWriteAsync((tr) => directory.TryMoveAsync(tr, oldPath, newPath), cancellationToken); + return db.ReadWriteAsync((tr) => directory.TryMoveAsync(tr, oldPath, newPath), ct); } #endregion @@ -429,25 +429,25 @@ public static Task TryMoveAsync([NotNull] this IFdbDirecto /// There is no effect on the physical prefix of the given directory, or on clients that already have the directory open. /// An error is raised if a directory already exists at `new_path`, or if the new path points to a child of the current directory. 
/// - public static Task MoveToAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbRetryable db, [NotNull] IEnumerable newPath, CancellationToken cancellationToken) + public static Task MoveToAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbRetryable db, [NotNull] IEnumerable newPath, CancellationToken ct) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (db == null) throw new ArgumentNullException("db"); - if (newPath == null) throw new ArgumentNullException("newPath"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (newPath == null) throw new ArgumentNullException(nameof(newPath)); - return db.ReadWriteAsync((tr) => subspace.MoveToAsync(tr, newPath), cancellationToken); + return db.ReadWriteAsync((tr) => subspace.MoveToAsync(tr, newPath), ct); } /// Attempts to move the current directory to . /// There is no effect on the physical prefix of the given directory, or on clients that already have the directory open. 
/// - public static Task TryMoveToAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbRetryable db, [NotNull] IEnumerable newPath, CancellationToken cancellationToken) + public static Task TryMoveToAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbRetryable db, [NotNull] IEnumerable newPath, CancellationToken ct) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (db == null) throw new ArgumentNullException("db"); - if (newPath == null) throw new ArgumentNullException("newPath"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (newPath == null) throw new ArgumentNullException(nameof(newPath)); - return db.ReadWriteAsync((tr) => subspace.TryMoveToAsync(tr, newPath), cancellationToken); + return db.ReadWriteAsync((tr) => subspace.TryMoveToAsync(tr, newPath), ct); } #endregion @@ -457,34 +457,34 @@ public static Task TryMoveToAsync([NotNull] this FdbDirect /// Removes the directory, its contents, and all subdirectories. /// Warning: Clients that have already opened the directory might still insert data into its contents after it is removed. 
/// - public static Task RemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, IEnumerable path, CancellationToken cancellationToken) + public static Task RemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); - return db.ReadWriteAsync((tr) => directory.RemoveAsync(tr, path), cancellationToken); + return db.ReadWriteAsync((tr) => directory.RemoveAsync(tr, path), ct); } /// Removes the directory, its contents, and all subdirectories. /// Warning: Clients that have already opened the directory might still insert data into its contents after it is removed. /// - public static Task RemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task RemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); - return db.ReadWriteAsync((tr) => directory.RemoveAsync(tr, new [] { name }), cancellationToken); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); + return db.ReadWriteAsync((tr) => directory.RemoveAsync(tr, new [] { name }), ct); } /// Removes the directory, its contents, and all subdirectories. 
/// Warning: Clients that have already opened the directory might still insert data into its contents after it is removed. /// - public static Task RemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, CancellationToken cancellationToken) + public static Task RemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); - return db.ReadWriteAsync((tr) => directory.RemoveAsync(tr), cancellationToken); + return db.ReadWriteAsync((tr) => directory.RemoveAsync(tr), ct); } /// Removes the directory, its contents, and all subdirectories. @@ -492,9 +492,9 @@ public static Task RemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] /// public static Task RemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbTransaction trans, [NotNull] string name) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.RemoveAsync(trans, new[] { name }); } @@ -502,24 +502,24 @@ public static Task RemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] /// Removes the directory, its contents, and all subdirectories. /// Warning: Clients that have already opened the directory might still insert data into its contents after it is removed. 
/// - public static Task TryRemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, IEnumerable path, CancellationToken cancellationToken) + public static Task TryRemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); - return db.ReadWriteAsync((tr) => directory.TryRemoveAsync(tr, path), cancellationToken); + return db.ReadWriteAsync((tr) => directory.TryRemoveAsync(tr, path), ct); } /// Removes the directory, its contents, and all subdirectories. /// Warning: Clients that have already opened the directory might still insert data into its contents after it is removed. /// - public static Task TryRemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task TryRemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadWriteAsync((tr) => directory.TryRemoveAsync(tr, new [] { name }), cancellationToken); + return db.ReadWriteAsync((tr) => directory.TryRemoveAsync(tr, new [] { name }), ct); } /// Removes the directory, its contents, and all subdirectories. 
@@ -527,9 +527,9 @@ public static Task TryRemoveAsync([NotNull] this IFdbDirectory directory, /// public static Task TryRemoveAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbTransaction trans, [NotNull] string name) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.TryRemoveAsync(trans, new[] { name }); } @@ -540,44 +540,44 @@ public static Task TryRemoveAsync([NotNull] this IFdbDirectory directory, /// Checks if a directory already exists /// Returns true if the directory exists, otherwise false. - public static Task ExistsAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, IEnumerable path, CancellationToken cancellationToken) + public static Task ExistsAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); - return db.ReadAsync((tr) => directory.ExistsAsync(tr, path), cancellationToken); + return db.ReadAsync((tr) => directory.ExistsAsync(tr, path), ct); } /// Checks if a directory already exists /// Returns true if the directory exists, otherwise false. 
- public static Task ExistsAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task ExistsAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadAsync((tr) => directory.ExistsAsync(tr, new[] { name }), cancellationToken); + return db.ReadAsync((tr) => directory.ExistsAsync(tr, new[] { name }), ct); } /// Checks if a directory already exists /// Returns true if the directory exists, otherwise false. public static Task ExistsAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyTransaction trans, [NotNull] string name) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.ExistsAsync(trans, new[] { name }); } /// Checks if this directory exists /// Returns true if the directory exists, otherwise false. 
- public static Task ExistsAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbReadOnlyRetryable db, CancellationToken cancellationToken) + public static Task ExistsAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbReadOnlyRetryable db, CancellationToken ct) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (db == null) throw new ArgumentNullException("db"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); + if (db == null) throw new ArgumentNullException(nameof(db)); - return db.ReadAsync((tr) => subspace.ExistsAsync(tr), cancellationToken); + return db.ReadAsync((tr) => subspace.ExistsAsync(tr), ct); } #endregion @@ -585,95 +585,95 @@ public static Task ExistsAsync([NotNull] this FdbDirectorySubspace subspac #region List / TryList... /// Returns the list of subdirectories of directory at . - public static Task> ListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] IEnumerable path, CancellationToken cancellationToken) + public static Task> ListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); - return db.ReadAsync((tr) => directory.ListAsync(tr, path), cancellationToken); + return db.ReadAsync((tr) => directory.ListAsync(tr, path), ct); } /// Returns the list of subdirectories of the sub-directory with the given . 
- public static Task> ListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task> ListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); - return db.ReadAsync((tr) => directory.ListAsync(tr, new [] { name }), cancellationToken); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); + return db.ReadAsync((tr) => directory.ListAsync(tr, new [] { name }), ct); } /// Returns the list of subdirectories of the current directory. - public static Task> ListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, CancellationToken cancellationToken) + public static Task> ListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - return db.ReadAsync((tr) => directory.ListAsync(tr), cancellationToken); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + return db.ReadAsync((tr) => directory.ListAsync(tr), ct); } /// Returns the list of subdirectories of the current directory. 
public static Task> ListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyTransaction trans) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return directory.ListAsync(trans); } /// Returns the list of subdirectories of the sub-directory with the given . public static Task> ListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyTransaction trans, [NotNull] string name) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.ListAsync(trans, new[] { name }); } /// Returns the list of subdirectories of directory at , if it exists - public static Task> TryListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, IEnumerable path, CancellationToken cancellationToken) + public static Task> TryListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, IEnumerable path, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); //REVIEW: or not? + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); //REVIEW: or not? 
- return db.ReadAsync((tr) => directory.TryListAsync(tr, path), cancellationToken); + return db.ReadAsync((tr) => directory.TryListAsync(tr, path), ct); } /// Returns the list of subdirectories of the sub-directory with the given , if it exists - public static Task> TryListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, CancellationToken cancellationToken) + public static Task> TryListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyRetryable db, [NotNull] string name, CancellationToken ct) { - if (directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (name == null) throw new ArgumentNullException(nameof(name)); - return db.ReadAsync((tr) => directory.TryListAsync(tr, new [] { name }), cancellationToken); + return db.ReadAsync((tr) => directory.TryListAsync(tr, new [] { name }), ct); } /// Returns the list of subdirectories of the sub-directory with the given , if it exists public static Task> TryListAsync([NotNull] this IFdbDirectory directory, [NotNull] IFdbReadOnlyTransaction trans, [NotNull] string name) { - if (directory == null) throw new ArgumentNullException("directory"); - if (trans == null) throw new ArgumentNullException("trans"); - if (name == null) throw new ArgumentNullException("name"); + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (name == null) throw new ArgumentNullException(nameof(name)); return directory.TryListAsync(trans, new[] { name }); } /// Returns the list of all the subdirectories of the current directory. 
- public static Task> ListAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbReadOnlyRetryable db, CancellationToken cancellationToken) + public static Task> ListAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbReadOnlyRetryable db, CancellationToken ct) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (db == null) throw new ArgumentNullException("db"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); + if (db == null) throw new ArgumentNullException(nameof(db)); - return db.ReadAsync((tr) => subspace.ListAsync(tr), cancellationToken); + return db.ReadAsync((tr) => subspace.ListAsync(tr), ct); } /// Returns the list of all the subdirectories of the current directory, it it exists. - public static Task> TryListAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbReadOnlyRetryable db, CancellationToken cancellationToken) + public static Task> TryListAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbReadOnlyRetryable db, CancellationToken ct) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (db == null) throw new ArgumentNullException("db"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); + if (db == null) throw new ArgumentNullException(nameof(db)); - return db.ReadAsync((tr) => subspace.TryListAsync(tr), cancellationToken); + return db.ReadAsync((tr) => subspace.TryListAsync(tr), ct); } #endregion @@ -681,22 +681,22 @@ public static Task> TryListAsync([NotNull] this FdbDirectorySubspac #region Metadata /// Change the layer id of the directory at - public static Task ChangeLayerAsync([NotNull] this FdbDirectoryLayer directory, [NotNull] IFdbRetryable db, IEnumerable path, Slice newLayer, CancellationToken cancellationToken) + public static Task ChangeLayerAsync([NotNull] this FdbDirectoryLayer directory, [NotNull] IFdbRetryable db, IEnumerable path, Slice newLayer, CancellationToken ct) { - if 
(directory == null) throw new ArgumentNullException("directory"); - if (db == null) throw new ArgumentNullException("db"); - if (path == null) throw new ArgumentNullException("path"); //REVIEW: or not? + if (directory == null) throw new ArgumentNullException(nameof(directory)); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (path == null) throw new ArgumentNullException(nameof(path)); //REVIEW: or not? - return db.ReadWriteAsync((tr) => directory.ChangeLayerAsync(tr, path, newLayer), cancellationToken); + return db.ReadWriteAsync((tr) => directory.ChangeLayerAsync(tr, path, newLayer), ct); } /// Change the layer id of this directory - public static Task ChangeLayerAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbRetryable db, Slice newLayer, CancellationToken cancellationToken) + public static Task ChangeLayerAsync([NotNull] this FdbDirectorySubspace subspace, [NotNull] IFdbRetryable db, Slice newLayer, CancellationToken ct) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (db == null) throw new ArgumentNullException("db"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); + if (db == null) throw new ArgumentNullException(nameof(db)); - return db.ReadWriteAsync((tr) => subspace.ChangeLayerAsync(tr, newLayer), cancellationToken); + return db.ReadWriteAsync((tr) => subspace.ChangeLayerAsync(tr, newLayer), ct); } #endregion diff --git a/FoundationDB.Client/Layers/Directories/FdbDirectoryLayer.cs b/FoundationDB.Client/Layers/Directories/FdbDirectoryLayer.cs index e5be52787..2e9af0707 100644 --- a/FoundationDB.Client/Layers/Directories/FdbDirectoryLayer.cs +++ b/FoundationDB.Client/Layers/Directories/FdbDirectoryLayer.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,18 +28,19 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Directories { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using FoundationDB.Linq; - using FoundationDB.Filters.Logging; using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Threading.Tasks; using JetBrains.Annotations; - + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using Doxense.Memory; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; + using FoundationDB.Filters.Logging; /// Provides a FdbDirectoryLayer class for managing directories in FoundationDB. /// Directories are a recommended approach for administering layers and applications. Directories work in conjunction with subspaces. Each layer or application should create or open at least one directory with which to manage its subspace(s). @@ -52,9 +53,9 @@ public class FdbDirectoryLayer : IFdbDirectory { private const int SUBDIRS = 0; internal static readonly Version LayerVersion = new Version(1, 0, 0); - internal static readonly Slice LayerSuffix = Slice.FromAscii("layer"); - internal static readonly Slice HcaKey = Slice.FromAscii("hca"); - internal static readonly Slice VersionKey = Slice.FromAscii("version"); + internal static readonly Slice LayerSuffix = Slice.FromStringAscii("layer"); + internal static readonly Slice HcaKey = Slice.FromStringAscii("hca"); + internal static readonly Slice VersionKey = Slice.FromStringAscii("version"); /// Use this flag to make the Directory Layer start annotating the transactions with a descriptions of all operations. 
/// @@ -64,19 +65,19 @@ public class FdbDirectoryLayer : IFdbDirectory public static bool AnnotateTransactions { get; set; } /// Subspace where the content of each folder will be stored - public IFdbDynamicSubspace ContentSubspace { [NotNull] get; private set; } + public IDynamicKeySubspace ContentSubspace { [NotNull] get; } /// Subspace where all the metadata nodes for each folder will be stored - public IFdbDynamicSubspace NodeSubspace { [NotNull] get; private set; } + public IDynamicKeySubspace NodeSubspace { [NotNull] get; } /// Root node of the directory - internal IFdbDynamicSubspace RootNode { [NotNull] get; private set; } + internal IDynamicKeySubspace RootNode { [NotNull] get; } /// Allocated used to generated prefix for new content - internal FdbHighContentionAllocator Allocator { [NotNull] get; private set; } + internal FdbHighContentionAllocator Allocator { [NotNull] get; } /// Gets the path for the root node of this FdbDirectoryLayer. - internal IFdbTuple Location { [NotNull] get; private set; } + internal ITuple Location { [NotNull] get; } /// Name of root directory of this layer /// Returns String.Empty for the root Directory Layer, or the name of the partition @@ -101,7 +102,7 @@ public string Name /// Convert a relative path in this Directory Layer, into an absolute path from the root of partition of the database [NotNull] - internal IFdbTuple PartitionSubPath(IFdbTuple path = null) + internal ITuple PartitionSubPath(ITuple path = null) { // If the DL is the root, the path is already absolute // If the DL is used by a partition, then the path of the partition will be prepended to the path @@ -112,7 +113,7 @@ void IFdbDirectory.CheckLayer(Slice layer) { if (layer.IsPresent) { - throw new InvalidOperationException(String.Format("The directory layer {0} is not compatible with layer {1}.", this.FullName, layer.ToAsciiOrHexaString())); + throw new InvalidOperationException($"The directory layer {this.FullName} is not compatible with layer {layer:K}."); 
} } @@ -129,7 +130,7 @@ Task IFdbDirectory.ChangeLayerAsync(IFdbTransaction trans, /// Subspace where all the node metadata will be stored ('\xFE' by default) /// Subspace where all automatically allocated directories will be stored (empty by default) /// Location of the root of all the directories managed by this Directory Layer. Ususally empty for the root partition of the database. - internal FdbDirectoryLayer(IFdbDynamicSubspace nodeSubspace, IFdbDynamicSubspace contentSubspace, IFdbTuple location) + internal FdbDirectoryLayer(IDynamicKeySubspace nodeSubspace, IDynamicKeySubspace contentSubspace, ITuple location) { Contract.Requires(nodeSubspace != null && contentSubspace != null); @@ -138,11 +139,11 @@ internal FdbDirectoryLayer(IFdbDynamicSubspace nodeSubspace, IFdbDynamicSubspace this.NodeSubspace = nodeSubspace; // The root node is the one whose contents are the node subspace - this.RootNode = nodeSubspace.Partition.ByKey(nodeSubspace.Key); + this.RootNode = nodeSubspace.Partition.ByKey(nodeSubspace.GetPrefix()); this.Allocator = new FdbHighContentionAllocator(this.RootNode.Partition.ByKey(HcaKey)); if (location == null || location.Count == 0) { - this.Location = FdbTuple.Empty; + this.Location = STuple.Empty; this.Path = new string[0]; } else @@ -152,34 +153,29 @@ internal FdbDirectoryLayer(IFdbDynamicSubspace nodeSubspace, IFdbDynamicSubspace } } - /// Create an instance of the default Directory Layer - [NotNull] - public static FdbDirectoryLayer Create() - { - return Create(Slice.Empty); - } - /// Create an instance of a Directory Layer located under a specific prefix and path /// Prefix for the content. The nodes will be stored under + <FE> /// Optional path, if the Directory Layer is not located at the root of the database. + /// Optional key encoding scheme. If not specified, will use the encoding by default. 
[NotNull] - public static FdbDirectoryLayer Create(Slice prefix, IEnumerable path = null) + public static FdbDirectoryLayer Create(Slice prefix, IEnumerable path = null, IKeyEncoding encoding = null) { - var subspace = FdbSubspace.CreateDynamic(prefix, TypeSystem.Tuples); - var location = path != null ? ParsePath(path) : FdbTuple.Empty; + var subspace = KeySubspace.CreateDynamic(prefix, encoding ?? TuPack.Encoding); + var location = path != null ? ParsePath(path) : STuple.Empty; return new FdbDirectoryLayer(subspace.Partition[FdbKey.Directory], subspace, location); } /// Create an instance of a Directory Layer located under a specific subspace and path /// Subspace for the content. The nodes will be stored under .Key + <FE> /// Optional path, if the Directory Layer is not located at the root of the database. + /// Optional key encoding scheme. If not specified, will use the encoding by default. [NotNull] - public static FdbDirectoryLayer Create(IFdbSubspace subspace, IEnumerable path = null) + public static FdbDirectoryLayer Create(IKeySubspace subspace, IEnumerable path = null, IKeyEncoding encoding = null) { - if (subspace == null) throw new ArgumentNullException("subspace"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); - var location = path != null ? ParsePath(path) : FdbTuple.Empty; - var space = subspace.Using(TypeSystem.Tuples); + var location = path != null ? ParsePath(path) : STuple.Empty; + var space = subspace.AsDynamic(encoding ?? TuPack.Encoding); return new FdbDirectoryLayer(space.Partition[FdbKey.Directory], space, location); } @@ -188,12 +184,12 @@ public static FdbDirectoryLayer Create(IFdbSubspace subspace, IEnumerableSubspace for the content of the Directory Layer. 
/// Optional path, if the Directory Layer is not located at the root of the database [NotNull] - public static FdbDirectoryLayer Create(IFdbDynamicSubspace nodeSubspace, IFdbDynamicSubspace contentSubspace, IEnumerable path = null) + public static FdbDirectoryLayer Create(IDynamicKeySubspace nodeSubspace, IDynamicKeySubspace contentSubspace, IEnumerable path = null) { - if (nodeSubspace == null) throw new ArgumentNullException("nodeSubspace"); - if (contentSubspace == null) throw new ArgumentNullException("contentSubspace"); + if (nodeSubspace == null) throw new ArgumentNullException(nameof(nodeSubspace)); + if (contentSubspace == null) throw new ArgumentNullException(nameof(contentSubspace)); - var location = path != null ? ParsePath(path) : FdbTuple.Empty; + var location = path != null ? ParsePath(path) : STuple.Empty; //TODO: check that nodeSubspace != contentSubspace? return new FdbDirectoryLayer(nodeSubspace, contentSubspace, location); } @@ -208,10 +204,11 @@ public static FdbDirectoryLayer Create(IFdbDynamicSubspace nodeSubspace, IFdbDyn /// Transaction to use for the operation /// Path of the directory to create or open /// If layer is specified, it is checked against the layer of an existing directory or set as the layer of a new directory. + [ItemNotNull] public Task CreateOrOpenAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return CreateOrOpenInternalAsync(null, trans, ParsePath(path), layer, Slice.Nil, allowCreate: true, allowOpen: true, throwOnError: true); } @@ -222,10 +219,11 @@ public static FdbDirectoryLayer Create(IFdbDynamicSubspace nodeSubspace, IFdbDyn /// Transaction to use for the operation /// Path of the directory to open. 
/// Optional layer id of the directory. If it is different than the layer specified when creating the directory, an exception will be thrown. + [ItemNotNull] public Task OpenAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return CreateOrOpenInternalAsync(trans, null, ParsePath(path), layer, prefix: Slice.Nil, allowCreate: false, allowOpen: true, throwOnError: true); } @@ -236,10 +234,11 @@ public static FdbDirectoryLayer Create(IFdbDynamicSubspace nodeSubspace, IFdbDyn /// Transaction to use for the operation /// Path of the directory to create /// If is specified, it is recorded with the directory and will be checked by future calls to open. + [ItemNotNull] public Task CreateAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return CreateOrOpenInternalAsync(null, trans, ParsePath(path), layer, prefix: Slice.Nil, allowCreate: true, allowOpen: false, throwOnError: true); } @@ -248,10 +247,11 @@ public static FdbDirectoryLayer Create(IFdbDynamicSubspace nodeSubspace, IFdbDyn /// Transaction to use for the operation /// Path of the directory to open. /// Optional layer id of the directory. If it is different than the layer specified when creating the directory, an exception will be thrown. 
+ [ItemCanBeNull] public Task TryOpenAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return CreateOrOpenInternalAsync(trans, null, ParsePath(path), layer, prefix: Slice.Nil, allowCreate: false, allowOpen: true, throwOnError: false); } @@ -260,10 +260,11 @@ public static FdbDirectoryLayer Create(IFdbDynamicSubspace nodeSubspace, IFdbDyn /// Transaction to use for the operation /// Path of the directory to create /// If is specified, it is recorded with the directory and will be checked by future calls to open. + [ItemCanBeNull] public Task TryCreateAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return CreateOrOpenInternalAsync(null, trans, ParsePath(path), layer, prefix: Slice.Nil, allowCreate: true, allowOpen: false, throwOnError: false); } @@ -273,10 +274,11 @@ public static FdbDirectoryLayer Create(IFdbDynamicSubspace nodeSubspace, IFdbDyn /// Path of the directory to create /// If is specified, it is recorded with the directory and will be checked by future calls to open. /// The directory will be created with the given physical prefix; otherwise a prefix is allocated automatically. 
+ [ItemNotNull] public Task RegisterAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice layer, Slice prefix) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return CreateOrOpenInternalAsync(null, trans, ParsePath(path), layer, prefix: prefix, allowCreate: true, allowOpen: false, throwOnError: true); } @@ -286,10 +288,11 @@ public Task RegisterAsync([NotNull] IFdbTransaction trans, /// Path of the directory to create /// If is specified, it is recorded with the directory and will be checked by future calls to open. /// The directory will be created with the given physical prefix; otherwise a prefix is allocated automatically. + [ItemCanBeNull] public Task TryRegisterAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice layer, Slice prefix) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return CreateOrOpenInternalAsync(null, trans, ParsePath(path), layer, prefix: prefix, allowCreate: true, allowOpen: false, throwOnError: false); } @@ -305,15 +308,16 @@ public Task TryRegisterAsync([NotNull] IFdbTransaction tra /// Transaction to use for the operation /// Path of the directory to move /// New path of the directory - public Task MoveAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath) + [ItemNotNull] + public Task MoveAsync(IFdbTransaction trans, IEnumerable oldPath, IEnumerable newPath) { - if (trans == null) throw new ArgumentNullException("trans"); - if (oldPath == null) throw new ArgumentNullException("oldPath"); - if (newPath == null) throw new 
ArgumentNullException("newPath"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (oldPath == null) throw new ArgumentNullException(nameof(oldPath)); + if (newPath == null) throw new ArgumentNullException(nameof(newPath)); - var oldLocation = FdbTuple.FromEnumerable(oldPath); + var oldLocation = STuple.FromEnumerable(oldPath); VerifyPath(oldLocation, "oldPath"); - var newLocation = FdbTuple.FromEnumerable(newPath); + var newLocation = STuple.FromEnumerable(newPath); VerifyPath(newLocation, "newPath"); return MoveInternalAsync(trans, oldLocation, newLocation, throwOnError: true); @@ -326,15 +330,16 @@ public Task MoveAsync([NotNull] IFdbTransaction trans, [No /// Transaction to use for the operation /// Path of the directory to move /// New path of the directory - public Task TryMoveAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath) + [ItemCanBeNull] + public Task TryMoveAsync(IFdbTransaction trans, IEnumerable oldPath, IEnumerable newPath) { - if (trans == null) throw new ArgumentNullException("trans"); - if (oldPath == null) throw new ArgumentNullException("oldPath"); - if (newPath == null) throw new ArgumentNullException("newPath"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (oldPath == null) throw new ArgumentNullException(nameof(oldPath)); + if (newPath == null) throw new ArgumentNullException(nameof(newPath)); - var oldLocation = FdbTuple.FromEnumerable(oldPath); + var oldLocation = STuple.FromEnumerable(oldPath); VerifyPath(oldLocation, "oldPath"); - var newLocation = FdbTuple.FromEnumerable(newPath); + var newLocation = STuple.FromEnumerable(newPath); VerifyPath(newLocation, "newPath"); return MoveInternalAsync(trans, oldLocation, newLocation, throwOnError: false); @@ -365,8 +370,8 @@ Task IFdbDirectory.TryMoveToAsync(IFdbTransaction trans, I /// Path of the directory to remove (including any subdirectories) public Task RemoveAsync([NotNull] 
IFdbTransaction trans, [NotNull] IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return RemoveInternalAsync(trans, ParsePath(path), throwIfMissing: true); } @@ -378,8 +383,8 @@ public Task RemoveAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerablePath of the directory to remove (including any subdirectories) public Task TryRemoveAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); var location = ParsePath(path); if (location.Count == 0) throw new NotSupportedException("Cannot remove a directory layer"); @@ -396,8 +401,8 @@ public Task TryRemoveAsync([NotNull] IFdbTransaction trans, [NotNull] IEnu /// Returns true if the directory exists, otherwise false. public Task ExistsAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); // no reason to disallow checking for the root directory (could be used to check if a directory layer is initialized?) 
var location = ParsePath(path); @@ -413,36 +418,40 @@ public Task ExistsAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] /// Returns the list of subdirectories of directory at /// Transaction to use for the operation /// Path of the directory to list + [ItemNotNull] public Task> ListAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return ListInternalAsync(trans, ParsePath(path), throwIfMissing: true); } /// Returns the list of subdirectories of the root directory /// Transaction to use for the operation + [ItemNotNull] public Task> ListAsync([NotNull] IFdbReadOnlyTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - return ListInternalAsync(trans, FdbTuple.Empty, throwIfMissing: true); + return ListInternalAsync(trans, STuple.Empty, throwIfMissing: true); } /// Returns the list of subdirectories of directory at , if it exists. 
/// Transaction to use for the operation /// Path of the directory to list + [ItemCanBeNull] public Task> TryListAsync([NotNull] IFdbReadOnlyTransaction trans, IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return ListInternalAsync(trans, ParsePath(path), throwIfMissing: false); } + [ItemCanBeNull] public Task> TryListAsync([NotNull] IFdbReadOnlyTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); - return ListInternalAsync(trans, FdbTuple.Empty, throwIfMissing: false); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + return ListInternalAsync(trans, STuple.Empty, throwIfMissing: false); } #endregion @@ -451,10 +460,11 @@ public Task> TryListAsync([NotNull] IFdbReadOnlyTransaction trans) /// Transaction to use for the operation /// Path of the directory to change /// New layer id of the directory + [ItemCanBeNull] public async Task ChangeLayerAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice newLayer) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); var location = ParsePath(path); @@ -467,17 +477,17 @@ public async Task ChangeLayerAsync([NotNull] IFdbTransacti public override string ToString() { - return String.Format("DirectoryLayer(path={0}, contents={1}, nodes={2})", this.FullName, this.ContentSubspace.Key.ToAsciiOrHexaString(), this.NodeSubspace.Key.ToAsciiOrHexaString()); + return $"DirectoryLayer(path={this.FullName}, contents={this.ContentSubspace.GetPrefix():K}, nodes={this.NodeSubspace.GetPrefix():K})"; } #endregion #region Internal Helpers... 
- private struct Node + private readonly struct Node { - public Node(IFdbDynamicSubspace subspace, IFdbTuple path, IFdbTuple targetPath, Slice layer) + public Node(IDynamicKeySubspace subspace, ITuple path, ITuple targetPath, Slice layer) { this.Subspace = subspace; this.Path = path; @@ -485,14 +495,15 @@ public Node(IFdbDynamicSubspace subspace, IFdbTuple path, IFdbTuple targetPath, this.Layer = layer; } - public readonly IFdbDynamicSubspace Subspace; - public readonly IFdbTuple Path; - public readonly IFdbTuple TargetPath; - public Slice Layer; //PERF: readonly struct + public readonly IDynamicKeySubspace Subspace; + public readonly ITuple Path; + public readonly ITuple TargetPath; + public readonly Slice Layer; - public bool Exists { get { return this.Subspace != null; } } + public bool Exists => this.Subspace != null; - public IFdbTuple PartitionSubPath { [NotNull] get { return this.TargetPath.Substring(this.Path.Count); } } + [NotNull] + public ITuple PartitionSubPath => this.TargetPath.Substring(this.Path.Count); public bool IsInPartition(bool includeEmptySubPath) { @@ -501,16 +512,16 @@ public bool IsInPartition(bool includeEmptySubPath) } - private static void SetLayer([NotNull] IFdbTransaction trans, [NotNull] IFdbDynamicSubspace subspace, Slice layer) + private static void SetLayer([NotNull] IFdbTransaction trans, [NotNull] IDynamicKeySubspace subspace, Slice layer) { if (layer.IsNull) layer = Slice.Empty; trans.Set(subspace.Keys.Encode(LayerSuffix), layer); } [NotNull] - internal static IFdbTuple ParsePath(IEnumerable path, string argName = null) + internal static ITuple ParsePath(IEnumerable path, string argName = null) { - if (path == null) return FdbTuple.Empty; + if (path == null) return STuple.Empty; var pathCopy = path.ToArray(); foreach (var s in pathCopy) @@ -520,19 +531,11 @@ internal static IFdbTuple ParsePath(IEnumerable path, string argName = n throw new ArgumentException("The path of a directory cannot contain null elements", argName ?? 
"path"); } } - return FdbTuple.FromArray(pathCopy); - } - - [NotNull] - internal static IFdbTuple ParsePath([NotNull] string name, string argName = null) - { - if (name == null) throw new ArgumentNullException(argName ?? "name"); - - return FdbTuple.Create(name); + return STuple.FromArray(pathCopy); } [NotNull] - internal static IFdbTuple VerifyPath([NotNull] IFdbTuple path, string argName = null) + internal static ITuple VerifyPath([NotNull] ITuple path, string argName = null) { // The path should not contain any null strings if (path == null) throw new ArgumentNullException(argName ?? "path"); @@ -548,7 +551,7 @@ internal static IFdbTuple VerifyPath([NotNull] IFdbTuple path, string argName = } [NotNull] - internal IReadOnlyList ToAbsolutePath([NotNull] IFdbTuple path) + internal IReadOnlyList ToAbsolutePath([NotNull] ITuple path) { if (path.Count == 0) return this.Path; var converted = path.ToArray(); @@ -558,15 +561,16 @@ internal IReadOnlyList ToAbsolutePath([NotNull] IFdbTuple path) /// Maps an absolute path to a relative path within this directory layer [NotNull] - internal IFdbTuple ToRelativePath([NotNull] IFdbTuple path) + internal ITuple ToRelativePath([NotNull] ITuple path) { - if (path == null) throw new ArgumentNullException("path"); + if (path == null) throw new ArgumentNullException(nameof(path)); if (!path.StartsWith(this.Location)) throw new InvalidOperationException("The path cannot be outside of this partition"); return path.Substring(this.Location.Count); } - internal async Task CreateOrOpenInternalAsync(IFdbReadOnlyTransaction readTrans, IFdbTransaction trans, [NotNull] IFdbTuple path, Slice layer, Slice prefix, bool allowCreate, bool allowOpen, bool throwOnError) + [ItemCanBeNull] + internal async Task CreateOrOpenInternalAsync(IFdbReadOnlyTransaction readTrans, IFdbTransaction trans, [NotNull] ITuple path, Slice layer, Slice prefix, bool allowCreate, bool allowOpen, bool throwOnError) { Contract.Requires(readTrans != null || trans != null, 
"Need at least one transaction"); Contract.Requires(path != null, "Path must be specified"); @@ -599,20 +603,20 @@ internal async Task CreateOrOpenInternalAsync(IFdbReadOnly if (!allowOpen) { - if (throwOnError) throw new InvalidOperationException(string.Format("The directory {0} already exists.", path)); + if (throwOnError) throw new InvalidOperationException($"The directory {path} already exists."); return null; } if (layer.IsPresent && layer != existingNode.Layer) { - throw new InvalidOperationException(String.Format("The directory {0} was created with incompatible layer {1} instead of expected {2}.", path, layer.ToAsciiOrHexaString(), existingNode.Layer.ToAsciiOrHexaString())); + throw new InvalidOperationException($"The directory {path} was created with incompatible layer {layer:P} instead of expected {existingNode.Layer:P}."); } return ContentsOfNode(existingNode.Subspace, path, existingNode.Layer); } if (!allowCreate) { - if (throwOnError) throw new InvalidOperationException(string.Format("The directory {0} does not exist.", path)); + if (throwOnError) throw new InvalidOperationException($"The directory {path} does not exist."); return null; } @@ -621,16 +625,16 @@ internal async Task CreateOrOpenInternalAsync(IFdbReadOnly await CheckWriteVersionAsync(trans).ConfigureAwait(false); - if (prefix == null) + if (prefix.IsNull) { // automatically allocate a new prefix inside the ContentSubspace long id = await this.Allocator.AllocateAsync(trans).ConfigureAwait(false); prefix = this.ContentSubspace.Keys.Encode(id); // ensure that there is no data already present under this prefix if (FdbDirectoryLayer.AnnotateTransactions) trans.Annotate("Ensure that there is no data already present under prefix {0}", prefix); - if (await trans.GetRange(FdbKeyRange.StartsWith(prefix)).AnyAsync().ConfigureAwait(false)) + if (await trans.GetRange(KeyRange.StartsWith(prefix)).AnyAsync().ConfigureAwait(false)) { - throw new InvalidOperationException(String.Format("The database has 
keys stored at the prefix chosen by the automatic prefix allocator: {0}", prefix.ToAsciiOrHexaString())); + throw new InvalidOperationException($"The database has keys stored at the prefix chosen by the automatic prefix allocator: {prefix:K}"); } // ensure that the prefix has not already been allocated @@ -651,17 +655,17 @@ internal async Task CreateOrOpenInternalAsync(IFdbReadOnly } // we need to recursively create any missing parents - IFdbDynamicSubspace parentNode; + IDynamicKeySubspace parentNode; if (path.Count > 1) { var parentSubspace = await CreateOrOpenInternalAsync(readTrans, trans, path.Substring(0, path.Count - 1), Slice.Nil, Slice.Nil, true, true, true).ConfigureAwait(false); - parentNode = NodeWithPrefix(parentSubspace.Key); + parentNode = NodeWithPrefix(parentSubspace.GetPrefix()); } else { parentNode = this.RootNode; } - if (parentNode == null) throw new InvalidOperationException(string.Format("The parent directory of {0} doesn't exist.", path)); + if (parentNode == null) throw new InvalidOperationException($"The parent directory of {path} doesn't exist."); // initialize the metadata for this new directory var node = NodeWithPrefix(prefix); @@ -672,7 +676,8 @@ internal async Task CreateOrOpenInternalAsync(IFdbReadOnly return ContentsOfNode(node, path, layer); } - internal async Task MoveInternalAsync([NotNull] IFdbTransaction trans, [NotNull] IFdbTuple oldPath, [NotNull] IFdbTuple newPath, bool throwOnError) + [ItemCanBeNull] + internal async Task MoveInternalAsync([NotNull] IFdbTransaction trans, [NotNull] ITuple oldPath, [NotNull] ITuple newPath, bool throwOnError) { Contract.Requires(trans != null && oldPath != null && newPath != null); @@ -686,7 +691,7 @@ internal async Task MoveInternalAsync([NotNull] IFdbTransa } if (newPath.StartsWith(oldPath)) { - throw new InvalidOperationException(string.Format("The destination directory({0}) cannot be a subdirectory of the source directory({1}).", newPath, oldPath)); + throw new 
InvalidOperationException($"The destination directory({newPath}) cannot be a subdirectory of the source directory({oldPath})."); } await CheckWriteVersionAsync(trans).ConfigureAwait(false); @@ -694,7 +699,7 @@ internal async Task MoveInternalAsync([NotNull] IFdbTransa var oldNode = await FindAsync(trans, oldPath).ConfigureAwait(false); if (!oldNode.Exists) { - if (throwOnError) throw new InvalidOperationException(string.Format("The source directory '{0}' does not exist.", oldPath)); + if (throwOnError) throw new InvalidOperationException($"The source directory '{oldPath}' does not exist."); return null; } @@ -703,7 +708,7 @@ internal async Task MoveInternalAsync([NotNull] IFdbTransa // we have already checked that old and new are under this partition path, but one of them (or both?) could be under a sub-partition.. if (oldNode.IsInPartition(false) || newNode.IsInPartition(false)) { - if (!oldNode.IsInPartition(false) || !newNode.IsInPartition(false) || !FdbTuple.Equals(oldNode.Path, newNode.Path)) + if (!oldNode.IsInPartition(false) || !newNode.IsInPartition(false) || !STuple.Equals(oldNode.Path, newNode.Path)) { throw new InvalidOperationException("Cannot move between partitions."); } @@ -713,25 +718,25 @@ internal async Task MoveInternalAsync([NotNull] IFdbTransa if (newNode.Exists) { - if (throwOnError) throw new InvalidOperationException(string.Format("The destination directory '{0}' already exists. Remove it first.", newPath)); + if (throwOnError) throw new InvalidOperationException($"The destination directory '{newPath}' already exists. Remove it first."); return null; } var parentNode = await FindAsync(trans, newPath.Substring(0, newPath.Count - 1)).ConfigureAwait(false); if (!parentNode.Exists) { - if (throwOnError) throw new InvalidOperationException(string.Format("The parent of the destination directory '{0}' does not exist. 
Create it first.", newPath)); + if (throwOnError) throw new InvalidOperationException($"The parent of the destination directory '{newPath}' does not exist. Create it first."); return null; } - if (FdbDirectoryLayer.AnnotateTransactions) trans.Annotate("Register the prefix {0} to its new location in the folder sub-tree", oldNode.Subspace.Key); - trans.Set(GetSubDirKey(parentNode.Subspace, newPath.Get(-1)), this.NodeSubspace.Keys.Decode(oldNode.Subspace.Key)); + if (FdbDirectoryLayer.AnnotateTransactions) trans.Annotate("Register the prefix {0} to its new location in the folder sub-tree", oldNode.Subspace.GetPrefix()); + trans.Set(GetSubDirKey(parentNode.Subspace, newPath.Get(-1)), this.NodeSubspace.Keys.Decode(oldNode.Subspace.GetPrefix())); await RemoveFromParent(trans, oldPath).ConfigureAwait(false); return ContentsOfNode(oldNode.Subspace, newPath, oldNode.Layer); } - internal async Task RemoveInternalAsync([NotNull] IFdbTransaction trans, [NotNull] IFdbTuple path, bool throwIfMissing) + internal async Task RemoveInternalAsync([NotNull] IFdbTransaction trans, [NotNull] ITuple path, bool throwIfMissing) { Contract.Requires(trans != null && path != null); @@ -743,7 +748,7 @@ internal async Task RemoveInternalAsync([NotNull] IFdbTransaction trans, [ var n = await FindAsync(trans, path).ConfigureAwait(false); if (!n.Exists) { - if (throwIfMissing) throw new InvalidOperationException(string.Format("The directory '{0}' does not exist.", path)); + if (throwIfMissing) throw new InvalidOperationException($"The directory '{path}' does not exist."); return false; } @@ -761,7 +766,8 @@ internal async Task RemoveInternalAsync([NotNull] IFdbTransaction trans, [ return true; } - internal async Task> ListInternalAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IFdbTuple path, bool throwIfMissing) + [ItemCanBeNull] + internal async Task> ListInternalAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] ITuple path, bool throwIfMissing) { Contract.Requires(trans != null 
&& path != null); @@ -771,7 +777,7 @@ internal async Task> ListInternalAsync([NotNull] IFdbReadOnlyTransa if (!node.Exists) { - if (throwIfMissing) throw new InvalidOperationException(string.Format("The directory '{0}' does not exist.", path)); + if (throwIfMissing) throw new InvalidOperationException($"The directory '{path}' does not exist."); return null; } @@ -786,7 +792,7 @@ internal async Task> ListInternalAsync([NotNull] IFdbReadOnlyTransa .ConfigureAwait(false); } - internal async Task ExistsInternalAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IFdbTuple path) + internal async Task ExistsInternalAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] ITuple path) { Contract.Requires(trans != null && path != null); @@ -804,7 +810,7 @@ internal async Task ExistsInternalAsync([NotNull] IFdbReadOnlyTransaction return true; } - internal async Task ChangeLayerInternalAsync([NotNull] IFdbTransaction trans, [NotNull] IFdbTuple path, Slice newLayer) + internal async Task ChangeLayerInternalAsync([NotNull] IFdbTransaction trans, [NotNull] ITuple path, Slice newLayer) { Contract.Requires(trans != null && path != null); @@ -814,7 +820,7 @@ internal async Task ChangeLayerInternalAsync([NotNull] IFdbTransaction trans, [N if (!node.Exists) { - throw new InvalidOperationException(string.Format("The directory '{0}' does not exist, or as already been removed.", path)); + throw new InvalidOperationException($"The directory '{path}' does not exist, or as already been removed."); } if (node.IsInPartition(includeEmptySubPath: false)) @@ -860,8 +866,8 @@ private static void CheckVersion(Slice value, bool writeAccess) var minor = reader.ReadFixed32(); var upgrade = reader.ReadFixed32(); - if (major > LayerVersion.Major) throw new InvalidOperationException(String.Format("Cannot load directory with version {0}.{1}.{2} using directory layer {3}", major, minor, upgrade, LayerVersion)); - if (writeAccess && minor > LayerVersion.Minor) throw new 
InvalidOperationException(String.Format("Directory with version {0}.{1}.{2} is read-only when opened using directory layer {3}", major, minor, upgrade, LayerVersion)); + if (major > LayerVersion.Major) throw new InvalidOperationException($"Cannot load directory with version {major}.{minor}.{upgrade} using directory layer {FdbDirectoryLayer.LayerVersion}"); + if (writeAccess && minor > LayerVersion.Minor) throw new InvalidOperationException($"Directory with version {major}.{minor}.{upgrade} is read-only when opened using directory layer {FdbDirectoryLayer.LayerVersion}"); } private void InitializeDirectory([NotNull] IFdbTransaction trans) @@ -876,7 +882,8 @@ private void InitializeDirectory([NotNull] IFdbTransaction trans) trans.Set(this.RootNode.Keys.Encode(VersionKey), writer.ToSlice()); } - private async Task NodeContainingKey([NotNull] IFdbReadOnlyTransaction tr, Slice key) + [ItemCanBeNull] + private async Task NodeContainingKey([NotNull] IFdbReadOnlyTransaction tr, Slice key) { Contract.Requires(tr != null); @@ -909,7 +916,7 @@ private async Task NodeContainingKey([NotNull] IFdbReadOnlyTransac /// Returns the subspace to a node metadata, given its prefix [CanBeNull] - private IFdbDynamicSubspace NodeWithPrefix(Slice prefix) + private IDynamicKeySubspace NodeWithPrefix(Slice prefix) { if (prefix.IsNullOrEmpty) return null; return this.NodeSubspace.Partition.ByKey(prefix); @@ -917,19 +924,19 @@ private IFdbDynamicSubspace NodeWithPrefix(Slice prefix) /// Returns a new Directory Subspace given its node subspace, path and layer id [NotNull] - private FdbDirectorySubspace ContentsOfNode([NotNull] IFdbSubspace node, [NotNull] IFdbTuple relativePath, Slice layer) + private FdbDirectorySubspace ContentsOfNode([NotNull] IKeySubspace node, [NotNull] ITuple relativePath, Slice layer) { Contract.Requires(node != null); var path = this.Location.Concat(relativePath); - var prefix = this.NodeSubspace.Keys.Decode(node.Key); + var prefix = 
this.NodeSubspace.Keys.Decode(node.GetPrefix()); if (layer == FdbDirectoryPartition.LayerId) { - return new FdbDirectoryPartition(path, relativePath, prefix, this); + return new FdbDirectoryPartition(path, relativePath, prefix, this, TuPack.Encoding); } else { - return new FdbDirectorySubspace(path, relativePath, prefix, this, layer, TypeSystem.Default.GetDynamicEncoder()); + return new FdbDirectorySubspace(path, relativePath, prefix, this, layer, TuPack.Encoding); } } @@ -942,7 +949,7 @@ private FdbDirectoryPartition GetPartitionForNode(Node node) /// Finds a node subspace, given its path, by walking the tree from the root. /// Node if it was found, or null - private async Task FindAsync([NotNull] IFdbReadOnlyTransaction tr, [NotNull] IFdbTuple path) + private async Task FindAsync([NotNull] IFdbReadOnlyTransaction tr, [NotNull] ITuple path) { Contract.Requires(tr != null && path != null); @@ -953,14 +960,14 @@ private async Task FindAsync([NotNull] IFdbReadOnlyTransaction tr, [NotNul Slice layer = Slice.Nil; while (i < path.Count) { - if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Looking for child {0} under node {1}...", path.Get(i), n.Key); + if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Looking for child {0} under node {1}...", path.Get(i), n.GetPrefix()); n = NodeWithPrefix(await tr.GetAsync(GetSubDirKey(n, path.Get(i))).ConfigureAwait(false)); if (n == null) { return new Node(null, path.Substring(0, i + 1), path, Slice.Empty); } - if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Reading Layer value for subfolder {0} found at {1}", path, n.Key); + if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Reading Layer value for subfolder {0} found at {1}", path, n.GetPrefix()); layer = await tr.GetAsync(n.Keys.Encode(LayerSuffix)).ConfigureAwait(false); if (layer == FdbDirectoryPartition.LayerId) { // stop when reaching a partition @@ -974,14 +981,14 @@ private async Task FindAsync([NotNull] IFdbReadOnlyTransaction tr, [NotNul /// 
Returns the list of names and nodes of all children of the specified node [NotNull] - private IFdbAsyncEnumerable> SubdirNamesAndNodes([NotNull] IFdbReadOnlyTransaction tr, [NotNull] IFdbDynamicSubspace node) + private IAsyncEnumerable> SubdirNamesAndNodes([NotNull] IFdbReadOnlyTransaction tr, [NotNull] IDynamicKeySubspace node) { Contract.Requires(tr != null && node != null); var sd = node.Partition.ByKey(SUBDIRS); return tr .GetRange(sd.Keys.ToRange()) - .Select(kvp => new KeyValuePair( + .Select(kvp => new KeyValuePair( sd.Keys.Decode(kvp.Key), NodeWithPrefix(kvp.Value) )); @@ -989,14 +996,14 @@ private IFdbAsyncEnumerable> SubdirNam /// Remove an existing node from its parents /// True if the parent node was found, otherwise false - private async Task RemoveFromParent([NotNull] IFdbTransaction tr, [NotNull] IFdbTuple path) + private async Task RemoveFromParent([NotNull] IFdbTransaction tr, [NotNull] ITuple path) { Contract.Requires(tr != null && path != null); var parent = await FindAsync(tr, path.Substring(0, path.Count - 1)).ConfigureAwait(false); if (parent.Exists) { - if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Removing path {0} from its parent folder at {1}", path, parent.Subspace.Key); + if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Removing path {0} from its parent folder at {1}", path, parent.Subspace.GetPrefix()); tr.Clear(GetSubDirKey(parent.Subspace, path.Get(-1))); return true; } @@ -1004,7 +1011,7 @@ private async Task RemoveFromParent([NotNull] IFdbTransaction tr, [NotNull } /// Resursively remove a node (including the content), all its children - private async Task RemoveRecursive([NotNull] IFdbTransaction tr, [NotNull] IFdbDynamicSubspace node) + private async Task RemoveRecursive([NotNull] IFdbTransaction tr, [NotNull] IDynamicKeySubspace node) { Contract.Requires(tr != null && node != null); @@ -1012,10 +1019,10 @@ private async Task RemoveRecursive([NotNull] IFdbTransaction tr, [NotNull] IFdbD await 
SubdirNamesAndNodes(tr, node).ForEachAsync((kvp) => RemoveRecursive(tr, kvp.Value)).ConfigureAwait(false); // remove ALL the contents - if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Removing all content located under {0}", node.Key); - tr.ClearRange(ContentsOfNode(node, FdbTuple.Empty, Slice.Empty).ToRange()); + if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Removing all content located under {0}", node.GetPrefix()); + tr.ClearRange(ContentsOfNode(node, STuple.Empty, Slice.Empty).ToRange()); // and all the metadata for this folder - if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Removing all metadata for folder under {0}", node.Key); + if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Removing all metadata for folder under {0}", node.GetPrefix()); tr.ClearRange(node.Keys.ToRange()); } @@ -1039,7 +1046,7 @@ private async Task IsPrefixFree([NotNull] IFdbReadOnlyTransaction tr, Slic .ConfigureAwait(false); } - private static Slice GetSubDirKey([NotNull] IFdbDynamicSubspace parent, [NotNull] string path) + private static Slice GetSubDirKey([NotNull] IDynamicKeySubspace parent, [NotNull] string path) { Contract.Requires(parent != null && path != null); @@ -1056,9 +1063,9 @@ private static Slice GetSubDirKey([NotNull] IFdbDynamicSubspace parent, [NotNull /// Tuple that should only contain strings /// Array of strings [NotNull] - public static string[] ParsePath([NotNull] IFdbTuple path) + public static string[] ParsePath([NotNull] ITuple path) { - if (path == null) throw new ArgumentNullException("path"); + if (path == null) throw new ArgumentNullException(nameof(path)); var tmp = new string[path.Count]; for (int i = 0; i < tmp.Length; i++) { @@ -1070,23 +1077,23 @@ public static string[] ParsePath([NotNull] IFdbTuple path) [NotNull] public static string[] Combine([NotNull] IEnumerable parent, string path) { - if (parent == null) throw new ArgumentNullException("parent"); + if (parent == null) throw new 
ArgumentNullException(nameof(parent)); return parent.Concat(new[] { path }).ToArray(); } [NotNull] public static string[] Combine(IEnumerable parent, params string[] paths) { - if (parent == null) throw new ArgumentNullException("parent"); - if (paths == null) throw new ArgumentNullException("paths"); + if (parent == null) throw new ArgumentNullException(nameof(parent)); + if (paths == null) throw new ArgumentNullException(nameof(paths)); return parent.Concat(paths).ToArray(); } [NotNull] public static string[] Combine([NotNull] IEnumerable parent, [NotNull] IEnumerable paths) { - if (parent == null) throw new ArgumentNullException("parent"); - if (paths == null) throw new ArgumentNullException("paths"); + if (parent == null) throw new ArgumentNullException(nameof(parent)); + if (paths == null) throw new ArgumentNullException(nameof(paths)); return parent.Concat(paths).ToArray(); } @@ -1141,7 +1148,7 @@ public static string[] Parse(string path) [NotNull] public static string FormatPath([NotNull] IEnumerable paths) { - if (paths == null) throw new ArgumentNullException("paths"); + if (paths == null) throw new ArgumentNullException(nameof(paths)); return String.Join("/", paths.Select(path => { diff --git a/FoundationDB.Client/Layers/Directories/FdbDirectoryPartition.cs b/FoundationDB.Client/Layers/Directories/FdbDirectoryPartition.cs index 7ccc2fd6a..d183be738 100644 --- a/FoundationDB.Client/Layers/Directories/FdbDirectoryPartition.cs +++ b/FoundationDB.Client/Layers/Directories/FdbDirectoryPartition.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,41 +28,47 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Directories { - using FoundationDB.Client; - using FoundationDB.Layers.Tuples; using System; + using Doxense.Collections.Tuples; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; + using JetBrains.Annotations; + public class FdbDirectoryPartition : FdbDirectorySubspace { /// Returns a slice with the ASCII string "partition" - public static Slice LayerId { get { return Slice.FromString("partition"); } } - - private readonly FdbDirectoryLayer m_parentDirectoryLayer; + public static Slice LayerId => Slice.FromString("partition"); - internal FdbDirectoryPartition(IFdbTuple location, IFdbTuple relativeLocation, Slice prefix, FdbDirectoryLayer directoryLayer) - : base(location, relativeLocation, prefix, new FdbDirectoryLayer(FdbSubspace.CreateDynamic(prefix + FdbKey.Directory, TypeSystem.Tuples), FdbSubspace.CreateDynamic(prefix, TypeSystem.Tuples), location), LayerId, TypeSystem.Tuples.GetDynamicEncoder()) + internal FdbDirectoryPartition([NotNull] ITuple location, [NotNull] ITuple relativeLocation, Slice prefix, [NotNull] FdbDirectoryLayer directoryLayer, [NotNull] IKeyEncoding keyEncoding) + : base(location, relativeLocation, prefix, new FdbDirectoryLayer(FromKey(prefix + FdbKey.Directory).AsDynamic(keyEncoding), FromKey(prefix).AsDynamic(keyEncoding), location), LayerId, keyEncoding) { - m_parentDirectoryLayer = directoryLayer; + this.ParentDirectoryLayer = directoryLayer; } - internal FdbDirectoryLayer ParentDirectoryLayer { get { return m_parentDirectoryLayer; } } + internal FdbDirectoryLayer ParentDirectoryLayer { get; } protected override Slice GetKeyPrefix() { throw new InvalidOperationException("Cannot create keys in the root of a directory partition."); } + protected override KeyRange GetKeyRange() + { + throw new InvalidOperationException("Cannot create a key range in the root of a directory partition."); + } + public override bool Contains(Slice key) { throw new 
InvalidOperationException("Cannot check whether a key belongs to the root of a directory partition."); } - protected override IFdbTuple ToRelativePath(IFdbTuple location) + protected override ITuple ToRelativePath(ITuple location) { - return location ?? FdbTuple.Empty; + return location ?? STuple.Empty; } - protected override FdbDirectoryLayer GetLayerForPath(IFdbTuple relativeLocation) + protected override FdbDirectoryLayer GetLayerForPath(ITuple relativeLocation) { if (relativeLocation.Count == 0) { // Forward all actions on the Partition itself (empty path) to its parent's DL @@ -76,7 +82,7 @@ protected override FdbDirectoryLayer GetLayerForPath(IFdbTuple relativeLocation) public override string ToString() { - return String.Format("DirectoryPartition(path={0}, prefix={1})", this.FullName, this.InternalKey.ToAsciiOrHexaString()); + return $"DirectoryPartition(path={this.FullName}, prefix={GetPrefixUnsafe():K})"; } } diff --git a/FoundationDB.Client/Layers/Directories/FdbDirectorySubspace.cs b/FoundationDB.Client/Layers/Directories/FdbDirectorySubspace.cs index 205804016..045abe205 100644 --- a/FoundationDB.Client/Layers/Directories/FdbDirectorySubspace.cs +++ b/FoundationDB.Client/Layers/Directories/FdbDirectorySubspace.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,23 +28,24 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Directories { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; + using JetBrains.Annotations; /// A Directory Subspace represents the contents of a directory, but it also remembers the path with which it was opened and offers convenience methods to operate on the directory at that path. /// An instance of DirectorySubspace can be used for all the usual subspace operations. It can also be used to operate on the directory with which it was opened. - [DebuggerDisplay("Path={this.FullName}, Prefix={InternalKey}, Layer={Layer}")] - public class FdbDirectorySubspace : FdbDynamicSubspace, IFdbDirectory + [DebuggerDisplay("Path={this.FullName}, Prefix={Key}, Layer={Layer}")] + public class FdbDirectorySubspace : DynamicKeySubspace, IFdbDirectory { - internal FdbDirectorySubspace(IFdbTuple location, IFdbTuple relativeLocation, Slice prefix, FdbDirectoryLayer directoryLayer, Slice layer, IDynamicKeyEncoder encoder) - : base(prefix, encoder) + internal FdbDirectorySubspace([NotNull] ITuple location, [NotNull] ITuple relativeLocation, Slice prefix, [NotNull] FdbDirectoryLayer directoryLayer, Slice layer, [NotNull] IKeyEncoding encoding) + : base(prefix, encoding) { Contract.Requires(location != null && relativeLocation != null && prefix != null && directoryLayer != null); if (layer.IsNull) layer = Slice.Empty; @@ -60,10 +61,10 @@ internal FdbDirectorySubspace(IFdbTuple location, IFdbTuple relativeLocation, Sl } /// Absolute location of the directory - protected IFdbTuple Location { [NotNull] get; private set; } + protected ITuple Location { [NotNull] get; private set; } /// Location of 
the directory relative to its parent Directory Layer - protected IFdbTuple RelativeLocation { [NotNull] get; private set; } + protected ITuple RelativeLocation { [NotNull] get; private set; } /// Absolute path of this directory public IReadOnlyList Path { [NotNull] get; private set; } @@ -90,7 +91,7 @@ public string FullName /// Return the DirectoryLayer instance that should be called for the given path /// Location relative to this directory subspace - protected virtual FdbDirectoryLayer GetLayerForPath(IFdbTuple relativeLocation) + protected virtual FdbDirectoryLayer GetLayerForPath(ITuple relativeLocation) { // for regular directories, always returns its DL. return this.DirectoryLayer; @@ -100,7 +101,7 @@ protected virtual FdbDirectoryLayer GetLayerForPath(IFdbTuple relativeLocation) /// Path relative from this directory /// Path relative to the path of the current partition [NotNull] - protected virtual IFdbTuple ToRelativePath(IFdbTuple location) + protected virtual ITuple ToRelativePath(ITuple location) { return location == null ? this.RelativeLocation : this.RelativeLocation.Concat(location); } @@ -109,9 +110,9 @@ protected virtual IFdbTuple ToRelativePath(IFdbTuple location) /// Path relative from this directory /// Path relative to the path of the current partition [NotNull] - protected IFdbTuple ToRelativePath(IEnumerable path) + protected ITuple ToRelativePath(IEnumerable path) { - return ToRelativePath(path == null ? null : FdbTuple.FromEnumerable(path)); + return ToRelativePath(path == null ? 
null : STuple.FromEnumerable(path)); } /// Ensure that this directory was registered with the correct layer id @@ -121,16 +122,17 @@ public void CheckLayer(Slice layer) { if (layer.IsPresent && layer != this.Layer) { - throw new InvalidOperationException(String.Format("The directory {0} was created with incompatible layer {1} instead of expected {2}.", this.FullName, this.Layer.ToAsciiOrHexaString(), layer.ToAsciiOrHexaString())); + throw new InvalidOperationException($"The directory {this.FullName} was created with incompatible layer {this.Layer:P} instead of expected {layer:P}."); } } /// Change the layer id of this directory /// Transaction to use for the operation /// New layer id of this directory - public async Task ChangeLayerAsync([NotNull] IFdbTransaction trans, Slice newLayer) + [ItemNotNull] + public async Task ChangeLayerAsync(IFdbTransaction trans, Slice newLayer) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); if (newLayer.IsNull) newLayer = Slice.Empty; if (this.RelativeLocation.Count == 0) @@ -151,7 +153,7 @@ public async Task ChangeLayerAsync([NotNull] IFdbTransacti // set the layer to the new value await this.DirectoryLayer.ChangeLayerInternalAsync(trans, this.RelativeLocation, newLayer).ConfigureAwait(false); // and return the new version of the subspace - return new FdbDirectorySubspace(this.Location, this.RelativeLocation, this.InternalKey, this.DirectoryLayer, newLayer, TypeSystem.Default.GetDynamicEncoder()); + return new FdbDirectorySubspace(this.Location, this.RelativeLocation, GetKeyPrefix(), this.DirectoryLayer, newLayer, this.Encoding); } /// Opens a subdirectory with the given . 
@@ -160,10 +162,10 @@ public async Task ChangeLayerAsync([NotNull] IFdbTransacti /// Transaction to use for the operation /// Relative path of the subdirectory to create or open /// If is specified, it is checked against the layer of an existing subdirectory or set as the layer of a new subdirectory. - public Task CreateOrOpenAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) + public Task CreateOrOpenAsync(IFdbTransaction trans, IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return this.DirectoryLayer.CreateOrOpenInternalAsync(null, trans, ToRelativePath(path), layer, Slice.Nil, allowCreate: true, allowOpen: true, throwOnError: true); } @@ -174,10 +176,10 @@ public async Task ChangeLayerAsync([NotNull] IFdbTransacti /// Transaction to use for the operation /// Relative path of the subdirectory to open /// If specified, the opened directory must have the same layer id. 
- public Task OpenAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) + public Task OpenAsync(IFdbReadOnlyTransaction trans, IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return this.DirectoryLayer.CreateOrOpenInternalAsync(trans, null, ToRelativePath(path), layer, prefix: Slice.Nil, allowCreate: false, allowOpen: true, throwOnError: true); } @@ -188,10 +190,10 @@ public async Task ChangeLayerAsync([NotNull] IFdbTransacti /// Relative path of the subdirectory to open /// If specified, the opened directory must have the same layer id. /// Returns the directory if it exists, or null if it was not found - public Task TryOpenAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) + public Task TryOpenAsync(IFdbReadOnlyTransaction trans, IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return this.DirectoryLayer.CreateOrOpenInternalAsync(trans, null, ToRelativePath(path), layer, prefix: Slice.Nil, allowCreate: false, allowOpen: true, throwOnError: false); } @@ -201,10 +203,10 @@ public async Task ChangeLayerAsync([NotNull] IFdbTransacti /// Transaction to use for the operation /// Relative path of the subdirectory to create /// If is specified, it is recorded with the subdirectory and will be checked by future calls to open. 
- public Task CreateAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) + public Task CreateAsync(IFdbTransaction trans, IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return this.DirectoryLayer.CreateOrOpenInternalAsync(null, trans, ToRelativePath(path), layer, prefix: Slice.Nil, allowCreate: true, allowOpen: false, throwOnError: true); } @@ -214,10 +216,10 @@ public async Task ChangeLayerAsync([NotNull] IFdbTransacti /// Transaction to use for the operation /// Relative path of the subdirectory to create /// If is specified, it is recorded with the subdirectory and will be checked by future calls to open. - public Task TryCreateAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice layer = default(Slice)) + public Task TryCreateAsync(IFdbTransaction trans, IEnumerable path, Slice layer = default(Slice)) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return this.DirectoryLayer.CreateOrOpenInternalAsync(null, trans, ToRelativePath(path), layer, prefix: Slice.Nil, allowCreate: true, allowOpen: false, throwOnError: false); } @@ -226,10 +228,10 @@ public async Task ChangeLayerAsync([NotNull] IFdbTransacti /// Path of the directory to create /// If is specified, it is recorded with the directory and will be checked by future calls to open. /// The directory will be created with the given physical prefix; otherwise a prefix is allocated automatically. 
- public Task RegisterAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable path, Slice layer, Slice prefix) + public Task RegisterAsync(IFdbTransaction trans, IEnumerable path, Slice layer, Slice prefix) { - if (trans == null) throw new ArgumentNullException("trans"); - if (path == null) throw new ArgumentNullException("path"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (path == null) throw new ArgumentNullException(nameof(path)); return this.DirectoryLayer.CreateOrOpenInternalAsync(null, trans, ToRelativePath(path), layer, prefix: prefix, allowCreate: true, allowOpen: false, throwOnError: true); } @@ -239,13 +241,13 @@ public Task RegisterAsync([NotNull] IFdbTransaction trans, /// /// Transaction to use for the operation /// Full path (from the root) where this directory will be moved - public Task MoveToAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable newAbsolutePath) + public Task MoveToAsync(IFdbTransaction trans, IEnumerable newAbsolutePath) { - if (trans == null) throw new ArgumentNullException("trans"); - if (newAbsolutePath == null) throw new ArgumentNullException("newAbsolutePath"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (newAbsolutePath == null) throw new ArgumentNullException(nameof(newAbsolutePath)); // if 'this' is a Directory Partition, we need to move it via the parent DL ! 
- var directoryLayer = GetLayerForPath(FdbTuple.Empty); + var directoryLayer = GetLayerForPath(STuple.Empty); // verify that it is still inside the same partition var location = FdbDirectoryLayer.ParsePath(newAbsolutePath, "newAbsolutePath"); @@ -273,13 +275,13 @@ Task IFdbDirectory.MoveAsync(IFdbTransaction trans, IEnume /// /// Transaction to use for the operation /// Full path (from the root) where this directory will be moved - public Task TryMoveToAsync([NotNull] IFdbTransaction trans, [NotNull] IEnumerable newPath) + public Task TryMoveToAsync(IFdbTransaction trans, IEnumerable newPath) { - if (trans == null) throw new ArgumentNullException("trans"); - if (newPath == null) throw new ArgumentNullException("newPath"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (newPath == null) throw new ArgumentNullException(nameof(newPath)); // if 'this' is a Directory Partition, we need to move it via the parent DL ! - var directoryLayer = GetLayerForPath(FdbTuple.Empty); + var directoryLayer = GetLayerForPath(STuple.Empty); var location = FdbDirectoryLayer.ParsePath(newPath, "newPath"); if (!location.StartsWith(directoryLayer.Location)) throw new InvalidOperationException("Cannot move between partitions."); @@ -307,10 +309,10 @@ Task IFdbDirectory.TryMoveAsync(IFdbTransaction trans, IEn /// Transaction to use for the operation public Task RemoveAsync([NotNull] IFdbTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); // if 'this' is a Directory Partition, we need to remove it from the parent DL ! 
- var directoryLayer = GetLayerForPath(FdbTuple.Empty); + var directoryLayer = GetLayerForPath(STuple.Empty); return directoryLayer.RemoveInternalAsync(trans, this.RelativeLocation, throwIfMissing: true); } @@ -320,9 +322,9 @@ public Task RemoveAsync([NotNull] IFdbTransaction trans) /// /// Transaction to use for the operation /// Path of the sub-directory to remove (relative to this directory) - public Task RemoveAsync([NotNull] IFdbTransaction trans, IEnumerable path) + public Task RemoveAsync(IFdbTransaction trans, IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); // If path is empty, we are removing ourselves! var location = FdbDirectoryLayer.ParsePath(path, "path"); @@ -340,10 +342,10 @@ public Task RemoveAsync([NotNull] IFdbTransaction trans, IEnumerable pat /// Transaction to use for the operation public Task TryRemoveAsync([NotNull] IFdbTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); // if 'this' is a Directory Partition, we need to remove it from the parent DL ! - var directoryLayer = GetLayerForPath(FdbTuple.Empty); + var directoryLayer = GetLayerForPath(STuple.Empty); return directoryLayer.RemoveInternalAsync(trans, this.RelativeLocation, throwIfMissing: false); } @@ -353,9 +355,9 @@ public Task TryRemoveAsync([NotNull] IFdbTransaction trans) /// /// Transaction to use for the operation /// Path of the sub-directory to remove (relative to this directory) - public Task TryRemoveAsync([NotNull] IFdbTransaction trans, IEnumerable path) + public Task TryRemoveAsync(IFdbTransaction trans, IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); // If path is empty, we are removing ourselves! 
var location = FdbDirectoryLayer.ParsePath(path, "path"); @@ -371,19 +373,19 @@ public Task TryRemoveAsync([NotNull] IFdbTransaction trans, IEnumerableReturns true if the directory exists, otherwise false. public Task ExistsAsync([NotNull] IFdbReadOnlyTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); // if 'this' is a Directory Partition, we need to remove it from the parent DL ! - var directoryLayer = GetLayerForPath(FdbTuple.Empty); + var directoryLayer = GetLayerForPath(STuple.Empty); return directoryLayer.ExistsInternalAsync(trans, this.RelativeLocation); } /// Checks if a sub-directory exists /// Returns true if the directory exists, otherwise false. - public Task ExistsAsync([NotNull] IFdbReadOnlyTransaction trans, IEnumerable path) + public Task ExistsAsync(IFdbReadOnlyTransaction trans, IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); // If path is empty, we are checking ourselves! var location = FdbDirectoryLayer.ParsePath(path, "path"); @@ -398,35 +400,34 @@ public Task ExistsAsync([NotNull] IFdbReadOnlyTransaction trans, IEnumerab /// Returns the list of all the subdirectories of the current directory. public Task> ListAsync([NotNull] IFdbReadOnlyTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return this.DirectoryLayer.ListInternalAsync(trans, this.RelativeLocation, throwIfMissing: true); } /// Returns the list of all the subdirectories of a sub-directory. 
- public Task> ListAsync([NotNull] IFdbReadOnlyTransaction trans, IEnumerable path) + public Task> ListAsync(IFdbReadOnlyTransaction trans, IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return this.DirectoryLayer.ListInternalAsync(trans, ToRelativePath(path), throwIfMissing: true); } /// Returns the list of all the subdirectories of a sub-directory, it it exists. public Task> TryListAsync([NotNull] IFdbReadOnlyTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return this.DirectoryLayer.ListInternalAsync(trans, this.RelativeLocation, throwIfMissing: false); } /// Returns the list of all the subdirectories of the current directory, it it exists. - public Task> TryListAsync([NotNull] IFdbReadOnlyTransaction trans, IEnumerable path) + public Task> TryListAsync(IFdbReadOnlyTransaction trans, IEnumerable path) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return this.DirectoryLayer.ListInternalAsync(trans, ToRelativePath(path), throwIfMissing: false); } public override string DumpKey(Slice key) { - string str = base.DumpKey(key); - return String.Format("[/{0}]:{1}", this.FullName, str); + return $"[/{this.FullName}]:{base.DumpKey(key)}"; } /// Returns a user-friendly description of this directory @@ -434,11 +435,11 @@ public override string ToString() { if (this.Layer.IsNullOrEmpty) { - return String.Format("DirectorySubspace(path={0}, prefix={1})", this.FullName, this.InternalKey.ToAsciiOrHexaString()); + return $"DirectorySubspace(path={this.FullName}, prefix={GetPrefixUnsafe():K})"; } else { - return String.Format("DirectorySubspace(path={0}, prefix={1}, layer={2})", this.FullName, this.InternalKey.ToAsciiOrHexaString(), this.Layer.ToAsciiOrHexaString()); + return 
$"DirectorySubspace(path={this.FullName}, prefix={GetPrefixUnsafe():K}, layer={this.Layer:P})"; } } diff --git a/FoundationDB.Client/Layers/Directories/FdbHighContentionAllocator.cs b/FoundationDB.Client/Layers/Directories/FdbHighContentionAllocator.cs index cf9b268cc..07918ff8d 100644 --- a/FoundationDB.Client/Layers/Directories/FdbHighContentionAllocator.cs +++ b/FoundationDB.Client/Layers/Directories/FdbHighContentionAllocator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -46,7 +46,7 @@ public sealed class FdbHighContentionAllocator /// Create an allocator operating under a specific location /// - public FdbHighContentionAllocator(IFdbDynamicSubspace subspace) + public FdbHighContentionAllocator(IDynamicKeySubspace subspace) { if (subspace == null) throw new ArgumentException("subspace"); @@ -56,13 +56,13 @@ public FdbHighContentionAllocator(IFdbDynamicSubspace subspace) } /// Location of the allocator - public IFdbDynamicSubspace Subspace { [NotNull] get; private set; } + public IDynamicKeySubspace Subspace { [NotNull] get; private set; } /// Subspace used to store the allocation count for the current window - private IFdbDynamicSubspace Counters { [NotNull] get; set; } + private IDynamicKeySubspace Counters { [NotNull] get; set; } /// Subspace used to store the prefixes allocated in the current window - private IFdbDynamicSubspace Recent { [NotNull] get; set; } + private IDynamicKeySubspace Recent { [NotNull] get; set; } /// Returns a 64-bit integer that /// 1) has never and will never be returned by another call to this @@ -91,19 +91,19 @@ public async Task AllocateAsync([NotNull] IFdbTransaction trans) if ((count + 1) * 2 >= window) { // advance the window if (FdbDirectoryLayer.AnnotateTransactions) trans.Annotate("Advance allocator window size to {0} starting at {1}", window, start + window); - 
trans.ClearRange(this.Counters.Key, this.Counters.Keys.Encode(start) + FdbKey.MinValue); + trans.ClearRange(this.Counters.GetPrefix(), this.Counters.Keys.Encode(start) + FdbKey.MinValue); start += window; count = 0; - trans.ClearRange(this.Recent.Key, this.Recent.Keys.Encode(start)); + trans.ClearRange(this.Recent.GetPrefix(), this.Recent.Keys.Encode(start)); } // Increment the allocation count for the current window trans.AtomicAdd(this.Counters.Keys.Encode(start), Slice.FromFixed64(1)); // As of the snapshot being read from, the window is less than half - // full, so this should be expected to take 2 tries. Under high - // contention (and when the window advances), there is an additional - // subsequent risk of conflict for this transaction. + // full, so this should be expected to take 2 tries. Under high + // contention (and when the window advances), there is an additional + // subsequent risk of conflict for this transaction. while (true) { // Find a random free slot in the current window... diff --git a/FoundationDB.Client/Layers/Directories/IFdbDirectory.cs b/FoundationDB.Client/Layers/Directories/IFdbDirectory.cs index 927a13192..12319f5b4 100644 --- a/FoundationDB.Client/Layers/Directories/IFdbDirectory.cs +++ b/FoundationDB.Client/Layers/Directories/IFdbDirectory.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Client/Layers/Tuples/FdbTuple.cs b/FoundationDB.Client/Layers/Tuples/FdbTuple.cs deleted file mode 100644 index 17e9b17df..000000000 --- a/FoundationDB.Client/Layers/Tuples/FdbTuple.cs +++ /dev/null @@ -1,1458 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Layers.Tuples -{ - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.Collections; - using System.Collections.Generic; - using System.Diagnostics; - using System.Globalization; - using System.Linq; - using System.Text; - - /// Factory class for Tuples - public static class FdbTuple - { - /// Empty tuple - /// Not to be mistaken with a 1-tuple containing 'null' ! 
- public static readonly IFdbTuple Empty = new EmptyTuple(); - - /// Empty tuple (singleton that is used as a base for other tuples) - internal sealed class EmptyTuple : IFdbTuple - { - - public int Count - { - get { return 0; } - } - - object IReadOnlyList.this[int index] - { - get { throw new InvalidOperationException("Tuple is empty"); } - } - - public IFdbTuple this[int? from, int? to] - { - //REVIEW: should we throw if from/to are not null, 0 or -1 ? - get { return this; } - } - - public R Get(int index) - { - throw new InvalidOperationException("Tuple is empty"); - } - - R IFdbTuple.Last() - { - throw new InvalidOperationException("Tuple is empty"); - } - - public IFdbTuple Append(T1 value) - { - return new FdbTuple(value); - } - - public IFdbTuple Concat(IFdbTuple tuple) - { - if (tuple == null) throw new ArgumentNullException("tuple"); - if (tuple is EmptyTuple || tuple.Count == 0) return this; - return tuple; - } - - public void PackTo(ref TupleWriter writer) - { - //NO-OP - } - - public Slice ToSlice() - { - return Slice.Empty; - } - - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } - - public void CopyTo(object[] array, int offset) - { - //NO-OP - } - - public IEnumerator GetEnumerator() - { - yield break; - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - public override string ToString() - { - return "()"; - } - - public override int GetHashCode() - { - return 0; - } - - public bool Equals(IFdbTuple value) - { - return value != null && value.Count == 0; - } - - public override bool Equals(object obj) - { - return Equals(obj as IFdbTuple); - } - - bool System.Collections.IStructuralEquatable.Equals(object other, System.Collections.IEqualityComparer comparer) - { - var tuple = other as IFdbTuple; - return tuple != null && tuple.Count == 0; - } - - int System.Collections.IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer comparer) - { - 
return 0; - } - - } - - #region Creation - - /// Create a new 1-tuple, holding only one item - /// This is the non-generic equivalent of FdbTuple.Create<object>() - [NotNull] - public static IFdbTuple CreateBoxed(object item) - { - return new FdbTuple(item); - } - - /// Create a new 1-tuple, holding only one item - [DebuggerStepThrough] - public static FdbTuple Create(T1 item1) - { - return new FdbTuple(item1); - } - - /// Create a new 2-tuple, holding two items - [DebuggerStepThrough] - public static FdbTuple Create(T1 item1, T2 item2) - { - return new FdbTuple(item1, item2); - } - - /// Create a new 3-tuple, holding three items - [DebuggerStepThrough] - public static FdbTuple Create(T1 item1, T2 item2, T3 item3) - { - return new FdbTuple(item1, item2, item3); - } - - /// Create a new 4-tuple, holding four items - [DebuggerStepThrough] - public static FdbTuple Create(T1 item1, T2 item2, T3 item3, T4 item4) - { - return new FdbTuple(item1, item2, item3, item4); - } - - /// Create a new 5-tuple, holding five items - [DebuggerStepThrough] - public static FdbTuple Create(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) - { - return new FdbTuple(item1, item2, item3, item4, item5); - } - - /// Create a new N-tuple, from N items - /// Items to wrap in a tuple - /// If you already have an array of items, you should call instead. Mutating the array, would also mutate the tuple! - [NotNull] - public static IFdbTuple Create([NotNull] params object[] items) - { - if (items == null) throw new ArgumentNullException("items"); - - //note: this is a convenience method for people that wants to pass more than 3 args arguments, and not have to call CreateRange(object[]) method - - if (items.Length == 0) return FdbTuple.Empty; - - // We don't copy the array, and rely on the fact that the array was created by the compiler and that nobody will get a reference on it. 
- return new FdbListTuple(items, 0, items.Length); - } - - /// Create a new N-tuple that wraps an array of untyped items - /// If the original array is mutated, the tuple will reflect the changes! - [NotNull] - public static IFdbTuple Wrap([NotNull] object[] items) - { - //note: this method only exists to differentiate between Create(object[]) and Create() - if (items == null) throw new ArgumentException("items"); - return FromObjects(items, 0, items.Length, copy: false); - } - - /// Create a new N-tuple that wraps a section of an array of untyped items - /// If the original array is mutated, the tuple will reflect the changes! - [NotNull] - public static IFdbTuple Wrap([NotNull] object[] items, int offset, int count) - { - return FromObjects(items, offset, count, copy: false); - } - - /// Create a new N-tuple by copying the content of an array of untyped items - [NotNull] - public static IFdbTuple FromObjects([NotNull] object[] items) - { - //note: this method only exists to differentiate between Create(object[]) and Create() - if (items == null) throw new ArgumentException("items"); - return FromObjects(items, 0, items.Length, copy: true); - } - - /// Create a new N-tuple by copying a section of an array of untyped items - [NotNull] - public static IFdbTuple FromObjects([NotNull] object[] items, int offset, int count) - { - return FromObjects(items, offset, count, copy: true); - } - - /// Create a new N-tuple that wraps a section of an array of untyped items - /// If is true, and the original array is mutated, the tuple will reflect the changes! 
- [NotNull] - public static IFdbTuple FromObjects([NotNull] object[] items, int offset, int count, bool copy) - { - if (items == null) throw new ArgumentNullException("items"); - if (offset < 0) throw new ArgumentOutOfRangeException("offset", "Offset cannot be less than zero"); - if (count < 0) throw new ArgumentOutOfRangeException("count", "Count cannot be less than zero"); - if (offset + count > items.Length) throw new ArgumentOutOfRangeException("count", "Source array is too small"); - - if (count == 0) return FdbTuple.Empty; - - if (copy) - { - var tmp = new object[count]; - Array.Copy(items, offset, tmp, 0, count); - return new FdbListTuple(tmp, 0, count); - } - else - { - // can mutate if passed a pre-allocated array: { var foo = new objec[123]; Create(foo); foo[42] = "bad"; } - return new FdbListTuple(items, offset, count); - } - } - - /// Create a new tuple, from an array of typed items - /// Array of items - /// Tuple with the same size as and where all the items are of type - [NotNull] - public static IFdbTuple FromArray([NotNull] T[] items) - { - if (items == null) throw new ArgumentNullException("items"); - - return FromArray(items, 0, items.Length); - } - - /// Create a new tuple, from a section of an array of typed items - [NotNull] - public static IFdbTuple FromArray([NotNull] T[] items, int offset, int count) - { - if (items == null) throw new ArgumentNullException("items"); - if (offset < 0) throw new ArgumentOutOfRangeException("offset", "Offset cannot be less than zero"); - if (count < 0) throw new ArgumentOutOfRangeException("count", "Count cannot be less than zero"); - if (offset + count > items.Length) throw new ArgumentOutOfRangeException("count", "Source array is too small"); - - switch(count) - { - case 0: return FdbTuple.Empty; - case 1: return FdbTuple.Create(items[offset]); - case 2: return FdbTuple.Create(items[offset], items[offset + 1]); - case 3: return FdbTuple.Create(items[offset], items[offset + 1], items[offset + 2]); - case 4: 
return FdbTuple.Create(items[offset], items[offset + 1], items[offset + 2], items[offset + 3]); - default: - { // copy the items in a temp array - //TODO: we would probably benefit from having an FdbListTuple here! - var tmp = new object[count]; - Array.Copy(items, offset, tmp, 0, count); - return new FdbListTuple(tmp, 0, count); - } - } - } - - /// Create a new tuple from a sequence of typed items - [NotNull] - public static IFdbTuple FromEnumerable([NotNull] IEnumerable items) - { - if (items == null) throw new ArgumentNullException("items"); - - var arr = items as T[]; - if (arr != null) - { - return FromArray(arr, 0, arr.Length); - } - - // may already be a tuple (because it implements IE) - var tuple = items as IFdbTuple; - if (tuple != null) - { - return tuple; - } - - object[] tmp = items.Cast().ToArray(); - //TODO: we would probably benefit from having an FdbListTuple here! - return new FdbListTuple(tmp, 0, tmp.Length); - } - - /// Concatenates two tuples together - [NotNull] - public static IFdbTuple Concat([NotNull] IFdbTuple head, [NotNull] IFdbTuple tail) - { - if (head == null) throw new ArgumentNullException("head"); - if (tail == null) throw new ArgumentNullException("tail"); - - int n1 = head.Count; - if (n1 == 0) return tail; - - int n2 = tail.Count; - if (n2 == 0) return head; - - return new FdbJoinedTuple(head, tail); - } - - #endregion - - #region Packing... - - // Without prefix - - /// Pack a tuple into a slice - /// Tuple that must be serialized into a binary slice - public static Slice Pack([NotNull] IFdbTuple tuple) - { - //note: this is redundant with tuple.ToSlice() - // => maybe we should remove this method? 
- - if (tuple == null) throw new ArgumentNullException("tuple"); - return tuple.ToSlice(); - } - - /// Pack an array of N-tuples, all sharing the same buffer - /// Sequence of N-tuples to pack - /// Array containing the buffer segment of each packed tuple - /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] - [NotNull] - public static Slice[] Pack([NotNull] params IFdbTuple[] tuples) - { - return Pack(Slice.Nil, tuples); - } - - /// Pack a sequence of N-tuples, all sharing the same buffer - /// Sequence of N-tuples to pack - /// Array containing the buffer segment of each packed tuple - /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] - [NotNull] - public static Slice[] Pack([NotNull] IEnumerable tuples) - { - return Pack(Slice.Nil, tuples); - } - - // With prefix - - public static void Pack(ref TupleWriter writer, [CanBeNull] IFdbTuple tuple) - { - if (tuple == null || tuple.Count == 0) return; - tuple.PackTo(ref writer); - } - - /// Efficiently concatenate a prefix with the packed representation of a tuple - public static Slice Pack(Slice prefix, [CanBeNull] IFdbTuple tuple) - { - if (tuple == null || tuple.Count == 0) return prefix; - - var writer = new TupleWriter(); - writer.Output.WriteBytes(prefix); - tuple.PackTo(ref writer); - return writer.Output.ToSlice(); - } - - /// Pack an array of N-tuples, all sharing the same buffer - /// Commong prefix added to all the tuples - /// Sequence of N-tuples to pack - /// Array containing the buffer segment of each packed tuple - /// BatchPack("abc", [ ("Foo", 1), ("Foo", 2) ]) => [ "abc\x02Foo\x00\x15\x01", "abc\x02Foo\x00\x15\x02" ] - [NotNull] - public static Slice[] Pack(Slice prefix, [NotNull] params IFdbTuple[] tuples) - { - if (tuples == null) throw new ArgumentNullException("tuples"); - - // pre-allocate by supposing that each tuple will take at least 16 bytes - var writer = new TupleWriter(tuples.Length * (16 + 
prefix.Count)); - var next = new List(tuples.Length); - - //TODO: use multiple buffers if item count is huge ? - - foreach (var tuple in tuples) - { - writer.Output.WriteBytes(prefix); - tuple.PackTo(ref writer); - next.Add(writer.Output.Position); - } - - return FdbKey.SplitIntoSegments(writer.Output.Buffer, 0, next); - } - - /// Pack a sequence of N-tuples, all sharing the same buffer - /// Commong prefix added to all the tuples - /// Sequence of N-tuples to pack - /// Array containing the buffer segment of each packed tuple - /// BatchPack("abc", [ ("Foo", 1), ("Foo", 2) ]) => [ "abc\x02Foo\x00\x15\x01", "abc\x02Foo\x00\x15\x02" ] - [NotNull] - public static Slice[] Pack(Slice prefix, [NotNull] IEnumerable tuples) - { - if (tuples == null) throw new ArgumentNullException("tuples"); - - // use optimized version for arrays - var array = tuples as IFdbTuple[]; - if (array != null) return Pack(prefix, array); - - var next = new List(); - var writer = new TupleWriter(); - - //TODO: use multiple buffers if item count is huge ? - - foreach (var tuple in tuples) - { - writer.Output.WriteBytes(prefix); - tuple.PackTo(ref writer); - next.Add(writer.Output.Position); - } - - return FdbKey.SplitIntoSegments(writer.Output.Buffer, 0, next); - } - - [NotNull] - public static Slice[] Pack(Slice prefix, [NotNull] TElement[] elements, Func transform) - { - if (elements == null) throw new ArgumentNullException("elements"); - if (transform == null) throw new ArgumentNullException("transform"); - - var next = new List(elements.Length); - var writer = new TupleWriter(); - - //TODO: use multiple buffers if item count is huge ? 
- - foreach (var element in elements) - { - var tuple = transform(element); - if (tuple == null) - { - next.Add(writer.Output.Position); - } - else - { - writer.Output.WriteBytes(prefix); - tuple.PackTo(ref writer); - next.Add(writer.Output.Position); - } - } - - return FdbKey.SplitIntoSegments(writer.Output.Buffer, 0, next); - } - - [NotNull] - public static Slice[] Pack(Slice prefix, [NotNull] IEnumerable elements, Func transform) - { - if (elements == null) throw new ArgumentNullException("elements"); - if (transform == null) throw new ArgumentNullException("transform"); - - // use optimized version for arrays - var array = elements as TElement[]; - if (array != null) return Pack(prefix, array, transform); - - var next = new List(); - var writer = new TupleWriter(); - - //TODO: use multiple buffers if item count is huge ? - - foreach (var element in elements) - { - var tuple = transform(element); - if (tuple == null) - { - next.Add(writer.Output.Position); - } - else - { - writer.Output.WriteBytes(prefix); - tuple.PackTo(ref writer); - next.Add(writer.Output.Position); - } - } - - return FdbKey.SplitIntoSegments(writer.Output.Buffer, 0, next); - } - - #endregion - - #region Encode - - //REVIEW: EncodeKey/EncodeKeys? Encode/EncodeRange? EncodeValues? EncodeItems? 
- - /// Pack a 1-tuple directly into a slice - public static Slice EncodeKey(T1 item1) - { - var writer = new TupleWriter(); - FdbTuplePacker.SerializeTo(ref writer, item1); - return writer.Output.ToSlice(); - } - - /// Pack a 2-tuple directly into a slice - public static Slice EncodeKey(T1 item1, T2 item2) - { - var writer = new TupleWriter(); - FdbTuplePacker.SerializeTo(ref writer, item1); - FdbTuplePacker.SerializeTo(ref writer, item2); - return writer.Output.ToSlice(); - } - - /// Pack a 3-tuple directly into a slice - public static Slice EncodeKey(T1 item1, T2 item2, T3 item3) - { - var writer = new TupleWriter(); - FdbTuplePacker.SerializeTo(ref writer, item1); - FdbTuplePacker.SerializeTo(ref writer, item2); - FdbTuplePacker.SerializeTo(ref writer, item3); - return writer.Output.ToSlice(); - } - - /// Pack a 4-tuple directly into a slice - public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4) - { - var writer = new TupleWriter(); - FdbTuplePacker.SerializeTo(ref writer, item1); - FdbTuplePacker.SerializeTo(ref writer, item2); - FdbTuplePacker.SerializeTo(ref writer, item3); - FdbTuplePacker.SerializeTo(ref writer, item4); - return writer.Output.ToSlice(); - } - - /// Pack a 5-tuple directly into a slice - public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) - { - var writer = new TupleWriter(); - FdbTuplePacker.SerializeTo(ref writer, item1); - FdbTuplePacker.SerializeTo(ref writer, item2); - FdbTuplePacker.SerializeTo(ref writer, item3); - FdbTuplePacker.SerializeTo(ref writer, item4); - FdbTuplePacker.SerializeTo(ref writer, item5); - return writer.Output.ToSlice(); - } - - /// Pack a 6-tuple directly into a slice - public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) - { - var writer = new TupleWriter(); - FdbTuplePacker.SerializeTo(ref writer, item1); - FdbTuplePacker.SerializeTo(ref writer, item2); - FdbTuplePacker.SerializeTo(ref writer, item3); - 
FdbTuplePacker.SerializeTo(ref writer, item4); - FdbTuplePacker.SerializeTo(ref writer, item5); - FdbTuplePacker.SerializeTo(ref writer, item6); - return writer.Output.ToSlice(); - } - - /// Pack a 6-tuple directly into a slice - public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) - { - var writer = new TupleWriter(); - FdbTuplePacker.SerializeTo(ref writer, item1); - FdbTuplePacker.SerializeTo(ref writer, item2); - FdbTuplePacker.SerializeTo(ref writer, item3); - FdbTuplePacker.SerializeTo(ref writer, item4); - FdbTuplePacker.SerializeTo(ref writer, item5); - FdbTuplePacker.SerializeTo(ref writer, item6); - FdbTuplePacker.SerializeTo(ref writer, item7); - return writer.Output.ToSlice(); - } - - /// Pack a 6-tuple directly into a slice - public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) - { - var writer = new TupleWriter(); - FdbTuplePacker.SerializeTo(ref writer, item1); - FdbTuplePacker.SerializeTo(ref writer, item2); - FdbTuplePacker.SerializeTo(ref writer, item3); - FdbTuplePacker.SerializeTo(ref writer, item4); - FdbTuplePacker.SerializeTo(ref writer, item5); - FdbTuplePacker.SerializeTo(ref writer, item6); - FdbTuplePacker.SerializeTo(ref writer, item7); - FdbTuplePacker.SerializeTo(ref writer, item8); - return writer.Output.ToSlice(); - } - - [NotNull] - public static Slice[] EncodeKeys([NotNull] IEnumerable keys) - { - return EncodePrefixedKeys(Slice.Nil, keys); - } - - /// Merge a sequence of keys with a same prefix, all sharing the same buffer - /// Type of the keys - /// Prefix shared by all keys - /// Sequence of keys to pack - /// Array of slices (for all keys) that share the same underlying buffer - [NotNull] - public static Slice[] EncodePrefixedKeys(Slice prefix, [NotNull] IEnumerable keys) - { - if (prefix == null) throw new ArgumentNullException("prefix"); - if (keys == null) throw new ArgumentNullException("keys"); - - // use optimized 
version for arrays - var array = keys as T[]; - if (array != null) return EncodePrefixedKeys(prefix, array); - - var next = new List(); - var writer = new TupleWriter(); - var packer = FdbTuplePacker.Encoder; - - //TODO: use multiple buffers if item count is huge ? - - foreach (var key in keys) - { - if (prefix.IsPresent) writer.Output.WriteBytes(prefix); - packer(ref writer, key); - next.Add(writer.Output.Position); - } - - return FdbKey.SplitIntoSegments(writer.Output.Buffer, 0, next); - } - - [NotNull] - public static Slice[] EncodeKeys([NotNull] params T[] keys) - { - return EncodePrefixedKeys(Slice.Nil, keys); - } - - /// Merge an array of keys with a same prefix, all sharing the same buffer - /// Type of the keys - /// Prefix shared by all keys - /// Sequence of keys to pack - /// Array of slices (for all keys) that share the same underlying buffer - [NotNull] - public static Slice[] EncodePrefixedKeys(Slice prefix, [NotNull] params T[] keys) - { - if (keys == null) throw new ArgumentNullException("keys"); - - // pre-allocate by guessing that each key will take at least 8 bytes. Even if 8 is too small, we should have at most one or two buffer resize - var writer = new TupleWriter(keys.Length * (prefix.Count + 8)); - var next = new List(keys.Length); - var packer = FdbTuplePacker.Encoder; - - //TODO: use multiple buffers if item count is huge ? 
- - foreach (var key in keys) - { - if (prefix.Count > 0) writer.Output.WriteBytes(prefix); - packer(ref writer, key); - next.Add(writer.Output.Position); - } - - return FdbKey.SplitIntoSegments(writer.Output.Buffer, 0, next); - } - - /// Merge an array of elements, all sharing the same buffer - /// Type of the elements - /// Type of the keys extracted from the elements - /// Sequence of elements to pack - /// Lambda that extract the key from each element - /// Array of slices (for all keys) that share the same underlying buffer - [NotNull] - public static Slice[] EncodeKeys([NotNull] TElement[] elements, [NotNull] Func selector) - { - return EncodePrefixedKeys(Slice.Empty, elements, selector); - } - - /// Merge an array of elements with a same prefix, all sharing the same buffer - /// Type of the elements - /// Type of the keys extracted from the elements - /// Prefix shared by all keys (can be empty) - /// Sequence of elements to pack - /// Lambda that extract the key from each element - /// Array of slices (for all keys) that share the same underlying buffer - [NotNull] - public static Slice[] EncodePrefixedKeys(Slice prefix, [NotNull] TElement[] elements, [NotNull] Func selector) - { - if (elements == null) throw new ArgumentNullException("elements"); - if (selector == null) throw new ArgumentNullException("selector"); - - // pre-allocate by guessing that each key will take at least 8 bytes. Even if 8 is too small, we should have at most one or two buffer resize - var writer = new TupleWriter(elements.Length * (prefix.Count + 8)); - var next = new List(elements.Length); - var packer = FdbTuplePacker.Encoder; - - //TODO: use multiple buffers if item count is huge ? 
- - foreach (var value in elements) - { - if (prefix.Count > 0) writer.Output.WriteBytes(prefix); - packer(ref writer, selector(value)); - next.Add(writer.Output.Position); - } - - return FdbKey.SplitIntoSegments(writer.Output.Buffer, 0, next); - } - - /// Pack a sequence of keys with a same prefix, all sharing the same buffer - /// Type of the keys - /// Prefix shared by all keys - /// Sequence of keys to pack - /// Array of slices (for all keys) that share the same underlying buffer - [NotNull] - public static Slice[] EncodePrefixedKeys([NotNull] IFdbTuple prefix, [NotNull] IEnumerable keys) - { - if (prefix == null) throw new ArgumentNullException("prefix"); - - return EncodePrefixedKeys(prefix.ToSlice(), keys); - } - - /// Pack a sequence of keys with a same prefix, all sharing the same buffer - /// Type of the keys - /// Prefix shared by all keys - /// Sequence of keys to pack - /// Array of slices (for all keys) that share the same underlying buffer - [NotNull] - public static Slice[] EncodePrefixedKeys([NotNull] IFdbTuple prefix, [NotNull] params T[] keys) - { - if (prefix == null) throw new ArgumentNullException("prefix"); - - return EncodePrefixedKeys(prefix.ToSlice(), keys); - } - - #endregion - - #region Unpacking... 
- - /// Unpack a tuple from a serialied key blob - /// Binary key containing a previously packed tuple - /// Unpacked tuple, or the empty tuple if the key is - /// If is equal to - [NotNull] - public static IFdbTuple Unpack(Slice packedKey) - { - if (packedKey.IsNull) throw new ArgumentNullException("packedKey"); - if (packedKey.Count == 0) return FdbTuple.Empty; - - return FdbTuplePackers.Unpack(packedKey, false); - } - - /// Unpack a tuple from a binary representation - /// Binary key containing a previously packed tuple, or Slice.Nil - /// Unpacked tuple, the empty tuple if is equal to , or null if the key is - [CanBeNull] - public static IFdbTuple UnpackOrDefault(Slice packedKey) - { - if (packedKey.IsNull) return null; - if (packedKey.Count == 0) return FdbTuple.Empty; - return FdbTuplePackers.Unpack(packedKey, false); - } - - /// Unpack a tuple and only return its first element - /// Type of the first value in the decoded tuple - /// Slice that should be entirely parsable as a tuple - /// Decoded value of the first item in the tuple - public static T DecodeFirst(Slice packedKey) - { - if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack the first element of an empty tuple"); - - var slice = FdbTuplePackers.UnpackFirst(packedKey); - if (slice.IsNull) throw new InvalidOperationException("Failed to unpack tuple"); - - return FdbTuplePacker.Deserialize(slice); - } - - /// Unpack a tuple and only return its last element - /// Type of the last value in the decoded tuple - /// Slice that should be entirely parsable as a tuple - /// Decoded value of the last item in the tuple - public static T DecodeLast(Slice packedKey) - { - if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack the last element of an empty tuple"); - - var slice = FdbTuplePackers.UnpackLast(packedKey); - if (slice.IsNull) throw new InvalidOperationException("Failed to unpack tuple"); - - return FdbTuplePacker.Deserialize(slice); - } - - /// 
Unpack the value of a singletion tuple - /// Type of the single value in the decoded tuple - /// Slice that should contain the packed representation of a tuple with a single element - /// Decoded value of the only item in the tuple. Throws an exception if the tuple is empty of has more than one element. - public static T DecodeKey(Slice packedKey) - { - if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack a single value out of an empty tuple"); - - var slice = FdbTuplePackers.UnpackSingle(packedKey); - if (slice.IsNull) throw new InvalidOperationException("Failed to unpack singleton tuple"); - - return FdbTuplePacker.Deserialize(slice); - } - - /// Unpack a key containing two elements - /// Slice that should contain the packed representation of a tuple with two elements - /// Decoded value of the elements int the tuple. Throws an exception if the tuple is empty of has more than elements. - public static FdbTuple DecodeKey(Slice packedKey) - { - if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack an empty tuple"); - - var reader = new TupleReader(packedKey); - - T1 item1; - if (!DecodeNext(ref reader, out item1)) throw new FormatException("Failed to decode first item"); - - T2 item2; - if (!DecodeNext(ref reader, out item2)) throw new FormatException("Failed to decode second item"); - - if (reader.Input.HasMore) throw new FormatException("The key contains more than two items"); - - return Create(item1, item2); - } - - /// Unpack a key containing three elements - /// Slice that should contain the packed representation of a tuple with three elements - /// Decoded value of the elements int the tuple. Throws an exception if the tuple is empty of has more than elements. 
- public static FdbTuple DecodeKey(Slice packedKey) - { - if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack an empty tuple"); - - var reader = new TupleReader(packedKey); - - T1 item1; - if (!DecodeNext(ref reader, out item1)) throw new FormatException("Failed to decode first item"); - - T2 item2; - if (!DecodeNext(ref reader, out item2)) throw new FormatException("Failed to decode second item"); - - T3 item3; - if (!DecodeNext(ref reader, out item3)) throw new FormatException("Failed to decode third item"); - - if (reader.Input.HasMore) throw new FormatException("The key contains more than three items"); - - return Create(item1, item2, item3); - } - - /// Unpack a key containing four elements - /// Slice that should contain the packed representation of a tuple with four elements - /// Decoded value of the elements int the tuple. Throws an exception if the tuple is empty of has more than elements. - public static FdbTuple DecodeKey(Slice packedKey) - { - if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack an empty tuple"); - - var reader = new TupleReader(packedKey); - - T1 item1; - if (!DecodeNext(ref reader, out item1)) throw new FormatException("Failed to decode first item"); - - T2 item2; - if (!DecodeNext(ref reader, out item2)) throw new FormatException("Failed to decode second item"); - - T3 item3; - if (!DecodeNext(ref reader, out item3)) throw new FormatException("Failed to decode third item"); - - T4 item4; - if (!DecodeNext(ref reader, out item4)) throw new FormatException("Failed to decode fourth item"); - - if (reader.Input.HasMore) throw new FormatException("The key contains more than four items"); - - return Create(item1, item2, item3, item4); - } - - /// Unpack a key containing five elements - /// Slice that should contain the packed representation of a tuple with five elements - /// Decoded value of the elements int the tuple. 
Throws an exception if the tuple is empty of has more than elements. - public static FdbTuple DecodeKey(Slice packedKey) - { - if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack an empty tuple"); - - var reader = new TupleReader(packedKey); - - T1 item1; - if (!DecodeNext(ref reader, out item1)) throw new FormatException("Failed to decode first item"); - - T2 item2; - if (!DecodeNext(ref reader, out item2)) throw new FormatException("Failed to decode second item"); - - T3 item3; - if (!DecodeNext(ref reader, out item3)) throw new FormatException("Failed to decode third item"); - - T4 item4; - if (!DecodeNext(ref reader, out item4)) throw new FormatException("Failed to decode fourth item"); - - T5 item5; - if (!DecodeNext(ref reader, out item5)) throw new FormatException("Failed to decode fiftyh item"); - - if (reader.Input.HasMore) throw new FormatException("The key contains more than four items"); - - return Create(item1, item2, item3, item4, item5); - } - - /// Unpack the next item in the tuple, and advance the cursor - /// Type of the next value in the tuple - /// Reader positionned at the start of the next item to read - /// If decoding succeedsd, receives the decoded value. - /// True if the decoded succeeded (and receives the decoded value). False if the tuple has reached the end. - public static bool DecodeNext(ref TupleReader input, out T value) - { - if (!input.Input.HasMore) - { - value = default(T); - return false; - } - - var slice = FdbTupleParser.ParseNext(ref input); - value = FdbTuplePacker.Deserialize(slice); - return true; - } - - #endregion - - #region PackWithPrefix... 
- - //note: they are equivalent to the Pack<...>() methods, they only take a binary prefix - - /// Efficiently concatenate a prefix with the packed representation of a 1-tuple - public static Slice EncodePrefixedKey(Slice prefix, T value) - { - var writer = new TupleWriter(); - writer.Output.WriteBytes(prefix); - FdbTuplePacker.Encoder(ref writer, value); - return writer.Output.ToSlice(); - } - - /// Efficiently concatenate a prefix with the packed representation of a 2-tuple - public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2) - { - var writer = new TupleWriter(); - writer.Output.WriteBytes(prefix); - FdbTuplePacker.Encoder(ref writer, value1); - FdbTuplePacker.Encoder(ref writer, value2); - return writer.Output.ToSlice(); - } - - /// Efficiently concatenate a prefix with the packed representation of a 3-tuple - public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3) - { - var writer = new TupleWriter(); - writer.Output.WriteBytes(prefix); - FdbTuplePacker.Encoder(ref writer, value1); - FdbTuplePacker.Encoder(ref writer, value2); - FdbTuplePacker.Encoder(ref writer, value3); - return writer.Output.ToSlice(); - } - - /// Efficiently concatenate a prefix with the packed representation of a 4-tuple - public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4) - { - var writer = new TupleWriter(); - writer.Output.WriteBytes(prefix); - FdbTuplePacker.Encoder(ref writer, value1); - FdbTuplePacker.Encoder(ref writer, value2); - FdbTuplePacker.Encoder(ref writer, value3); - FdbTuplePacker.Encoder(ref writer, value4); - return writer.Output.ToSlice(); - } - - /// Efficiently concatenate a prefix with the packed representation of a 5-tuple - public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5) - { - var writer = new TupleWriter(); - writer.Output.WriteBytes(prefix); - FdbTuplePacker.Encoder(ref writer, value1); - 
FdbTuplePacker.Encoder(ref writer, value2); - FdbTuplePacker.Encoder(ref writer, value3); - FdbTuplePacker.Encoder(ref writer, value4); - FdbTuplePacker.Encoder(ref writer, value5); - return writer.Output.ToSlice(); - } - - /// Efficiently concatenate a prefix with the packed representation of a 6-tuple - public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6) - { - var writer = new TupleWriter(); - writer.Output.WriteBytes(prefix); - FdbTuplePacker.Encoder(ref writer, value1); - FdbTuplePacker.Encoder(ref writer, value2); - FdbTuplePacker.Encoder(ref writer, value3); - FdbTuplePacker.Encoder(ref writer, value4); - FdbTuplePacker.Encoder(ref writer, value5); - FdbTuplePacker.Encoder(ref writer, value6); - return writer.Output.ToSlice(); - } - - /// Efficiently concatenate a prefix with the packed representation of a 7-tuple - public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7) - { - var writer = new TupleWriter(); - writer.Output.WriteBytes(prefix); - FdbTuplePacker.Encoder(ref writer, value1); - FdbTuplePacker.Encoder(ref writer, value2); - FdbTuplePacker.Encoder(ref writer, value3); - FdbTuplePacker.Encoder(ref writer, value4); - FdbTuplePacker.Encoder(ref writer, value5); - FdbTuplePacker.Encoder(ref writer, value6); - FdbTuplePacker.Encoder(ref writer, value7); - return writer.Output.ToSlice(); - } - - /// Efficiently concatenate a prefix with the packed representation of a 8-tuple - public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7, T8 value8) - { - var writer = new TupleWriter(); - writer.Output.WriteBytes(prefix); - FdbTuplePacker.Encoder(ref writer, value1); - FdbTuplePacker.Encoder(ref writer, value2); - FdbTuplePacker.Encoder(ref writer, value3); - FdbTuplePacker.Encoder(ref writer, value4); - FdbTuplePacker.Encoder(ref writer, value5); - 
FdbTuplePacker.Encoder(ref writer, value6); - FdbTuplePacker.Encoder(ref writer, value7); - FdbTuplePacker.Encoder(ref writer, value8); - return writer.Output.ToSlice(); - } - - #endregion - - #region Internal Helpers... - - /// Determines whether the specified tuple instances are considered equal - /// Left tuple - /// Right tuple - /// True if the tuples are considered equal; otherwise, false. If both and are null, the methods returns true; - /// This method is equivalent of calling left.Equals(right), - public static bool Equals(IFdbTuple left, IFdbTuple right) - { - if (object.ReferenceEquals(left, null)) return object.ReferenceEquals(right, null); - return left.Equals(right); - } - - /// Determines whether the specifield tuple instances are considered similar - /// Left tuple - /// Right tuple - /// True if the tuples are considered similar; otherwise, false. If both and are null, the methods returns true; - public static bool Equivalent(IFdbTuple left, IFdbTuple right) - { - if (object.ReferenceEquals(left, null)) return object.ReferenceEquals(right, null); - return !object.ReferenceEquals(right, null) && Equals(left, right, FdbTupleComparisons.Default); - } - - /// Create a range that selects all tuples that are stored under the specified subspace: 'prefix\x00' <= k < 'prefix\xFF' - /// Subspace binary prefix (that will be excluded from the range) - /// Range including all possible tuples starting with the specified prefix. 
- /// FdbTuple.ToRange(Slice.FromAscii("abc")) returns the range [ 'abc\x00', 'abc\xFF' ) - public static FdbKeyRange ToRange(Slice prefix) - { - if (prefix.IsNull) throw new ArgumentNullException("prefix"); - - //note: there is no guarantee that prefix is a valid packed tuple (could be any exotic binary prefix) - - // prefix => [ prefix."\0", prefix."\xFF" ) - return new FdbKeyRange( - prefix + FdbKey.MinValue, - prefix + FdbKey.MaxValue - ); - } - - /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' - /// FdbTuple.ToRange(FdbTuple.Create("a", "b")) includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. - public static FdbKeyRange ToRange([NotNull] IFdbTuple tuple) - { - if (tuple == null) throw new ArgumentNullException("tuple"); - - // tuple => [ packed."\0", packed."\xFF" ) - var packed = tuple.ToSlice(); - - return new FdbKeyRange( - packed + FdbKey.MinValue, - packed + FdbKey.MaxValue - ); - } - - /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' - /// FdbTuple.ToRange(Slice.FromInt32(42), FdbTuple.Create("a", "b")) includes all tuples \x2A.("a", "b", ...), but not the tuple \x2A.("a", "b") itself. - /// If is the packed representation of a tuple, then unpacking the resulting key will produce a valid tuple. If not, then the resulting key will need to be truncated first before unpacking. 
- public static FdbKeyRange ToRange(Slice prefix, [NotNull] IFdbTuple tuple) - { - if (tuple == null) throw new ArgumentNullException("tuple"); - - // tuple => [ prefix.packed."\0", prefix.packed."\xFF" ) - var packed = prefix + tuple.ToSlice(); - - return new FdbKeyRange( - packed + FdbKey.MinValue, - packed + FdbKey.MaxValue - ); - } - - private const string TokenNull = "null"; - private const string TokenDoubleQuote = "\""; - private const string TokenSingleQuote = "'"; - private const string TokenOpenBracket = "{"; - private const string TokenCloseBracket = "}"; - private const string TokenTupleEmpty = "()"; - private const string TokenTupleSep = ", "; - private const string TokenTupleClose = ")"; - private const string TokenTupleSingleClose = ",)"; - - /// Converts any object into a displayble string, for logging/debugging purpose - /// Object to stringify - /// String representation of the object - /// - /// Stringify(null) => "nil" - /// Stringify("hello") => "\"hello\"" - /// Stringify(123) => "123" - /// Stringify(123.4) => "123.4" - /// Stringify(true) => "true" - /// Stringify(Slice) => hexa decimal string ("01 23 45 67 89 AB CD EF") - /// - [NotNull] - internal static string Stringify(object item) - { - if (item == null) return TokenNull; - - var s = item as string; - //TODO: escape the string? 
If it contains \0 or control chars, it can cause problems in the console or debugger output - if (s != null) return TokenDoubleQuote + s + TokenDoubleQuote; /* "hello" */ - - if (item is int) return ((int)item).ToString(null, CultureInfo.InvariantCulture); - if (item is long) return ((long)item).ToString(null, CultureInfo.InvariantCulture); - - if (item is char) return TokenSingleQuote + new string((char)item, 1) + TokenSingleQuote; /* 'X' */ - - if (item is Slice) return ((Slice)item).ToAsciiOrHexaString(); - if (item is byte[]) return Slice.Create((byte[]) item).ToAsciiOrHexaString(); - - if (item is FdbTupleAlias) return TokenOpenBracket + ((FdbTupleAlias)item).ToString() + TokenCloseBracket; /* {X} */ - - // decimals need the "R" representation to have all the digits - if (item is double) return ((double)item).ToString("R", CultureInfo.InvariantCulture); - if (item is float) return ((float)item).ToString("R", CultureInfo.InvariantCulture); - - if (item is Guid) return ((Guid)item).ToString("B", CultureInfo.InstalledUICulture); /* {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx} */ - if (item is Uuid128) return ((Uuid128)item).ToString("B", CultureInfo.InstalledUICulture); /* {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx} */ - if (item is Uuid64) return ((Uuid64)item).ToString("B", CultureInfo.InstalledUICulture); /* {xxxxxxxx-xxxxxxxx} */ - - var f = item as IFormattable; - if (f != null) return f.ToString(null, CultureInfo.InvariantCulture); - - // This will probably not give a meaningful result ... :( - return item.ToString(); - } - - /// Converts a list of object into a displaying string, for loggin/debugging purpose - /// Array containing items to stringfy - /// Start offset of the items to convert - /// Number of items to convert - /// String representation of the tuple in the form "(item1, item2, ... 
itemN,)" - /// ToString(FdbTuple.Create("hello", 123, true, "world")) => "(\"hello\", 123, true, \"world\",) - [NotNull] - internal static string ToString(object[] items, int offset, int count) - { - if (items == null) return String.Empty; - Contract.Requires(offset >= 0 && count >= 0); - - if (count <= 0) - { // empty tuple: "()" - return TokenTupleEmpty; - } - - var sb = new StringBuilder(); - sb.Append('(').Append(Stringify(items[offset++])); - - if (count == 1) - { // singleton tuple : "(X,)" - return sb.Append(TokenTupleSingleClose).ToString(); - } - - while (--count > 0) - { - sb.Append(TokenTupleSep /* ", " */).Append(Stringify(items[offset++])); - } - return sb.Append(TokenTupleClose /* ",)" */).ToString(); - } - - /// Converts a sequence of object into a displaying string, for loggin/debugging purpose - /// Sequence of items to stringfy - /// String representation of the tuple in the form "(item1, item2, ... itemN,)" - /// ToString(FdbTuple.Create("hello", 123, true, "world")) => "(\"hello\", 123, true, \"world\") - [NotNull] - internal static string ToString(IEnumerable items) - { - if (items == null) return String.Empty; - using (var enumerator = items.GetEnumerator()) - { - if (!enumerator.MoveNext()) - { // empty tuple : "()" - return TokenTupleEmpty; - } - - var sb = new StringBuilder(); - sb.Append('(').Append(Stringify(enumerator.Current)); - bool singleton = true; - while (enumerator.MoveNext()) - { - singleton = false; - sb.Append(TokenTupleSep).Append(Stringify(enumerator.Current)); - } - // add a trailing ',' for singletons - return sb.Append(singleton ? TokenTupleSingleClose : TokenTupleClose).ToString(); - } - } - - /// Default (non-optimized) implementation of IFdbTuple.this[long?, long?] - /// Tuple to slice - /// Start offset of the section (included) - /// End offset of the section (included) - /// New tuple only containing items inside this section - [NotNull] - internal static IFdbTuple Splice([NotNull] IFdbTuple tuple, int? 
fromIncluded, int? toExcluded) - { - Contract.Requires(tuple != null); - int count = tuple.Count; - if (count == 0) return FdbTuple.Empty; - - int start = fromIncluded.HasValue ? MapIndexBounded(fromIncluded.Value, count) : 0; - int end = toExcluded.HasValue ? MapIndexBounded(toExcluded.Value, count) : count; - - int len = end - start; - - if (len <= 0) return FdbTuple.Empty; - if (start == 0 && len == count) return tuple; - switch(len) - { - case 1: return new FdbListTuple(new object[] { tuple[start] }, 0, 1); - case 2: return new FdbListTuple(new object[] { tuple[start], tuple[start + 1] }, 0, 2); - default: - { - var items = new object[len]; - //note: can be slow for tuples using linked-lists, but hopefully they will have their own Slice implementation... - int q = start; - for (int p = 0; p < items.Length; p++) - { - items[p] = tuple[q++]; - } - return new FdbListTuple(items, 0, len); - } - } - } - - /// Default (non-optimized) implementation for IFdbTuple.StartsWith() - /// Larger tuple - /// Smaller tuple - /// True if starts with (or is equal to) - internal static bool StartsWith([NotNull] IFdbTuple a, [NotNull] IFdbTuple b) - { - Contract.Requires(a != null && b != null); - if (object.ReferenceEquals(a, b)) return true; - int an = a.Count; - int bn = b.Count; - - if (bn > an) return false; - if (bn == 0) return true; // note: 'an' can only be 0 because of previous test - - for (int i = 0; i < bn; i++) - { - if (!object.Equals(a[i], b[i])) return false; - } - return true; - } - - /// Default (non-optimized) implementation for IFdbTuple.EndsWith() - /// Larger tuple - /// Smaller tuple - /// True if starts with (or is equal to) - internal static bool EndsWith([NotNull] IFdbTuple a, [NotNull] IFdbTuple b) - { - Contract.Requires(a != null && b != null); - if (object.ReferenceEquals(a, b)) return true; - int an = a.Count; - int bn = b.Count; - - if (bn > an) return false; - if (bn == 0) return true; // note: 'an' can only be 0 because of previous test - - int 
offset = an - bn; - for (int i = 0; i < bn; i++) - { - if (!object.Equals(a[offset + i], b[i])) return false; - } - return true; - } - - /// Helper to copy the content of a tuple at a specific position in an array - /// Updated offset just after the last element of the copied tuple - internal static int CopyTo([NotNull] IFdbTuple tuple, [NotNull] object[] array, int offset) - { - Contract.Requires(tuple != null && array != null && offset >= 0); - - foreach (var item in tuple) - { - array[offset++] = item; - } - return offset; - } - - /// Maps a relative index into an absolute index - /// Relative index in the tuple (from the end if negative) - /// Size of the tuple - /// Absolute index from the start of the tuple, or exception if outside of the tuple - /// If the absolute index is outside of the tuple (<0 or >=) - internal static int MapIndex(int index, int count) - { - int offset = index; - if (offset < 0) offset += count; - if (offset < 0 || offset >= count) FailIndexOutOfRange(index, count); - return offset; - } - - /// Maps a relative index into an absolute index - /// Relative index in the tuple (from the end if negative) - /// Size of the tuple - /// Absolute index from the start of the tuple. 
Truncated to 0 if index is before the start of the tuple, or to if the index is after the end of the tuple - internal static int MapIndexBounded(int index, int count) - { - if (index < 0) index += count; - return Math.Max(Math.Min(index, count), 0); - } - - [ContractAnnotation("=> halt")] - internal static void FailIndexOutOfRange(int index, int count) - { - throw new IndexOutOfRangeException(String.Format("Index {0} is outside of the tuple's range (0..{1})", index, count - 1)); - } - - internal static int CombineHashCodes(int h1, int h2) - { - return ((h1 << 5) + h1) ^ h2; - } - - internal static int CombineHashCodes(int h1, int h2, int h3) - { - int h = ((h1 << 5) + h1) ^ h2; - return ((h << 5) + h) ^ h3; - } - - internal static int CombineHashCodes(int h1, int h2, int h3, int h4) - { - return CombineHashCodes(CombineHashCodes(h1, h2), CombineHashCodes(h3, h4)); - } - - internal static int CombineHashCodes(int h1, int h2, int h3, int h4, int h5) - { - return CombineHashCodes(CombineHashCodes(h1, h2, h3), CombineHashCodes(h4, h5)); - } - - internal static bool Equals(IFdbTuple left, object other, [NotNull] IEqualityComparer comparer) - { - return object.ReferenceEquals(left, null) ? 
other == null : FdbTuple.Equals(left, other as IFdbTuple, comparer); - } - - internal static bool Equals(IFdbTuple x, IFdbTuple y, [NotNull] IEqualityComparer comparer) - { - if (object.ReferenceEquals(x, y)) return true; - if (object.ReferenceEquals(x, null) || object.ReferenceEquals(y, null)) return false; - - return x.Count == y.Count && DeepEquals(x, y, comparer); - } - - internal static bool DeepEquals([NotNull] IFdbTuple x, [NotNull] IFdbTuple y, [NotNull] IEqualityComparer comparer) - { - Contract.Requires(x != null && y != null && comparer != null); - - using (var xs = x.GetEnumerator()) - using (var ys = y.GetEnumerator()) - { - while (xs.MoveNext()) - { - if (!ys.MoveNext()) return false; - - return comparer.Equals(xs.Current, ys.Current); - } - - return !ys.MoveNext(); - } - } - - internal static int StructuralGetHashCode(IFdbTuple tuple, [NotNull] IEqualityComparer comparer) - { - Contract.Requires(comparer != null); - - if (object.ReferenceEquals(tuple, null)) - { - return comparer.GetHashCode(null); - } - - int h = 0; - foreach(var item in tuple) - { - h = CombineHashCodes(h, comparer.GetHashCode(item)); - } - return h; - } - - internal static int StructuralCompare(IFdbTuple x, IFdbTuple y, [NotNull] IComparer comparer) - { - Contract.Requires(comparer != null); - - if (object.ReferenceEquals(x, y)) return 0; - if (object.ReferenceEquals(x, null)) return -1; - if (object.ReferenceEquals(y, null)) return 1; - - using (var xs = x.GetEnumerator()) - using (var ys = y.GetEnumerator()) - { - while (xs.MoveNext()) - { - if (!ys.MoveNext()) return 1; - - int cmp = comparer.Compare(xs.Current, ys.Current); - if (cmp != 0) return cmp; - - } - return ys.MoveNext() ? 
-1 : 0; - } - } - - #endregion - - } - -} diff --git a/FoundationDB.Client/Layers/Tuples/FdbTuplePackers.cs b/FoundationDB.Client/Layers/Tuples/FdbTuplePackers.cs deleted file mode 100644 index ab944cbf0..000000000 --- a/FoundationDB.Client/Layers/Tuples/FdbTuplePackers.cs +++ /dev/null @@ -1,1270 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Layers.Tuples -{ - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.Collections.Generic; - using System.Globalization; - using System.Linq.Expressions; - using System.Reflection; - - /// Helper methods used during serialization of values to the tuple binary format - public static class FdbTuplePackers - { - - #region Serializers... - - public delegate void Encoder(ref TupleWriter writer, T value); - - /// Returns a lambda that will be able to serialize values of type - /// Type of values to serialize - /// Reusable action that knows how to serialize values of type into binary buffers, or an exception if the type is not supported - [ContractAnnotation("true => notnull")] - internal static Encoder GetSerializer(bool required) - { - var encoder = (Encoder)GetSerializerFor(typeof(T)); - if (encoder == null && required) - { - encoder = delegate { throw new InvalidOperationException(String.Format("Does not know how to serialize values of type {0} into keys", typeof(T).Name)); }; - } - return encoder; - } - - private static Delegate GetSerializerFor([NotNull] Type type) - { - if (type == null) throw new ArgumentNullException("type"); - - if (type == typeof(object)) - { // return a generic serializer that will inspect the runtime type of the object - return new Encoder(FdbTuplePackers.SerializeObjectTo); - } - - var typeArgs = new[] { typeof(TupleWriter).MakeByRefType(), type }; - var method = typeof(FdbTuplePackers).GetMethod("SerializeTo", BindingFlags.Static | BindingFlags.Public, null, typeArgs, null); - if (method != null) - { // we have a direct serializer - return method.CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); - } - - // maybe if it is a tuple ? 
- if (typeof(IFdbTuple).IsAssignableFrom(type)) - { - method = typeof(FdbTuplePackers).GetMethod("SerializeTupleTo", BindingFlags.Static | BindingFlags.Public); - if (method != null) - { - return method.MakeGenericMethod(type).CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); - } - } - - if (typeof(ITupleFormattable).IsAssignableFrom(type)) - { - method = typeof(FdbTuplePackers).GetMethod("SerializeFormattableTo", BindingFlags.Static | BindingFlags.Public); - if (method != null) - { - return method.CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); - } - } - - if (typeof(IFdbKey).IsAssignableFrom(type)) - { - method = typeof(FdbTuplePackers).GetMethod("SerializeFdbKeyTo", BindingFlags.Static | BindingFlags.Public); - if (method != null) - { - return method.CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); - } - } - - var nullableType = Nullable.GetUnderlyingType(type); - if (nullableType != null) - { // nullable types can reuse the underlying type serializer - method = typeof(FdbTuplePackers).GetMethod("SerializeNullableTo", BindingFlags.Static | BindingFlags.Public); - if (method != null) - { - return method.MakeGenericMethod(nullableType).CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); - } - } - - // TODO: look for a static SerializeTo(BWB, T) method on the type itself ? - - // no luck.. - return null; - } - - /// Serialize a nullable value, by checking for null at runtime - /// Underling type of the nullable type - /// Target buffer - /// Nullable value to serialize - /// Uses the underlying type's serializer if the value is not null - public static void SerializeNullableTo(ref TupleWriter writer, T? 
value) - where T : struct - { - if (value == null) - FdbTupleParser.WriteNil(ref writer); - else - FdbTuplePacker.Encoder(ref writer, value.Value); - } - - /// Serialize an untyped object, by checking its type at runtime - /// Target buffer - /// Untyped value whose type will be inspected at runtime - /// May throw at runtime if the type is not supported - public static void SerializeObjectTo(ref TupleWriter writer, object value) - { - if (value == null) - { // null value - // includes all null references to ref types, as nullables where HasValue == false - FdbTupleParser.WriteNil(ref writer); - return; - } - - switch (Type.GetTypeCode(value.GetType())) - { - case TypeCode.Empty: - case TypeCode.Object: - { - byte[] bytes = value as byte[]; - if (bytes != null) - { - SerializeTo(ref writer, bytes); - return; - } - - if (value is Slice) - { - SerializeTo(ref writer, (Slice)value); - return; - } - - if (value is Guid) - { - SerializeTo(ref writer, (Guid)value); - return; - } - - if (value is Uuid128) - { - SerializeTo(ref writer, (Uuid128)value); - return; - } - - if (value is Uuid64) - { - SerializeTo(ref writer, (Uuid64)value); - return; - } - - if (value is TimeSpan) - { - SerializeTo(ref writer, (TimeSpan)value); - return; - } - - if (value is FdbTupleAlias) - { - SerializeTo(ref writer, (FdbTupleAlias)value); - return; - } - - break; - } - case TypeCode.DBNull: - { // same as null - FdbTupleParser.WriteNil(ref writer); - return; - } - case TypeCode.Boolean: - { - SerializeTo(ref writer, (bool)value); - return; - } - case TypeCode.Char: - { - // should be treated as a string with only one char - SerializeTo(ref writer, (char)value); - return; - } - case TypeCode.SByte: - { - SerializeTo(ref writer, (sbyte)value); - return; - } - case TypeCode.Byte: - { - SerializeTo(ref writer, (byte)value); - return; - } - case TypeCode.Int16: - { - SerializeTo(ref writer, (short)value); - return; - } - case TypeCode.UInt16: - { - SerializeTo(ref writer, (ushort)value); - 
return; - } - case TypeCode.Int32: - { - SerializeTo(ref writer, (int)value); - return; - } - case TypeCode.UInt32: - { - SerializeTo(ref writer, (uint)value); - return; - } - case TypeCode.Int64: - { - SerializeTo(ref writer, (long)value); - return; - } - case TypeCode.UInt64: - { - SerializeTo(ref writer, (ulong)value); - return; - } - case TypeCode.String: - { - SerializeTo(ref writer, value as string); - return; - } - case TypeCode.DateTime: - { - SerializeTo(ref writer, (DateTime)value); - return; - } - case TypeCode.Double: - { - SerializeTo(ref writer, (double)value); - return; - } - case TypeCode.Single: - { - SerializeTo(ref writer, (float)value); - return; - } - } - - var tuple = value as IFdbTuple; - if (tuple != null) - { - SerializeTupleTo(ref writer, tuple); - return; - } - - var fmt = value as ITupleFormattable; - if (fmt != null) - { - tuple = fmt.ToTuple(); - if (tuple == null) throw new InvalidOperationException(String.Format("An instance of type {0} returned a null Tuple while serialiazing", value.GetType().Name)); - SerializeTupleTo(ref writer, tuple); - return; - } - - // Not Supported ? 
- throw new NotSupportedException(String.Format("Doesn't know how to serialize objects of type {0} into Tuple Encoding format", value.GetType().Name)); - } - - /// Writes a slice as a byte[] array - public static void SerializeTo(ref TupleWriter writer, Slice value) - { - if (value.IsNull) - { - FdbTupleParser.WriteNil(ref writer); - } - else if (value.Offset == 0 && value.Count == value.Array.Length) - { - FdbTupleParser.WriteBytes(ref writer, value.Array); - } - else - { - FdbTupleParser.WriteBytes(ref writer, value.Array, value.Offset, value.Count); - } - } - - /// Writes a byte[] array - public static void SerializeTo(ref TupleWriter writer, byte[] value) - { - FdbTupleParser.WriteBytes(ref writer, value); - } - - /// Writes an array segment as a byte[] array - public static void SerializeTo(ref TupleWriter writer, ArraySegment value) - { - SerializeTo(ref writer, Slice.Create(value)); - } - - /// Writes a char as Unicode string - public static void SerializeTo(ref TupleWriter writer, char value) - { - FdbTupleParser.WriteChar(ref writer, value); - } - - /// Writes a boolean as an integer - /// Uses 0 for false, and -1 for true - public static void SerializeTo(ref TupleWriter writer, bool value) - { - FdbTupleParser.WriteBool(ref writer, value); - } - - /// Writes a boolean as an integer or null - public static void SerializeTo(ref TupleWriter writer, bool? 
value) - { - if (value == null) - { // null => 00 - FdbTupleParser.WriteNil(ref writer); - } - else - { - FdbTupleParser.WriteBool(ref writer, value.Value); - } - } - - /// Writes a signed byte as an integer - public static void SerializeTo(ref TupleWriter writer, sbyte value) - { - FdbTupleParser.WriteInt32(ref writer, value); - } - - /// Writes an unsigned byte as an integer - public static void SerializeTo(ref TupleWriter writer, byte value) - { - FdbTupleParser.WriteByte(ref writer, value); - } - - /// Writes a signed word as an integer - public static void SerializeTo(ref TupleWriter writer, short value) - { - FdbTupleParser.WriteInt32(ref writer, value); - } - - /// Writes an unsigned word as an integer - public static void SerializeTo(ref TupleWriter writer, ushort value) - { - FdbTupleParser.WriteUInt32(ref writer, value); - } - - /// Writes a signed int as an integer - public static void SerializeTo(ref TupleWriter writer, int value) - { - FdbTupleParser.WriteInt32(ref writer, value); - } - - /// Writes an unsigned int as an integer - public static void SerializeTo(ref TupleWriter writer, uint value) - { - FdbTupleParser.WriteUInt32(ref writer, value); - } - - /// Writes a signed long as an integer - public static void SerializeTo(ref TupleWriter writer, long value) - { - FdbTupleParser.WriteInt64(ref writer, value); - } - - /// Writes an unsigned long as an integer - public static void SerializeTo(ref TupleWriter writer, ulong value) - { - FdbTupleParser.WriteUInt64(ref writer, value); - } - - /// Writes a 32-bit IEEE floating point number - public static void SerializeTo(ref TupleWriter writer, float value) - { - FdbTupleParser.WriteSingle(ref writer, value); - } - - /// Writes a 64-bit IEEE floating point number - public static void SerializeTo(ref TupleWriter writer, double value) - { - FdbTupleParser.WriteDouble(ref writer, value); - } - - /// Writes a string as an Unicode string - public static void SerializeTo(ref TupleWriter writer, string value) - 
{ - FdbTupleParser.WriteString(ref writer, value); - } - - /// Writes a DateTime converted to the number of days since the Unix Epoch and stored as a 64-bit decimal - public static void SerializeTo(ref TupleWriter writer, DateTime value) - { - // The problem of serializing DateTime: TimeZone? Precision? - // - Since we are going to lose the TimeZone infos anyway, we can just store everything in UTC and let the caller deal with it - // - DateTime in .NET uses Ticks which produce numbers too large to fit in the 56 bits available in JavaScript - // - Most other *nix uses the number of milliseconds since 1970-Jan-01 UTC, but if we store as an integer we will lose some precision (rounded to nearest millisecond) - // - We could store the number of milliseconds as a floating point value, which would require support of Floating Points in the Tuple Encoding (currently a Draft) - // - Other database engines store dates as a number of DAYS since Epoch, using a floating point number. This allows for quickly extracting the date by truncating the value, and the time by using the decimal part - - // Right now, we will store the date as the number of DAYS since Epoch, using a 64-bit float. - // => storing a number of ticks would be MS-only anyway (56-bit limit in JS) - // => JS binding MAY support decoding of 64-bit floats in the future, in which case the value would be preserved exactly. 
- - const long UNIX_EPOCH_EPOCH = 621355968000000000L; - double ms = (value.ToUniversalTime().Ticks - UNIX_EPOCH_EPOCH) / (double)TimeSpan.TicksPerDay; - - FdbTupleParser.WriteDouble(ref writer, ms); - } - - /// Writes a TimeSpan converted to to a number seconds encoded as a 64-bit decimal - public static void SerializeTo(ref TupleWriter writer, TimeSpan value) - { - // We have the same precision problem with storing DateTimes: - // - Storing the number of ticks keeps the exact value, but is Windows-centric - // - Storing the number of milliseconds as an integer will round the precision to 1 millisecond, which is not acceptable - // - We could store the the number of milliseconds as a floating point value, which would require support of Floating Points in the Tuple Encoding (currently a Draft) - // - It is frequent for JSON APIs and other database engines to represent durations as a number of SECONDS, using a floating point number. - - // Right now, we will store the duration as the number of seconds, using a 64-bit float - - FdbTupleParser.WriteDouble(ref writer, value.TotalSeconds); - } - - /// Writes a Guid as a 128-bit UUID - public static void SerializeTo(ref TupleWriter writer, Guid value) - { - //REVIEW: should we consider serializing Guid.Empty as <14> (integer 0) ? or maybe <01><00> (empty bytestring) ? 
- // => could spare ~16 bytes per key in indexes on GUID properties that are frequently missing or empty (== default(Guid)) - FdbTupleParser.WriteGuid(ref writer, value); - } - - /// Writes a Uuid as a 128-bit UUID - public static void SerializeTo(ref TupleWriter writer, Uuid128 value) - { - FdbTupleParser.WriteUuid128(ref writer, value); - } - - /// Writes a Uuid as a 64-bit UUID - public static void SerializeTo(ref TupleWriter writer, Uuid64 value) - { - FdbTupleParser.WriteUuid64(ref writer, value); - } - - /// Writes an IPaddress as a 32-bit (IPv4) or 128-bit (IPv6) byte array - public static void SerializeTo(ref TupleWriter writer, System.Net.IPAddress value) - { - FdbTupleParser.WriteBytes(ref writer, value != null ? value.GetAddressBytes() : null); - } - - public static void SerializeTo(ref TupleWriter writer, FdbTupleAlias value) - { - Contract.Requires(Enum.IsDefined(typeof(FdbTupleAlias), value)); - - writer.Output.WriteByte((byte)value); - } - - public static void SerializeTupleTo(ref TupleWriter writer, TTuple tuple) - where TTuple : IFdbTuple - { - Contract.Requires(tuple != null); - - FdbTupleParser.BeginTuple(ref writer); - tuple.PackTo(ref writer); - FdbTupleParser.EndTuple(ref writer); - } - - public static void SerializeFormattableTo(ref TupleWriter writer, ITupleFormattable formattable) - { - if (formattable == null) - { - FdbTupleParser.WriteNil(ref writer); - return; - } - - var tuple = formattable.ToTuple(); - if (tuple == null) throw new InvalidOperationException(String.Format("Custom formatter {0}.ToTuple() cannot return null", formattable.GetType().Name)); - - FdbTupleParser.BeginTuple(ref writer); - tuple.PackTo(ref writer); - FdbTupleParser.EndTuple(ref writer); - } - - public static void SerializeFdbKeyTo(ref TupleWriter writer, IFdbKey key) - { - Contract.Requires(key != null); - var slice = key.ToFoundationDbKey(); - FdbTupleParser.WriteBytes(ref writer, slice); - } - - #endregion - - #region Deserializers... 
- - private static readonly Dictionary s_sliceUnpackers = InitializeDefaultUnpackers(); - - [NotNull] - private static Dictionary InitializeDefaultUnpackers() - { - var map = new Dictionary(); - - map[typeof(Slice)] = new Func(FdbTuplePackers.DeserializeSlice); - map[typeof(byte[])] = new Func(FdbTuplePackers.DeserializeBytes); - map[typeof(bool)] = new Func(FdbTuplePackers.DeserializeBoolean); - map[typeof(string)] = new Func(FdbTuplePackers.DeserializeString); - map[typeof(sbyte)] = new Func(FdbTuplePackers.DeserializeSByte); - map[typeof(short)] = new Func(FdbTuplePackers.DeserializeInt16); - map[typeof(int)] = new Func(FdbTuplePackers.DeserializeInt32); - map[typeof(long)] = new Func(FdbTuplePackers.DeserializeInt64); - map[typeof(byte)] = new Func(FdbTuplePackers.DeserializeByte); - map[typeof(ushort)] = new Func(FdbTuplePackers.DeserializeUInt16); - map[typeof(uint)] = new Func(FdbTuplePackers.DeserializeUInt32); - map[typeof(ulong)] = new Func(FdbTuplePackers.DeserializeUInt64); - map[typeof(float)] = new Func(FdbTuplePackers.DeserializeSingle); - map[typeof(double)] = new Func(FdbTuplePackers.DeserializeDouble); - map[typeof(Guid)] = new Func(FdbTuplePackers.DeserializeGuid); - map[typeof(Uuid128)] = new Func(FdbTuplePackers.DeserializeUuid128); - map[typeof(Uuid64)] = new Func(FdbTuplePackers.DeserializeUuid64); - map[typeof(TimeSpan)] = new Func(FdbTuplePackers.DeserializeTimeSpan); - map[typeof(DateTime)] = new Func(FdbTuplePackers.DeserializeDateTime); - map[typeof(System.Net.IPAddress)] = new Func(FdbTuplePackers.DeserializeIPAddress); - - // add Nullable versions for all these types - return map; - } - - /// Returns a lambda that will be able to serialize values of type - /// Type of values to serialize - /// Reusable action that knows how to serialize values of type into binary buffers, or an exception if the type is not supported - [NotNull] - internal static Func GetDeserializer(bool required) - { - Type type = typeof(T); - - Delegate decoder; - if 
(s_sliceUnpackers.TryGetValue(type, out decoder)) - { - return (Func)decoder; - } - - //TODO: handle nullable types? - var underlyingType = Nullable.GetUnderlyingType(typeof(T)); - if (underlyingType != null && s_sliceUnpackers.TryGetValue(underlyingType, out decoder)) - { - decoder = MakeNullableDeserializer(type, underlyingType, decoder); - if (decoder != null) return (Func)decoder; - } - - if (required) - { - return (_) => { throw new InvalidOperationException(String.Format("Does not know how to deserialize keys into values of type {0}", typeof(T).Name)); }; - } - else - { // when all else fails... - return (value) => FdbConverters.ConvertBoxed(DeserializeBoxed(value)); - } - } - - /// Check if a tuple segment is the equivalent of 'Nil' - internal static bool IsNilSegment(Slice slice) - { - return slice.IsNullOrEmpty || slice[0] == FdbTupleTypes.Nil; - } - - private static Delegate MakeNullableDeserializer([NotNull] Type nullableType, [NotNull] Type type, [NotNull] Delegate decoder) - { - Contract.Requires(nullableType != null && type != null && decoder != null); - // We have a Decoder of T, but we have to transform it into a Decoder for Nullable, which returns null if the slice is "nil", or falls back to the underlying decoder if the slice contains something - - var prmSlice = Expression.Parameter(typeof(Slice), "slice"); - var body = Expression.Condition( - // IsNilSegment(slice) ? - Expression.Call(typeof(FdbTuplePackers).GetMethod("IsNilSegment", BindingFlags.Static | BindingFlags.NonPublic), prmSlice), - // True => default(Nullable) - Expression.Default(nullableType), - // False => decoder(slice) - Expression.Convert(Expression.Invoke(Expression.Constant(decoder), prmSlice), nullableType) - ); - - return Expression.Lambda(body, prmSlice).Compile(); - } - - /// Deserialize a packed element into an object by choosing the most appropriate type at runtime - /// Slice that contains a single packed element - /// Decoded element, in the type that is the best fit. 
- /// You should avoid working with untyped values as much as possible! Blindly casting the returned object may be problematic because this method may need to return very large intergers as Int64 or even UInt64. - [CanBeNull] - public static object DeserializeBoxed(Slice slice) - { - if (slice.IsNullOrEmpty) return null; - - int type = slice[0]; - if (type <= FdbTupleTypes.IntPos8) - { - if (type >= FdbTupleTypes.IntNeg8) return FdbTupleParser.ParseInt64(type, slice); - - switch (type) - { - case FdbTupleTypes.Nil: return null; - case FdbTupleTypes.Bytes: return FdbTupleParser.ParseBytes(slice); - case FdbTupleTypes.Utf8: return FdbTupleParser.ParseUnicode(slice); - case FdbTupleTypes.TupleStart: return FdbTupleParser.ParseTuple(slice); - } - } - else - { - switch (type) - { - case FdbTupleTypes.Single: return FdbTupleParser.ParseSingle(slice); - case FdbTupleTypes.Double: return FdbTupleParser.ParseDouble(slice); - case FdbTupleTypes.Uuid128: return FdbTupleParser.ParseGuid(slice); - case FdbTupleTypes.Uuid64: return FdbTupleParser.ParseUuid64(slice); - case FdbTupleTypes.AliasDirectory: return FdbTupleAlias.Directory; - case FdbTupleTypes.AliasSystem: return FdbTupleAlias.System; - } - } - - throw new FormatException(String.Format("Cannot convert tuple segment with unknown type code {0}", type)); - } - - /// Deserialize a slice into a type that implements ITupleFormattable - /// Type of a class that must implement ITupleFormattable and have a default constructor - /// Slice that contains a single packed element - /// Decoded value of type - /// The type must have a default parameter-less constructor in order to be created. 
- public static T DeserializeFormattable(Slice slice) - where T : ITupleFormattable, new() - { - if (FdbTuplePackers.IsNilSegment(slice)) - { - return default(T); - } - - var tuple = FdbTupleParser.ParseTuple(slice); - var value = new T(); - value.FromTuple(tuple); - return value; - } - - /// Deserialize a slice into a type that implements ITupleFormattable, using a custom factory method - /// Type of a class that must implement ITupleFormattable - /// Slice that contains a single packed element - /// Lambda that will be called to construct a new instance of values of type - /// Decoded value of type - public static T DeserializeFormattable(Slice slice, [NotNull] Func factory) - where T : ITupleFormattable - { - var tuple = FdbTupleParser.ParseTuple(slice); - var value = factory(); - value.FromTuple(tuple); - return value; - } - - /// Deserialize a tuple segment into a Slice - public static Slice DeserializeSlice(Slice slice) - { - // Convert the tuple value into a sensible Slice representation. - // The behavior should be equivalent to calling the corresponding Slice.From{TYPE}(TYPE value) - - if (slice.IsNullOrEmpty) return Slice.Nil; //TODO: fail ? 
- - byte type = slice[0]; - switch(type) - { - case FdbTupleTypes.Nil: return Slice.Nil; - case FdbTupleTypes.Bytes: return FdbTupleParser.ParseBytes(slice); - case FdbTupleTypes.Utf8: return Slice.FromString(FdbTupleParser.ParseUnicode(slice)); - - case FdbTupleTypes.Single: return Slice.FromSingle(FdbTupleParser.ParseSingle(slice)); - case FdbTupleTypes.Double: return Slice.FromDouble(FdbTupleParser.ParseDouble(slice)); - - case FdbTupleTypes.Uuid128: return Slice.FromGuid(FdbTupleParser.ParseGuid(slice)); - case FdbTupleTypes.Uuid64: return Slice.FromUuid64(FdbTupleParser.ParseUuid64(slice)); - } - - if (type <= FdbTupleTypes.IntPos8 && type >= FdbTupleTypes.IntNeg8) - { - if (type >= FdbTupleTypes.IntBase) return Slice.FromInt64(DeserializeInt64(slice)); - return Slice.FromUInt64(DeserializeUInt64(slice)); - } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into a Slice", type)); - } - - /// Deserialize a tuple segment into a byte array - [CanBeNull] //REVIEW: because of Slice.GetBytes() - public static byte[] DeserializeBytes(Slice slice) - { - return DeserializeSlice(slice).GetBytes(); - } - - /// Deserialize a tuple segment into a tuple - [CanBeNull] - public static IFdbTuple DeserializeTuple(Slice slice) - { - if (slice.IsNullOrEmpty) return null; - - byte type = slice[0]; - switch(type) - { - case FdbTupleTypes.Nil: - { - return null; - } - case FdbTupleTypes.Bytes: - { - return FdbTuple.Unpack(FdbTupleParser.ParseBytes(slice)); - } - case FdbTupleTypes.TupleStart: - { - return FdbTupleParser.ParseTuple(slice); - } - } - - throw new FormatException("Cannot convert tuple segment into a Tuple"); - } - - /// Deserialize a tuple segment into a Boolean - /// Slice that contains a single packed element - public static bool DeserializeBoolean(Slice slice) - { - if (slice.IsNullOrEmpty) return false; //TODO: fail ? 
- - byte type = slice[0]; - - // Booleans are usually encoded as integers, with 0 for False (<14>) and 1 for True (<15><01>) - if (type <= FdbTupleTypes.IntPos8 && type >= FdbTupleTypes.IntNeg8) - { - //note: DeserializeInt64 handles most cases - return 0 != DeserializeInt64(slice); - } - - switch (type) - { - case FdbTupleTypes.Bytes: - { // empty is false, all other is true - return slice.Count != 2; // <01><00> - } - case FdbTupleTypes.Utf8: - {// empty is false, all other is true - return slice.Count != 2; // <02><00> - } - case FdbTupleTypes.Single: - { - //TODO: should NaN considered to be false ? - return 0f != FdbTupleParser.ParseSingle(slice); - } - case FdbTupleTypes.Double: - { - //TODO: should NaN considered to be false ? - return 0f != FdbTupleParser.ParseDouble(slice); - } - } - - //TODO: should we handle weird cases like strings "True" and "False"? - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into a boolean", type)); - } - - /// Deserialize a tuple segment into an Int16 - /// Slice that contains a single packed element - public static sbyte DeserializeSByte(Slice slice) - { - return checked((sbyte)DeserializeInt64(slice)); - } - - /// Deserialize a tuple segment into an Int16 - /// Slice that contains a single packed element - public static short DeserializeInt16(Slice slice) - { - return checked((short)DeserializeInt64(slice)); - } - - /// Deserialize a tuple segment into an Int32 - /// Slice that contains a single packed element - public static int DeserializeInt32(Slice slice) - { - return checked((int)DeserializeInt64(slice)); - } - - /// Deserialize a tuple segment into an Int64 - /// Slice that contains a single packed element - public static long DeserializeInt64(Slice slice) - { - if (slice.IsNullOrEmpty) return 0L; //TODO: fail ? 
- - int type = slice[0]; - if (type <= FdbTupleTypes.IntPos8) - { - if (type >= FdbTupleTypes.IntNeg8) return FdbTupleParser.ParseInt64(type, slice); - - switch (type) - { - case FdbTupleTypes.Nil: return 0; - case FdbTupleTypes.Bytes: return long.Parse(FdbTupleParser.ParseAscii(slice), CultureInfo.InvariantCulture); - case FdbTupleTypes.Utf8: return long.Parse(FdbTupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); - } - } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into a signed integer", type)); - } - - /// Deserialize a tuple segment into an UInt32 - /// Slice that contains a single packed element - public static byte DeserializeByte(Slice slice) - { - return checked((byte)DeserializeUInt64(slice)); - } - - /// Deserialize a tuple segment into an UInt32 - /// Slice that contains a single packed element - public static ushort DeserializeUInt16(Slice slice) - { - return checked((ushort)DeserializeUInt64(slice)); - } - - /// Deserialize a slice into an UInt32 - /// Slice that contains a single packed element - public static uint DeserializeUInt32(Slice slice) - { - return checked((uint)DeserializeUInt64(slice)); - } - - /// Deserialize a tuple segment into an UInt64 - /// Slice that contains a single packed element - public static ulong DeserializeUInt64(Slice slice) - { - if (slice.IsNullOrEmpty) return 0UL; //TODO: fail ? 
- - int type = slice[0]; - if (type <= FdbTupleTypes.IntPos8) - { - if (type >= FdbTupleTypes.IntZero) return (ulong)FdbTupleParser.ParseInt64(type, slice); - if (type < FdbTupleTypes.IntZero) throw new OverflowException(); // negative values - - switch (type) - { - case FdbTupleTypes.Nil: return 0; - case FdbTupleTypes.Bytes: return ulong.Parse(FdbTupleParser.ParseAscii(slice), CultureInfo.InvariantCulture); - case FdbTupleTypes.Utf8: return ulong.Parse(FdbTupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); - } - } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into an unsigned integer", type)); - } - - public static float DeserializeSingle(Slice slice) - { - if (slice.IsNullOrEmpty) return 0; - - byte type = slice[0]; - switch (type) - { - case FdbTupleTypes.Nil: - { - return 0; - } - case FdbTupleTypes.Utf8: - { - return Single.Parse(FdbTupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); - } - case FdbTupleTypes.Single: - { - return FdbTupleParser.ParseSingle(slice); - } - case FdbTupleTypes.Double: - { - return (float)FdbTupleParser.ParseDouble(slice); - } - } - - if (type <= FdbTupleTypes.IntPos8 && type >= FdbTupleTypes.IntNeg8) - { - return checked((float)DeserializeInt64(slice)); - } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into a Single", type)); - } - - public static double DeserializeDouble(Slice slice) - { - if (slice.IsNullOrEmpty) return 0; - - byte type = slice[0]; - switch(type) - { - case FdbTupleTypes.Nil: - { - return 0; - } - case FdbTupleTypes.Utf8: - { - return Double.Parse(FdbTupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); - } - case FdbTupleTypes.Single: - { - return (double)FdbTupleParser.ParseSingle(slice); - } - case FdbTupleTypes.Double: - { - return FdbTupleParser.ParseDouble(slice); - } - } - - if (type <= FdbTupleTypes.IntPos8 && type >= FdbTupleTypes.IntNeg8) - { - return 
checked((double)DeserializeInt64(slice)); - } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into a Double", type)); - } - - /// Deserialize a tuple segment into a DateTime (UTC) - /// Slice that contains a single packed element - /// DateTime in UTC - /// The returned DateTime will be in UTC, because the original TimeZone details are lost. - public static DateTime DeserializeDateTime(Slice slice) - { - if (slice.IsNullOrEmpty) return DateTime.MinValue; //TODO: fail ? - - byte type = slice[0]; - - switch(type) - { - case FdbTupleTypes.Nil: - { - return DateTime.MinValue; - } - - case FdbTupleTypes.Utf8: - { // we only support ISO 8601 dates. For ex: YYYY-MM-DDTHH:MM:SS.fffff" - string str = FdbTupleParser.ParseUnicode(slice); - return DateTime.Parse(str, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind); - } - - case FdbTupleTypes.Double: - { // Number of days since Epoch - const long UNIX_EPOCH_TICKS = 621355968000000000L; - //note: we can't user TimeSpan.FromDays(...) because it rounds to the nearest millisecond! - long ticks = UNIX_EPOCH_TICKS + (long)(FdbTupleParser.ParseDouble(slice) * TimeSpan.TicksPerDay); - return new DateTime(ticks, DateTimeKind.Utc); - } - } - - // If we have an integer, we consider it to be a number of Ticks (Windows Only) - if (type <= FdbTupleTypes.IntPos8 && type >= FdbTupleTypes.IntNeg8) - { - return new DateTime(DeserializeInt64(slice), DateTimeKind.Utc); - } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into a DateTime", type)); - } - - /// Deserialize a tuple segment into a TimeSpan - /// Slice that contains a single packed element - public static TimeSpan DeserializeTimeSpan(Slice slice) - { - if (slice.IsNullOrEmpty) return TimeSpan.Zero; //TODO: fail ? - - byte type = slice[0]; - - // We serialize TimeSpans as number of seconds in a 64-bit float. 
- - switch(type) - { - case FdbTupleTypes.Nil: - { - return TimeSpan.Zero; - } - case FdbTupleTypes.Utf8: - { // "HH:MM:SS.fffff" - return TimeSpan.Parse(FdbTupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); - } - case FdbTupleTypes.Double: - { // Number of seconds - //note: We can't use TimeSpan.FromSeconds(...) because it rounds to the nearest millisecond! - return new TimeSpan((long)(FdbTupleParser.ParseDouble(slice) * (double)TimeSpan.TicksPerSecond)); - } - } - - // If we have an integer, we consider it to be a number of Ticks (Windows Only) - if (type <= FdbTupleTypes.IntPos8 && type >= FdbTupleTypes.IntNeg8) - { - return new TimeSpan(DeserializeInt64(slice)); - } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into a TimeSpan", type)); - } - - /// Deserialize a tuple segment into a Unicode string - /// Slice that contains a single packed element - [CanBeNull] - public static string DeserializeString(Slice slice) - { - if (slice.IsNullOrEmpty) return null; - - byte type = slice[0]; - switch (type) - { - case FdbTupleTypes.Nil: - { - return null; - } - case FdbTupleTypes.Bytes: - { - return FdbTupleParser.ParseAscii(slice); - } - case FdbTupleTypes.Utf8: - { - return FdbTupleParser.ParseUnicode(slice); - } - case FdbTupleTypes.Single: - { - return FdbTupleParser.ParseSingle(slice).ToString(CultureInfo.InvariantCulture); - } - case FdbTupleTypes.Double: - { - return FdbTupleParser.ParseDouble(slice).ToString(CultureInfo.InvariantCulture); - } - case FdbTupleTypes.Uuid128: - { - return FdbTupleParser.ParseGuid(slice).ToString(); - } - case FdbTupleTypes.Uuid64: - { - return FdbTupleParser.ParseUuid64(slice).ToString(); - } - } - - if (type <= FdbTupleTypes.IntPos8 && type >= FdbTupleTypes.IntNeg8) - { - return FdbTupleParser.ParseInt64(type, slice).ToString(CultureInfo.InvariantCulture); - } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into a String", type)); - } 
- - /// Deserialize a tuple segment into Guid - /// Slice that contains a single packed element - public static Guid DeserializeGuid(Slice slice) - { - if (slice.IsNullOrEmpty) return Guid.Empty; - - int type = slice[0]; - - switch (type) - { - case FdbTupleTypes.Bytes: - { - return Guid.Parse(FdbTupleParser.ParseAscii(slice)); - } - case FdbTupleTypes.Utf8: - { - return Guid.Parse(FdbTupleParser.ParseUnicode(slice)); - } - case FdbTupleTypes.Uuid128: - { - return FdbTupleParser.ParseGuid(slice); - } - //REVIEW: should we allow converting a Uuid64 into a Guid? This looks more like a bug than an expected behavior... - } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into a System.Guid", type)); - } - - /// Deserialize a tuple segment into 128-bit UUID - /// Slice that contains a single packed element - public static Uuid128 DeserializeUuid128(Slice slice) - { - if (slice.IsNullOrEmpty) return Uuid128.Empty; - - int type = slice[0]; - - switch (type) - { - case FdbTupleTypes.Bytes: - { // expect binary representation as a 16-byte array - return new Uuid128(FdbTupleParser.ParseBytes(slice)); - } - case FdbTupleTypes.Utf8: - { // expect text representation - return new Uuid128(FdbTupleParser.ParseUnicode(slice)); - } - case FdbTupleTypes.Uuid128: - { - return FdbTupleParser.ParseUuid128(slice); - } - //REVIEW: should we allow converting a Uuid64 into a Uuid128? This looks more like a bug than an expected behavior... 
- } - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into an Uuid128", type)); - } - - /// Deserialize a tuple segment into 64-bit UUID - /// Slice that contains a single packed element - public static Uuid64 DeserializeUuid64(Slice slice) - { - if (slice.IsNullOrEmpty) return Uuid64.Empty; - - int type = slice[0]; - - switch (type) - { - case FdbTupleTypes.Bytes: - { // expect binary representation as a 16-byte array - return new Uuid64(FdbTupleParser.ParseBytes(slice)); - } - case FdbTupleTypes.Utf8: - { // expect text representation - return new Uuid64(FdbTupleParser.ParseUnicode(slice)); - } - case FdbTupleTypes.Uuid64: - { - return FdbTupleParser.ParseUuid64(slice); - } - } - - if (type >= FdbTupleTypes.IntZero && type <= FdbTupleTypes.IntPos8) - { // expect 64-bit number - return new Uuid64(FdbTupleParser.ParseInt64(type, slice)); - } - // we don't support negative numbers! - - throw new FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into an Uuid64", type)); - } - - /// Deserialize a tuple segment into Guid - /// Slice that contains a single packed element - [CanBeNull] - public static System.Net.IPAddress DeserializeIPAddress(Slice slice) - { - if (slice.IsNullOrEmpty) return null; - - int type = slice[0]; - - switch (type) - { - case FdbTupleTypes.Bytes: - { - return new System.Net.IPAddress(FdbTupleParser.ParseBytes(slice).GetBytes()); - } - case FdbTupleTypes.Utf8: - { - return System.Net.IPAddress.Parse(FdbTupleParser.ParseUnicode(slice)); - } - case FdbTupleTypes.Uuid128: - { // could be an IPv6 encoded as a 128-bits UUID - return new System.Net.IPAddress(slice.GetBytes()); - } - } - - if (type >= FdbTupleTypes.IntPos1 && type <= FdbTupleTypes.IntPos4) - { // could be an IPv4 encoded as a 32-bit unsigned integer - var value = FdbTupleParser.ParseInt64(type, slice); - Contract.Assert(value >= 0 && value <= uint.MaxValue); - return new System.Net.IPAddress(value); - } - - throw new 
FormatException(String.Format("Cannot convert tuple segment of type 0x{0:X} into System.Net.IPAddress", type)); - } - - public static FdbTupleAlias DeserializeAlias(Slice slice) - { - if (slice.Count != 1) throw new FormatException("Cannot convert tuple segment into this type"); - return (FdbTupleAlias)slice[0]; - } - - /// Unpack a tuple from a buffer - /// Slice that contains the packed representation of a tuple with zero or more elements - /// Decoded tuple - [NotNull] - internal static FdbSlicedTuple Unpack(Slice buffer, bool embedded) - { - var reader = new TupleReader(buffer); - if (embedded) reader.Depth = 1; - - // most tuples will probably fit within (prefix, sub-prefix, id, key) so pre-allocating with 4 should be ok... - var items = new Slice[4]; - - Slice item; - int p = 0; - while ((item = FdbTupleParser.ParseNext(ref reader)).HasValue) - { - if (p >= items.Length) - { - // note: do not grow exponentially, because tuples will never but very large... - Array.Resize(ref items, p + 4); - } - items[p++] = item; - } - - if (reader.Input.HasMore) throw new FormatException("Parsing of tuple failed failed before reaching the end of the key"); - return new FdbSlicedTuple(p == 0 ? 
Slice.EmptySliceArray : items, 0, p); - } - - /// Ensure that a slice is a packed tuple that contains a single and valid element - /// Slice that should contain the packed representation of a singleton tuple - /// Decoded slice of the single element in the singleton tuple - public static Slice UnpackSingle(Slice buffer) - { - var slicer = new TupleReader(buffer); - - var current = FdbTupleParser.ParseNext(ref slicer); - if (slicer.Input.HasMore) throw new FormatException("Parsing of singleton tuple failed before reaching the end of the key"); - - return current; - } - - /// Only returns the first item of a packed tuple - /// Slice that contains the packed representation of a tuple with one or more elements - /// Raw slice corresponding to the first element of the tuple - public static Slice UnpackFirst(Slice buffer) - { - var slicer = new TupleReader(buffer); - - return FdbTupleParser.ParseNext(ref slicer); - } - - /// Only returns the last item of a packed tuple - /// Slice that contains the packed representation of a tuple with one or more elements - /// Raw slice corresponding to the last element of the tuple - public static Slice UnpackLast(Slice buffer) - { - var slicer = new TupleReader(buffer); - - Slice item = Slice.Nil; - - Slice current; - while ((current = FdbTupleParser.ParseNext(ref slicer)).HasValue) - { - item = current; - } - - if (slicer.Input.HasMore) throw new FormatException("Parsing of tuple failed failed before reaching the end of the key"); - return item; - } - - #endregion - - } - -} diff --git a/FoundationDB.Client/Layers/Tuples/FdbTuple`1.cs b/FoundationDB.Client/Layers/Tuples/FdbTuple`1.cs deleted file mode 100644 index 1f58f922d..000000000 --- a/FoundationDB.Client/Layers/Tuples/FdbTuple`1.cs +++ /dev/null @@ -1,218 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Layers.Tuples -{ - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using JetBrains.Annotations; - using System; - using System.Collections; - using System.Collections.Generic; - using System.ComponentModel; - using System.Diagnostics; - - /// Tuple that holds only one item - /// Type of the item - [ImmutableObject(true), DebuggerDisplay("{ToString()}")] - public struct FdbTuple : IFdbTuple - { - // This is mostly used by code that create a lot of temporary singleton, to reduce the pressure on the Garbage Collector by allocating them on the stack. - // Please note that if you return an FdbTuple as an IFdbTuple, it will be boxed by the CLR and all memory gains will be lost - - /// First and only item in the tuple - public readonly T1 Item1; - - [DebuggerStepThrough] - public FdbTuple(T1 item1) - { - this.Item1 = item1; - } - - public int Count { get { return 1; } } - - public object this[int index] - { - get - { - if (index > 0 || index < -1) FdbTuple.FailIndexOutOfRange(index, 1); - return this.Item1; - } - } - - public IFdbTuple this[int? fromIncluded, int? toExcluded] - { - get { return FdbTuple.Splice(this, fromIncluded, toExcluded); } - } - - /// Return the typed value of an item of the tuple, given its position - /// Expected type of the item - /// Position of the item (if negative, means relative from the end) - /// Value of the item at position , adapted into type . 
- public R Get(int index) - { - if (index > 0 || index < -1) FdbTuple.FailIndexOutOfRange(index, 1); - return FdbConverters.Convert(this.Item1); - } - - /// Return the typed value of the last item in the tuple - R IFdbTuple.Last() - { - return FdbConverters.Convert(this.Item1); - } - - public void PackTo(ref TupleWriter writer) - { - FdbTuplePacker.Encoder(ref writer, this.Item1); - } - - IFdbTuple IFdbTuple.Append(T2 value) - { - return new FdbTuple(this.Item1, value); - } - - /// Appends a tuple as a single new item at the end of the current tuple. - /// Tuple that will be added as an embedded item - /// New tuple with one extra item - /// If you want to append the *items* of , and not the tuple itself, please call ! - [NotNull] - public FdbTuple Append(T2 value) - { - return new FdbTuple(this.Item1, value); - } - - /// Appends the items of a tuple at the end of the current tuple. - /// Tuple whose items are to be appended at the end - /// New tuple composed of the current tuple's items, followed by 's items - [NotNull] - public IFdbTuple Concat([NotNull] IFdbTuple tuple) - { - return FdbTuple.Concat(this, tuple); - } - - /// Copy the item of this singleton into an array at the specified offset - public void CopyTo([NotNull] object[] array, int offset) - { - array[offset] = this.Item1; - } - - /// Execute a lambda Action with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - public void With([NotNull] Action lambda) - { - lambda(this.Item1); - } - - /// Execute a lambda Function with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - /// Result of calling with the items of this tuple - public R With([NotNull] Func lambda) - { - return lambda(this.Item1); - } - - public IEnumerator GetEnumerator() - { - yield return this.Item1; - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - public 
Slice ToSlice() - { - return FdbTuple.EncodeKey(this.Item1); - } - - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } - - public override string ToString() - { - // singleton tuples end with a trailing ',' - return "(" + FdbTuple.Stringify(this.Item1) + ",)"; - } - - public override bool Equals(object obj) - { - return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); - } - - public bool Equals(IFdbTuple other) - { - return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); - } - - public override int GetHashCode() - { - return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); - } - - public static bool operator ==(FdbTuple left, FdbTuple right) - { - return SimilarValueComparer.Default.Equals(left.Item1, right.Item1); - } - - public static bool operator !=(FdbTuple left, FdbTuple right) - { - return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1); - } - - bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) - { - if (other == null) return false; - if (other is FdbTuple) - { - return comparer.Equals(this.Item1, ((FdbTuple)other).Item1); - } - return FdbTuple.Equals(this, other, comparer); - } - - int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) - { - return comparer.GetHashCode(this.Item1); - } - - public static implicit operator FdbTuple(Tuple t) - { - if (t == null) throw new ArgumentNullException("t"); - return new FdbTuple(t.Item1); - } - - public static explicit operator Tuple(FdbTuple t) - { - return new Tuple(t.Item1); - } - - } - -} diff --git a/FoundationDB.Client/Layers/Tuples/FdbTuple`2.cs b/FoundationDB.Client/Layers/Tuples/FdbTuple`2.cs deleted file mode 100644 index 1ef16cb2d..000000000 --- a/FoundationDB.Client/Layers/Tuples/FdbTuple`2.cs +++ /dev/null @@ -1,248 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Layers.Tuples -{ - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using JetBrains.Annotations; - using System; - using System.Collections; - using System.Collections.Generic; - using System.ComponentModel; - using System.Diagnostics; - - /// Tuple that holds a pair of items - /// Type of the first item - /// Type of the second item - [ImmutableObject(true), DebuggerDisplay("{ToString()}")] - public struct FdbTuple : IFdbTuple - { - // This is mostly used by code that create a lot of temporary pair, to reduce the pressure on the Garbage Collector by allocating them on the stack. - // Please note that if you return an FdbTuple as an IFdbTuple, it will be boxed by the CLR and all memory gains will be lost - - /// First element of the pair - public readonly T1 Item1; - /// Seconde element of the pair - public readonly T2 Item2; - - [DebuggerStepThrough] - public FdbTuple(T1 item1, T2 item2) - { - this.Item1 = item1; - this.Item2 = item2; - } - - public int Count { get { return 2; } } - - public object this[int index] - { - get - { - switch (index) - { - case 0: case -2: return this.Item1; - case 1: case -1: return this.Item2; - default: FdbTuple.FailIndexOutOfRange(index, 2); return null; - } - } - } - - public IFdbTuple this[int? fromIncluded, int? toExcluded] - { - get { return FdbTuple.Splice(this, fromIncluded, toExcluded); } - } - - /// Return the typed value of an item of the tuple, given its position - /// Expected type of the item - /// Position of the item (if negative, means relative from the end) - /// Value of the item at position , adapted into type . 
- public R Get(int index) - { - switch(index) - { - case 0: case -2: return FdbConverters.Convert(this.Item1); - case 1: case -1: return FdbConverters.Convert(this.Item2); - default: FdbTuple.FailIndexOutOfRange(index, 2); return default(R); - } - } - - /// Return the value of the last item in the tuple - public T2 Last - { - get { return this.Item2; } - } - - /// Return the typed value of the last item in the tuple - R IFdbTuple.Last() - { - return FdbConverters.Convert(this.Item2); - } - - public void PackTo(ref TupleWriter writer) - { - FdbTuplePacker.Encoder(ref writer, this.Item1); - FdbTuplePacker.Encoder(ref writer, this.Item2); - } - - IFdbTuple IFdbTuple.Append(T3 value) - { - return new FdbTuple(this.Item1, this.Item2, value); - } - - /// Appends a single new item at the end of the current tuple. - /// Value that will be added as an embedded item - /// New tuple with one extra item - /// If is a tuple, and you want to append the *items* of this tuple, and not the tuple itself, please call ! - [NotNull] - public FdbTuple Append(T3 value) - { - return new FdbTuple(this.Item1, this.Item2, value); - // Note: By create a FdbTuple we risk an explosion of the number of combinations of Ts which could potentially cause problems at runtime (too many variants of the same generic types). - // ex: if we have N possible types, then there could be N^3 possible variants of FdbTuple that the JIT has to deal with. - // => if this starts becoming a problem, then we should return a list tuple ! - } - - /// Appends the items of a tuple at the end of the current tuple. 
- /// Tuple whose items are to be appended at the end - /// New tuple composed of the current tuple's items, followed by 's items - [NotNull] - public IFdbTuple Concat([NotNull] IFdbTuple tuple) - { - return FdbTuple.Concat(this, tuple); - } - - /// Copy both items of this pair into an array at the specified offset - public void CopyTo(object[] array, int offset) - { - array[offset] = this.Item1; - array[offset + 1] = this.Item2; - } - - /// Execute a lambda Action with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - public void With([NotNull] Action lambda) - { - lambda(this.Item1, this.Item2); - } - - /// Execute a lambda Function with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - /// Result of calling with the items of this tuple - public R With([NotNull] Func lambda) - { - return lambda(this.Item1, this.Item2); - } - - public IEnumerator GetEnumerator() - { - yield return this.Item1; - yield return this.Item2; - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - public Slice ToSlice() - { - return FdbTuple.EncodeKey(this.Item1, this.Item2); - } - - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } - - public override string ToString() - { - return "(" + FdbTuple.Stringify(this.Item1) + ", " + FdbTuple.Stringify(this.Item2) + ")"; - } - - public override bool Equals(object obj) - { - return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); - } - - public bool Equals(IFdbTuple other) - { - return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); - } - - public override int GetHashCode() - { - return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); - } - - public static bool operator ==(FdbTuple left, FdbTuple right) - { - return 
SimilarValueComparer.Default.Equals(left.Item1, right.Item1) - && SimilarValueComparer.Default.Equals(left.Item2, right.Item2); - } - - public static bool operator !=(FdbTuple left, FdbTuple right) - { - return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) - || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2); - } - - bool System.Collections.IStructuralEquatable.Equals(object other, System.Collections.IEqualityComparer comparer) - { - if (other == null) return false; - if (other is FdbTuple) - { - var tuple = (FdbTuple)other; - return comparer.Equals(this.Item1, tuple.Item1) - && comparer.Equals(this.Item2, tuple.Item2); - } - return FdbTuple.Equals(this, other, comparer); - } - - int System.Collections.IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer comparer) - { - return FdbTuple.CombineHashCodes( - comparer.GetHashCode(this.Item1), - comparer.GetHashCode(this.Item2) - ); - } - - public static implicit operator FdbTuple(Tuple t) - { - if (t == null) throw new ArgumentNullException("t"); - return new FdbTuple(t.Item1, t.Item2); - } - - public static explicit operator Tuple(FdbTuple t) - { - return new Tuple(t.Item1, t.Item2); - } - - } - -} diff --git a/FoundationDB.Client/Layers/Tuples/FdbTuple`3.cs b/FoundationDB.Client/Layers/Tuples/FdbTuple`3.cs deleted file mode 100644 index 75b8d23e4..000000000 --- a/FoundationDB.Client/Layers/Tuples/FdbTuple`3.cs +++ /dev/null @@ -1,278 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. 
- * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Layers.Tuples -{ - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using JetBrains.Annotations; - using System; - using System.Collections; - using System.Collections.Generic; - using System.ComponentModel; - using System.Diagnostics; - using System.Text; - - /// Tuple that can hold three items - /// Type of the first item - /// Type of the second item - /// Type of the third item - [ImmutableObject(true), DebuggerDisplay("{ToString()}")] - public struct FdbTuple : IFdbTuple - { - // This is mostly used by code that create a lot of temporary triplet, to reduce the pressure on the Garbage Collector by allocating them on the stack. 
- // Please note that if you return an FdbTuple as an IFdbTuple, it will be boxed by the CLR and all memory gains will be lost - - /// First element of the triplet - public readonly T1 Item1; - /// Second element of the triplet - public readonly T2 Item2; - /// Third and last elemnt of the triplet - public readonly T3 Item3; - - [DebuggerStepThrough] - public FdbTuple(T1 item1, T2 item2, T3 item3) - { - this.Item1 = item1; - this.Item2 = item2; - this.Item3 = item3; - } - - public int Count { get { return 3; } } - - public object this[int index] - { - get - { - switch (index) - { - case 0: case -3: return this.Item1; - case 1: case -2: return this.Item2; - case 2: case -1: return this.Item3; - default: FdbTuple.FailIndexOutOfRange(index, 3); return null; - } - } - } - - public IFdbTuple this[int? fromIncluded, int? toExcluded] - { - get { return FdbTuple.Splice(this, fromIncluded, toExcluded); } - } - - /// Return the typed value of an item of the tuple, given its position - /// Expected type of the item - /// Position of the item (if negative, means relative from the end) - /// Value of the item at position , adapted into type . 
- public R Get(int index) - { - switch(index) - { - case 0: case -3: return FdbConverters.Convert(this.Item1); - case 1: case -2: return FdbConverters.Convert(this.Item2); - case 2: case -1: return FdbConverters.Convert(this.Item3); - default: FdbTuple.FailIndexOutOfRange(index, 3); return default(R); - } - } - - /// Return the value of the last item in the tuple - public T3 Last - { - get { return this.Item3; } - } - - /// Return the typed value of the last item in the tuple - R IFdbTuple.Last() - { - return FdbConverters.Convert(this.Item3); - } - - public void PackTo(ref TupleWriter writer) - { - FdbTuplePacker.Encoder(ref writer, this.Item1); - FdbTuplePacker.Encoder(ref writer, this.Item2); - FdbTuplePacker.Encoder(ref writer, this.Item3); - } - - IFdbTuple IFdbTuple.Append(T4 value) - { - // here, the caller doesn't care about the exact tuple type, so we simply return a boxed List Tuple. - return new FdbListTuple(new object[4] { this.Item1, this.Item2, this.Item3, value }, 0, 4); - } - - /// Appends a single new item at the end of the current tuple. - /// Value that will be added as an embedded item - /// New tuple with one extra item - /// If is a tuple, and you want to append the *items* of this tuple, and not the tuple itself, please call ! - [NotNull] - public FdbTuple Append(T4 value) - { - // Here, the caller was explicitly using the FdbTuple struct so probably care about memory footprint, so we keep returning a struct - return new FdbTuple(this.Item1, this.Item2, this.Item3, value); - - // Note: By create a FdbTuple we risk an explosion of the number of combinations of Ts which could potentially cause problems at runtime (too many variants of the same generic types). - // ex: if we have N possible types, then there could be N^4 possible variants of FdbTuple that the JIT has to deal with. - // => if this starts becoming a problem, then we should return a list tuple ! 
- } - - /// Copy all the items of this tuple into an array at the specified offset - [NotNull] - public FdbTuple Append(IFdbTuple value) - { - //note: this override exists to prevent the explosion of tuple types such as FdbTuple, FdbTuple, FdbTuple> ! - return new FdbTuple(this.Item1, this.Item2, this.Item3, value); - } - - /// Appends the items of a tuple at the end of the current tuple. - /// Tuple whose items are to be appended at the end - /// New tuple composed of the current tuple's items, followed by 's items - [NotNull] - public IFdbTuple Concat([NotNull] IFdbTuple tuple) - { - return FdbTuple.Concat(this, tuple); - } - - public void CopyTo(object[] array, int offset) - { - array[offset] = this.Item1; - array[offset + 1] = this.Item2; - array[offset + 2] = this.Item3; - } - - /// Execute a lambda Action with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - public void With([NotNull] Action lambda) - { - lambda(this.Item1, this.Item2, this.Item3); - } - - /// Execute a lambda Function with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - /// Result of calling with the items of this tuple - public R With([NotNull] Func lambda) - { - return lambda(this.Item1, this.Item2, this.Item3); - } - - public IEnumerator GetEnumerator() - { - yield return this.Item1; - yield return this.Item2; - yield return this.Item3; - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - public Slice ToSlice() - { - return FdbTuple.EncodeKey(this.Item1, this.Item2, this.Item3); - } - - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } - - public override string ToString() - { - return new StringBuilder(32).Append('(') - .Append(FdbTuple.Stringify(this.Item1)).Append(", ") - .Append(FdbTuple.Stringify(this.Item2)).Append(", ") - .Append(FdbTuple.Stringify(this.Item3)).Append(')') - .ToString(); 
- } - - public override bool Equals(object obj) - { - return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); - } - - public bool Equals(IFdbTuple other) - { - return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); - } - - public override int GetHashCode() - { - return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); - } - - public static bool operator ==(FdbTuple left, FdbTuple right) - { - var comparer = SimilarValueComparer.Default; - return comparer.Equals(left.Item1, right.Item1) - && comparer.Equals(left.Item2, right.Item2) - && comparer.Equals(left.Item3, right.Item3); - } - - public static bool operator !=(FdbTuple left, FdbTuple right) - { - var comparer = SimilarValueComparer.Default; - return !comparer.Equals(left.Item1, right.Item1) - || !comparer.Equals(left.Item2, right.Item2) - || !comparer.Equals(left.Item3, right.Item3); - } - - bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) - { - if (other == null) return false; - if (other is FdbTuple) - { - var tuple = (FdbTuple)other; - return comparer.Equals(this.Item1, tuple.Item1) - && comparer.Equals(this.Item2, tuple.Item2) - && comparer.Equals(this.Item3, tuple.Item3); - } - return FdbTuple.Equals(this, other, comparer); - } - - int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) - { - return FdbTuple.CombineHashCodes( - comparer.GetHashCode(this.Item1), - comparer.GetHashCode(this.Item2), - comparer.GetHashCode(this.Item3) - ); - } - - public static implicit operator FdbTuple(Tuple t) - { - if (t == null) throw new ArgumentNullException("t"); - return new FdbTuple(t.Item1, t.Item2, t.Item3); - } - - public static explicit operator Tuple(FdbTuple t) - { - return new Tuple(t.Item1, t.Item2, t.Item3); - } - - } - -} diff --git a/FoundationDB.Client/Layers/Tuples/FdbTuple`4.cs b/FoundationDB.Client/Layers/Tuples/FdbTuple`4.cs deleted file mode 100644 index 
45e3a3fd1..000000000 --- a/FoundationDB.Client/Layers/Tuples/FdbTuple`4.cs +++ /dev/null @@ -1,284 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Layers.Tuples -{ - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using JetBrains.Annotations; - using System; - using System.Collections; - using System.Collections.Generic; - using System.ComponentModel; - using System.Diagnostics; - using System.Text; - - /// Tuple that can hold four items - /// Type of the first item - /// Type of the second item - /// Type of the third item - /// Type of the fourth item - [ImmutableObject(true), DebuggerDisplay("{ToString()}")] - public struct FdbTuple : IFdbTuple - { - // This is mostly used by code that create a lot of temporary quartets, to reduce the pressure on the Garbage Collector by allocating them on the stack. - // Please note that if you return an FdbTuple as an IFdbTuple, it will be boxed by the CLR and all memory gains will be lost - - /// First element of the quartet - public readonly T1 Item1; - /// Second element of the quartet - public readonly T2 Item2; - /// Third element of the quartet - public readonly T3 Item3; - /// Fourth and last element of the quartet - public readonly T4 Item4; - - /// Create a tuple containing for items - [DebuggerStepThrough] - public FdbTuple(T1 item1, T2 item2, T3 item3, T4 item4) - { - this.Item1 = item1; - this.Item2 = item2; - this.Item3 = item3; - this.Item4 = item4; - } - - /// Number of items in this tuple - public int Count { get { return 4; } } - - /// Return the Nth item in this tuple - public object this[int index] - { - get - { - switch (index) - { - case 0: case -4: return this.Item1; - case 1: case -3: return this.Item2; - case 2: case -2: return this.Item3; - case 3: case -1: return this.Item4; - default: FdbTuple.FailIndexOutOfRange(index, 4); return null; - } - } - } - - public IFdbTuple this[int? fromIncluded, int? 
toExcluded] - { - get { return FdbTuple.Splice(this, fromIncluded, toExcluded); } - } - - /// Return the typed value of an item of the tuple, given its position - /// Expected type of the item - /// Position of the item (if negative, means relative from the end) - /// Value of the item at position , adapted into type . - public R Get(int index) - { - switch(index) - { - case 0: case -4: return FdbConverters.Convert(this.Item1); - case 1: case -3: return FdbConverters.Convert(this.Item2); - case 2: case -2: return FdbConverters.Convert(this.Item3); - case 3: case -1: return FdbConverters.Convert(this.Item4); - default: FdbTuple.FailIndexOutOfRange(index, 4); return default(R); - } - } - - /// Return the value of the last item in the tuple - public T4 Last - { - get { return this.Item4; } - } - - /// Return the typed value of the last item in the tuple - R IFdbTuple.Last() - { - return FdbConverters.Convert(this.Item4); - } - - public void PackTo(ref TupleWriter writer) - { - FdbTuplePacker.Encoder(ref writer, this.Item1); - FdbTuplePacker.Encoder(ref writer, this.Item2); - FdbTuplePacker.Encoder(ref writer, this.Item3); - FdbTuplePacker.Encoder(ref writer, this.Item4); - } - - IFdbTuple IFdbTuple.Append(T5 value) - { - // the caller doesn't care about the return type, so just box everything into a list tuple - return new FdbListTuple(new object[5] { this.Item1, this.Item2, this.Item3, this.Item4, value }, 0, 5); - } - - /// Appends a single new item at the end of the current tuple. - /// Value that will be added as an embedded item - /// New tuple with one extra item - /// If is a tuple, and you want to append the *items* of this tuple, and not the tuple itself, please call ! - [NotNull] - public FdbLinkedTuple Append(T5 value) - { - // the caller probably cares about the return type, since it is using a struct, but whatever tuple type we use will end up boxing this tuple on the heap, and we will loose type information. 
- // but, by returning a FdbLinkedTuple, the tuple will still remember the exact type, and efficiently serializer/convert the values (without having to guess the type) - return new FdbLinkedTuple(this, value); - } - - /// Appends the items of a tuple at the end of the current tuple. - /// Tuple whose items are to be appended at the end - /// New tuple composed of the current tuple's items, followed by 's items - public IFdbTuple Concat(IFdbTuple tuple) - { - return FdbTuple.Concat(this, tuple); - } - - /// Copy all the items of this tuple into an array at the specified offset - public void CopyTo(object[] array, int offset) - { - array[offset] = this.Item1; - array[offset + 1] = this.Item2; - array[offset + 2] = this.Item3; - array[offset + 3] = this.Item4; - } - - /// Execute a lambda Action with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - public void With([NotNull] Action lambda) - { - lambda(this.Item1, this.Item2, this.Item3, this.Item4); - } - - /// Execute a lambda Function with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - /// Result of calling with the items of this tuple - public R With([NotNull] Func lambda) - { - return lambda(this.Item1, this.Item2, this.Item3, this.Item4); - } - - public IEnumerator GetEnumerator() - { - yield return this.Item1; - yield return this.Item2; - yield return this.Item3; - yield return this.Item4; - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - public Slice ToSlice() - { - return FdbTuple.EncodeKey(this.Item1, this.Item2, this.Item3, this.Item4); - } - - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } - - public override string ToString() - { - return new StringBuilder(48).Append('(') - .Append(FdbTuple.Stringify(this.Item1)).Append(", ") - .Append(FdbTuple.Stringify(this.Item2)).Append(", ") - 
.Append(FdbTuple.Stringify(this.Item3)).Append(", ") - .Append(FdbTuple.Stringify(this.Item4)).Append(')') - .ToString(); - } - - public override bool Equals(object obj) - { - return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); - } - - public bool Equals(IFdbTuple other) - { - return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); - } - - public override int GetHashCode() - { - return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); - } - - public static bool operator ==(FdbTuple left, FdbTuple right) - { - var comparer = SimilarValueComparer.Default; - return comparer.Equals(left.Item1, right.Item1) - && comparer.Equals(left.Item2, right.Item2) - && comparer.Equals(left.Item3, right.Item3) - && comparer.Equals(left.Item4, right.Item4); - } - - public static bool operator !=(FdbTuple left, FdbTuple right) - { - var comparer = SimilarValueComparer.Default; - return !comparer.Equals(left.Item1, right.Item1) - || !comparer.Equals(left.Item2, right.Item2) - || !comparer.Equals(left.Item3, right.Item3) - || !comparer.Equals(left.Item4, right.Item4); - } - - bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) - { - if (other == null) return false; - if (other is FdbTuple) - { - var tuple = (FdbTuple)other; - return comparer.Equals(this.Item1, tuple.Item1) - && comparer.Equals(this.Item2, tuple.Item2) - && comparer.Equals(this.Item3, tuple.Item3) - && comparer.Equals(this.Item4, tuple.Item4); - } - return FdbTuple.Equals(this, other, comparer); - } - - int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) - { - return FdbTuple.CombineHashCodes( - comparer.GetHashCode(this.Item1), - comparer.GetHashCode(this.Item2), - comparer.GetHashCode(this.Item3), - comparer.GetHashCode(this.Item4) - ); - } - - public static implicit operator FdbTuple(Tuple t) - { - if (t == null) throw new ArgumentNullException("t"); - return new 
FdbTuple(t.Item1, t.Item2, t.Item3, t.Item4); - } - - public static explicit operator Tuple(FdbTuple t) - { - return new Tuple(t.Item1, t.Item2, t.Item3, t.Item4); - } - - } - -} diff --git a/FoundationDB.Client/Layers/Tuples/FdbTuple`5.cs b/FoundationDB.Client/Layers/Tuples/FdbTuple`5.cs deleted file mode 100644 index 94a9e47b0..000000000 --- a/FoundationDB.Client/Layers/Tuples/FdbTuple`5.cs +++ /dev/null @@ -1,299 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Layers.Tuples -{ - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.Collections; - using System.Collections.Generic; - using System.ComponentModel; - using System.Diagnostics; - using System.Text; - - /// Tuple that can hold four items - /// Type of the 1st item - /// Type of the 2nd item - /// Type of the 3rd item - /// Type of the 4th item - /// Type of the 5th item - [ImmutableObject(true), DebuggerDisplay("{ToString()}")] - public struct FdbTuple : IFdbTuple - { - // This is mostly used by code that create a lot of temporary quartets, to reduce the pressure on the Garbage Collector by allocating them on the stack. - // Please note that if you return an FdbTuple as an IFdbTuple, it will be boxed by the CLR and all memory gains will be lost - - /// First element of the tuple - public readonly T1 Item1; - /// Second element of the tuple - public readonly T2 Item2; - /// Third element of the tuple - public readonly T3 Item3; - /// Fourth element of the tuple - public readonly T4 Item4; - /// Fifth and last element of the tuple - public readonly T5 Item5; - - /// Create a tuple containing for items - [DebuggerStepThrough] - public FdbTuple(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) - { - this.Item1 = item1; - this.Item2 = item2; - this.Item3 = item3; - this.Item4 = item4; - this.Item5 = item5; - } - - /// Number of items in this tuple - public int Count { get { return 5; } } - - /// Return the Nth item in this tuple - public object this[int index] - { - get - { - switch (index) - { - case 0: case -5: return this.Item1; - case 1: case -4: return this.Item2; - case 2: case -3: return this.Item3; - case 3: case -2: return this.Item4; - case 4: case -1: return this.Item5; - default: FdbTuple.FailIndexOutOfRange(index, 5); return null; - } - } - } - - public IFdbTuple this[int? fromIncluded, int? 
toExcluded] - { - get { return FdbTuple.Splice(this, fromIncluded, toExcluded); } - } - - /// Return the typed value of an item of the tuple, given its position - /// Expected type of the item - /// Position of the item (if negative, means relative from the end) - /// Value of the item at position , adapted into type . - public R Get(int index) - { - switch(index) - { - case 0: case -5: return FdbConverters.Convert(this.Item1); - case 1: case -4: return FdbConverters.Convert(this.Item2); - case 2: case -3: return FdbConverters.Convert(this.Item3); - case 3: case -2: return FdbConverters.Convert(this.Item4); - case 4: case -1: return FdbConverters.Convert(this.Item5); - default: FdbTuple.FailIndexOutOfRange(index, 5); return default(R); - } - } - - /// Return the value of the last item in the tuple - public T5 Last - { - get { return this.Item5; } - } - - /// Return the typed value of the last item in the tuple - R IFdbTuple.Last() - { - return FdbConverters.Convert(this.Item5); - } - - public void PackTo(ref TupleWriter writer) - { - FdbTuplePacker.Encoder(ref writer, this.Item1); - FdbTuplePacker.Encoder(ref writer, this.Item2); - FdbTuplePacker.Encoder(ref writer, this.Item3); - FdbTuplePacker.Encoder(ref writer, this.Item4); - FdbTuplePacker.Encoder(ref writer, this.Item5); - } - - IFdbTuple IFdbTuple.Append(T6 value) - { - // the caller doesn't care about the return type, so just box everything into a list tuple - return new FdbListTuple(new object[6] { this.Item1, this.Item2, this.Item3, this.Item4, this.Item5, value }, 0, 6); - } - - /// Appends a single new item at the end of the current tuple. - /// Value that will be added as an embedded item - /// New tuple with one extra item - /// If is a tuple, and you want to append the *items* of this tuple, and not the tuple itself, please call ! 
- [NotNull] - public FdbLinkedTuple Append(T6 value) - { - // the caller probably cares about the return type, since it is using a struct, but whatever tuple type we use will end up boxing this tuple on the heap, and we will loose type information. - // but, by returning a FdbLinkedTuple, the tuple will still remember the exact type, and efficiently serializer/convert the values (without having to guess the type) - return new FdbLinkedTuple(this, value); - } - - /// Appends the items of a tuple at the end of the current tuple. - /// Tuple whose items are to be appended at the end - /// New tuple composed of the current tuple's items, followed by 's items - public IFdbTuple Concat(IFdbTuple tuple) - { - return FdbTuple.Concat(this, tuple); - } - - /// Copy all the items of this tuple into an array at the specified offset - public void CopyTo(object[] array, int offset) - { - array[offset] = this.Item1; - array[offset + 1] = this.Item2; - array[offset + 2] = this.Item3; - array[offset + 3] = this.Item4; - array[offset + 4] = this.Item5; - } - - /// Execute a lambda Action with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - public void With([NotNull] Action lambda) - { - lambda(this.Item1, this.Item2, this.Item3, this.Item4, this.Item5); - } - - /// Execute a lambda Function with the content of this tuple - /// Action that will be passed the content of this tuple as parameters - /// Result of calling with the items of this tuple - public R With([NotNull] Func lambda) - { - return lambda(this.Item1, this.Item2, this.Item3, this.Item4, this.Item5); - } - - public IEnumerator GetEnumerator() - { - yield return this.Item1; - yield return this.Item2; - yield return this.Item3; - yield return this.Item4; - yield return this.Item5; - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - public Slice ToSlice() - { - return 
FdbTuple.EncodeKey(this.Item1, this.Item2, this.Item3, this.Item4, this.Item5); - } - - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } - - public override string ToString() - { - return new StringBuilder(48).Append('(') - .Append(FdbTuple.Stringify(this.Item1)).Append(", ") - .Append(FdbTuple.Stringify(this.Item2)).Append(", ") - .Append(FdbTuple.Stringify(this.Item3)).Append(", ") - .Append(FdbTuple.Stringify(this.Item4)).Append(", ") - .Append(FdbTuple.Stringify(this.Item5)).Append(')') - .ToString(); - } - - public override bool Equals(object obj) - { - return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); - } - - public bool Equals(IFdbTuple other) - { - return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); - } - - public override int GetHashCode() - { - return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); - } - - public static bool operator ==(FdbTuple left, FdbTuple right) - { - var comparer = SimilarValueComparer.Default; - return comparer.Equals(left.Item1, right.Item1) - && comparer.Equals(left.Item2, right.Item2) - && comparer.Equals(left.Item3, right.Item3) - && comparer.Equals(left.Item4, right.Item4) - && comparer.Equals(left.Item5, right.Item5); - } - - public static bool operator !=(FdbTuple left, FdbTuple right) - { - var comparer = SimilarValueComparer.Default; - return !comparer.Equals(left.Item1, right.Item1) - || !comparer.Equals(left.Item2, right.Item2) - || !comparer.Equals(left.Item3, right.Item3) - || !comparer.Equals(left.Item4, right.Item4) - || !comparer.Equals(left.Item5, right.Item5); - } - - bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) - { - if (other == null) return false; - if (other is FdbTuple) - { - var tuple = (FdbTuple)other; - return comparer.Equals(this.Item1, tuple.Item1) - && comparer.Equals(this.Item2, tuple.Item2) - && comparer.Equals(this.Item3, tuple.Item3) - && 
comparer.Equals(this.Item4, tuple.Item4) - && comparer.Equals(this.Item5, tuple.Item5); - } - return FdbTuple.Equals(this, other, comparer); - } - - int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) - { - return FdbTuple.CombineHashCodes( - comparer.GetHashCode(this.Item1), - comparer.GetHashCode(this.Item2), - comparer.GetHashCode(this.Item3), - comparer.GetHashCode(this.Item4), - comparer.GetHashCode(this.Item5) - ); - } - - public static implicit operator FdbTuple(Tuple t) - { - if (t == null) throw new ArgumentNullException("t"); - return new FdbTuple(t.Item1, t.Item2, t.Item3, t.Item4, t.Item5); - } - - public static explicit operator Tuple(FdbTuple t) - { - return new Tuple(t.Item1, t.Item2, t.Item3, t.Item4, t.Item5); - } - - } - -} diff --git a/FoundationDB.Client/Layers/Tuples/TypeSystem/TupleKeyEncoder.cs b/FoundationDB.Client/Layers/Tuples/TypeSystem/TupleKeyEncoder.cs deleted file mode 100644 index 7a29d9c96..000000000 --- a/FoundationDB.Client/Layers/Tuples/TypeSystem/TupleKeyEncoder.cs +++ /dev/null @@ -1,229 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Layers.Tuples -{ - using System; - using FoundationDB.Client; - - public sealed class TupleKeyEncoder : IDynamicKeyEncoder - { - - internal static TupleKeyEncoder Instance = new TupleKeyEncoder(); - - private TupleKeyEncoder() - { } - - public IFdbKeyEncoding Encoding - { - get { return TypeSystem.Tuples; } - } - - public FdbKeyRange ToRange(Slice prefix) - { - return FdbTuple.ToRange(prefix); - } - - public void PackKey(ref SliceWriter writer, IFdbTuple items) - { - var tw = new TupleWriter(writer); - FdbTuple.Pack(ref tw, items); - writer = tw.Output; - } - - public void EncodeKey(ref SliceWriter writer, T1 item1) - { - var tw = new TupleWriter(writer); - FdbTuplePacker.SerializeTo(ref tw, item1); - writer = tw.Output; - } - - public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2) - { - var tw = new TupleWriter(writer); - FdbTuplePacker.SerializeTo(ref tw, item1); - FdbTuplePacker.SerializeTo(ref tw, item2); - writer = tw.Output; - } - - public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3) - { - var tw = new TupleWriter(writer); - FdbTuplePacker.SerializeTo(ref tw, item1); - FdbTuplePacker.SerializeTo(ref tw, item2); - FdbTuplePacker.SerializeTo(ref tw, item3); - writer 
= tw.Output; - } - - public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4) - { - var tw = new TupleWriter(writer); - FdbTuplePacker.SerializeTo(ref tw, item1); - FdbTuplePacker.SerializeTo(ref tw, item2); - FdbTuplePacker.SerializeTo(ref tw, item3); - FdbTuplePacker.SerializeTo(ref tw, item4); - writer = tw.Output; - } - - public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) - { - var tw = new TupleWriter(writer); - FdbTuplePacker.SerializeTo(ref tw, item1); - FdbTuplePacker.SerializeTo(ref tw, item2); - FdbTuplePacker.SerializeTo(ref tw, item3); - FdbTuplePacker.SerializeTo(ref tw, item4); - FdbTuplePacker.SerializeTo(ref tw, item5); - writer = tw.Output; - } - - public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) - { - var tw = new TupleWriter(writer); - FdbTuplePacker.SerializeTo(ref tw, item1); - FdbTuplePacker.SerializeTo(ref tw, item2); - FdbTuplePacker.SerializeTo(ref tw, item3); - FdbTuplePacker.SerializeTo(ref tw, item4); - FdbTuplePacker.SerializeTo(ref tw, item5); - FdbTuplePacker.SerializeTo(ref tw, item6); - writer = tw.Output; - } - - public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) - { - var tw = new TupleWriter(writer); - FdbTuplePacker.SerializeTo(ref tw, item1); - FdbTuplePacker.SerializeTo(ref tw, item2); - FdbTuplePacker.SerializeTo(ref tw, item3); - FdbTuplePacker.SerializeTo(ref tw, item4); - FdbTuplePacker.SerializeTo(ref tw, item5); - FdbTuplePacker.SerializeTo(ref tw, item6); - FdbTuplePacker.SerializeTo(ref tw, item7); - writer = tw.Output; - } - - public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) - { - var tw = new TupleWriter(writer); - FdbTuplePacker.SerializeTo(ref tw, item1); - FdbTuplePacker.SerializeTo(ref tw, item2); - FdbTuplePacker.SerializeTo(ref tw, item3); - 
FdbTuplePacker.SerializeTo(ref tw, item4); - FdbTuplePacker.SerializeTo(ref tw, item5); - FdbTuplePacker.SerializeTo(ref tw, item6); - FdbTuplePacker.SerializeTo(ref tw, item7); - FdbTuplePacker.SerializeTo(ref tw, item8); - writer = tw.Output; - } - - public IFdbTuple UnpackKey(Slice packed) - { - return FdbTuple.Unpack(packed); - } - - public T DecodeKey(Slice packed) - { - return FdbTuple.DecodeKey(packed); - } - - public T DecodeKeyFirst(Slice packed) - { - return FdbTuple.DecodeFirst(packed); - } - - public T DecodeKeyLast(Slice packed) - { - return FdbTuple.DecodeLast(packed); - } - - public FdbTuple DecodeKey(Slice packed) - { - return FdbTuple.DecodeKey(packed); - } - - public FdbTuple DecodeKey(Slice packed) - { - return FdbTuple.DecodeKey(packed); - } - - public FdbTuple DecodeKey(Slice packed) - { - return FdbTuple.DecodeKey(packed); - } - - public FdbTuple DecodeKey(Slice packed) - { - return FdbTuple.DecodeKey(packed); - } - - public FdbKeyRange ToRange(Slice prefix, IFdbTuple items) - { - return FdbTuple.ToRange(prefix, items); - } - - public FdbKeyRange ToKeyRange(Slice prefix, T1 item1) - { - return FdbTuple.ToRange(prefix, FdbTuple.Create(item1)); - } - - public FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2) - { - return FdbTuple.ToRange(prefix, FdbTuple.Create(item1, item2)); - } - - public FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3) - { - return FdbTuple.ToRange(prefix, FdbTuple.Create(item1, item3, item3)); - } - - public FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4) - { - return FdbTuple.ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4)); - } - - public FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) - { - return FdbTuple.ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4, item5)); - } - - public FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) - { - return 
FdbTuple.ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4, item5, item6)); - } - - public FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) - { - return FdbTuple.ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4, item5, item6, item7)); - } - - public FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) - { - return FdbTuple.ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4, item5, item6, item7, item8)); - } - } - -} \ No newline at end of file diff --git a/FoundationDB.Client/Linq/FdbAsyncEnumerable.cs b/FoundationDB.Client/Linq/FdbAsyncEnumerable.cs deleted file mode 100644 index 264707c46..000000000 --- a/FoundationDB.Client/Linq/FdbAsyncEnumerable.cs +++ /dev/null @@ -1,1234 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Linq -{ - using FoundationDB.Async; - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.Collections.Generic; - using System.Threading; - using System.Threading.Tasks; - - /// Provides a set of static methods for querying objects that implement . - public static partial class FdbAsyncEnumerable - { - // Welcome to the wonderful world of the Monads! - - #region Entering the Monad... - - /// Returns an empty async sequence - [NotNull] - public static IFdbAsyncEnumerable Empty() - { - return EmptySequence.Default; - } - - /// Returns an async sequence with a single element, which is a constant - [NotNull] - public static IFdbAsyncEnumerable Singleton(T value) - { - //note: we can't call this method Single(T), because then Single(Func) would be ambigous with Single>(T) - return new SingletonSequence(() => value); - } - - /// Returns an async sequence which will produce a single element, using the specified lambda - /// Lambda that will be called once per iteration, to produce the single element of this sequene - /// If the sequence is iterated multiple times, then will be called once for each iteration. 
- [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Single([NotNull] Func lambda) - { - if (lambda == null) throw new ArgumentNullException("lambda"); - return new SingletonSequence(lambda); - } - - /// Returns an async sequence which will produce a single element, using the specified lambda - /// Lambda that will be called once per iteration, to produce the single element of this sequene - /// If the sequence is iterated multiple times, then will be called once for each iteration. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Single([NotNull] Func> asyncLambda) - { - if (asyncLambda == null) throw new ArgumentNullException("asyncLambda"); - return new SingletonSequence(asyncLambda); - } - - /// Returns an async sequence which will produce a single element, using the specified lambda - /// Lambda that will be called once per iteration, to produce the single element of this sequene - /// If the sequence is iterated multiple times, then will be called once for each iteration. 
- [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Single([NotNull] Func> asyncLambda) - { - if (asyncLambda == null) throw new ArgumentNullException("asyncLambda"); - return new SingletonSequence(asyncLambda); - } - - /// Apply an async lambda to a sequence of elements to transform it into an async sequence - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable ToAsyncEnumerable([NotNull] this IEnumerable source, [NotNull] Func> lambda) - { - if (source == null) throw new ArgumentNullException("source"); - if (lambda == null) throw new ArgumentNullException("lambda"); - - return Create(source, (iterator) => new EnumerableIterator(iterator, lambda)); - } - - /// Apply an async lambda to a sequence of elements to transform it into an async sequence - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable ToAsyncEnumerable([NotNull] this IEnumerable source) - { - if (source == null) throw new ArgumentNullException("source"); - - return Create(source, (iterator) => new EnumerableIterator(iterator, x => Task.FromResult(x))); - } - - /// Wraps an async lambda into an async sequence that will return the result of the lambda - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable FromTask([NotNull] Func> asyncLambda) - { - //TODO: create a custom iterator for this ? - return ToAsyncEnumerable(new [] { asyncLambda }).Select(x => x()); - } - - /// Split a sequence of items into several batches - /// Type of the elemenst in - /// Source sequence - /// Maximum size of each batch - /// Sequence of batches, whose size will always we , except for the last batch that will only hold the remaning items. If the source is empty, an empty sequence is returned. 
- [NotNull, LinqTunnel] - public static IEnumerable> Buffered([NotNull] this IEnumerable source, int batchSize) - { - if (source == null) throw new ArgumentNullException("source"); - if (batchSize <= 0) throw new ArgumentException("Batch size must be greater than zero.", "batchSize"); - - var list = new List(batchSize); - foreach (var item in source) - { - list.Add(item); - if (list.Count >= batchSize) - { - yield return list; - list.Clear(); - } - } - } - - #endregion - - #region Staying in the Monad... - - #region SelectMany... - - /// Projects each element of an async sequence to an and flattens the resulting sequences into one async sequence. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable SelectMany([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func> selector) - { - if (source == null) throw new ArgumentNullException("source"); - if (selector == null) throw new ArgumentNullException("selector"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.SelectMany(selector); - } - - return Flatten(source, new AsyncTransformExpression>(selector)); - } - - /// Projects each element of an async sequence to an and flattens the resulting sequences into one async sequence. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable SelectMany([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func>> asyncSelector) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncSelector == null) throw new ArgumentNullException("asyncSelector"); - - return SelectMany(source, TaskHelpers.WithCancellation(asyncSelector)); - } - - /// Projects each element of an async sequence to an and flattens the resulting sequences into one async sequence. 
- [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable SelectMany([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func>> asyncSelector) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncSelector == null) throw new ArgumentNullException("asyncSelector"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.SelectMany(asyncSelector); - } - - return Flatten(source, new AsyncTransformExpression>(asyncSelector)); - } - - /// Projects each element of an async sequence to an flattens the resulting sequences into one async sequence, and invokes a result selector function on each element therein. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable SelectMany([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func> collectionSelector, [NotNull] Func resultSelector) - { - if (source == null) throw new ArgumentNullException("source"); - if (collectionSelector == null) throw new ArgumentNullException("collectionSelector"); - if (resultSelector == null) throw new ArgumentNullException("resultSelector"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.SelectMany(collectionSelector, resultSelector); - } - - return Flatten(source, new AsyncTransformExpression>(collectionSelector), resultSelector); - } - - /// Projects each element of an async sequence to an flattens the resulting sequences into one async sequence, and invokes a result selector function on each element therein. 
- [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable SelectMany([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func>> asyncCollectionSelector, [NotNull] Func resultSelector) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncCollectionSelector == null) throw new ArgumentNullException("asyncCollectionSelector"); - if (resultSelector == null) throw new ArgumentNullException("resultSelector"); - - return SelectMany(source, TaskHelpers.WithCancellation(asyncCollectionSelector), resultSelector); - } - - /// Projects each element of an async sequence to an flattens the resulting sequences into one async sequence, and invokes a result selector function on each element therein. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable SelectMany([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func>> asyncCollectionSelector, [NotNull] Func resultSelector) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncCollectionSelector == null) throw new ArgumentNullException("asyncCollectionSelector"); - if (resultSelector == null) throw new ArgumentNullException("resultSelector"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.SelectMany(asyncCollectionSelector, resultSelector); - } - - return Flatten(source, new AsyncTransformExpression>(asyncCollectionSelector), resultSelector); - } - - #endregion - - #region Select... - - /// Projects each element of an async sequence into a new form. 
- [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Select([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func selector) - { - if (source == null) throw new ArgumentNullException("source"); - if (selector == null) throw new ArgumentNullException("selector"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.Select(selector); - } - - return Map(source, new AsyncTransformExpression(selector)); - } - - /// Projects each element of an async sequence into a new form. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Select([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func> asyncSelector) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncSelector == null) throw new ArgumentNullException("asyncSelector"); - - return Select(source, TaskHelpers.WithCancellation(asyncSelector)); - } - - /// Projects each element of an async sequence into a new form. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Select([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func> asyncSelector) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncSelector == null) throw new ArgumentNullException("asyncSelector"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.Select(asyncSelector); - } - - return Map(source, new AsyncTransformExpression(asyncSelector)); - } - - #endregion - - #region Where... - - /// Filters an async sequence of values based on a predicate. 
- [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Where([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func predicate) - { - if (source == null) throw new ArgumentNullException("source"); - if (predicate == null) throw new ArgumentNullException("predicate"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.Where(predicate); - } - - return Filter(source, new AsyncFilterExpression(predicate)); - } - - /// Filters an async sequence of values based on a predicate. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Where([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func> asyncPredicate) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncPredicate == null) throw new ArgumentNullException("asyncPredicate"); - - return Where(source, TaskHelpers.WithCancellation(asyncPredicate)); - } - - /// Filters an async sequence of values based on a predicate. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Where([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func> asyncPredicate) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncPredicate == null) throw new ArgumentNullException("asyncPredicate"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.Where(asyncPredicate); - } - - return Filter(source, new AsyncFilterExpression(asyncPredicate)); - } - - #endregion - - #region Take... - - /// Returns a specified number of contiguous elements from the start of an async sequence. 
- [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Take([NotNull] this IFdbAsyncEnumerable source, int count) - { - if (source == null) throw new ArgumentNullException("source"); - if (count < 0) throw new ArgumentOutOfRangeException("count", count, "Count cannot be less than zero"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.Take(count); - } - - return FdbAsyncEnumerable.Limit(source, count); - } - - #endregion - - #region TakeWhile... - - /// Returns elements from an async sequence as long as a specified condition is true, and then skips the remaining elements. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable TakeWhile([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func condition) - { - if (source == null) throw new ArgumentNullException("source"); - if (condition == null) throw new ArgumentNullException("condition"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.TakeWhile(condition); - } - - return FdbAsyncEnumerable.Limit(source, condition); - } - - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable TakeWhile(this IFdbAsyncEnumerable source, [NotNull] Func condition, out QueryStatistics stopped) - { - var signal = new QueryStatistics(false); - stopped = signal; - - // to trigger the signal, we just intercept the condition returning false (which only happen once!) - Func wrapped = (x) => - { - if (condition(x)) return true; - signal.Update(true); - return false; - }; - - return TakeWhile(source, wrapped); - } - - #endregion - - #region Skip... - - /// Skips the first elements of an async sequence. 
- [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Skip([NotNull] this IFdbAsyncEnumerable source, int count) - { - if (source == null) throw new ArgumentNullException("source"); - if (count < 0) throw new ArgumentOutOfRangeException("count", count, "Count cannot be less than zero"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.Skip(count); - } - - return FdbAsyncEnumerable.Offset(source, count); - } - - #endregion - - #region SelectAsync - - /// Projects each element of an async sequence into a new form. - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable SelectAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func> asyncSelector, FdbParallelQueryOptions options = null) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncSelector == null) throw new ArgumentNullException("asyncSelector"); - - return new FdbParallelSelectAsyncIterator(source, asyncSelector, options ?? new FdbParallelQueryOptions()); - } - - /// Always prefetch the next item from the inner sequence. - /// Type of the items in the source sequence - /// Source sequence that has a high latency, and from which we want to prefetch a set number of items. - /// Sequence that prefetch the next item, when outputing the current item. - /// - /// This iterator can help smooth out the query pipeline when every call to the inner sequence has a somewhat high latency (ex: reading the next page of results from the database). - /// Avoid prefetching from a source that is already reading from a buffer of results. - /// - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Prefetch([NotNull] this IFdbAsyncEnumerable source) - { - if (source == null) throw new ArgumentNullException("source"); - - return new FdbPrefetchingAsyncIterator(source, 1); - } - - /// Prefetch a certain number of items from the inner sequence, before outputing the results one by one. 
- /// Type of the items in the source sequence - /// Source sequence that has a high latency, and from which we want to prefetch a set number of items. - /// Maximum number of items to buffer from the source before they are consumed by the rest of the query. - /// Sequence that returns items from a buffer of prefetched list. - /// - /// This iterator can help smooth out the query pipeline when every call to the inner sequence has a somewhat high latency (ex: reading the next page of results from the database). - /// Avoid prefetching from a source that is already reading from a buffer of results. - /// - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Prefetch([NotNull] this IFdbAsyncEnumerable source, int prefetchCount) - { - if (source == null) throw new ArgumentNullException("source"); - if (prefetchCount <= 0) throw new ArgumentOutOfRangeException("prefetchCount", prefetchCount, "Prefetch count must be at least one."); - - return new FdbPrefetchingAsyncIterator(source, prefetchCount); - } - - /// Buffers the items of a bursty sequence, into a sequence of variable-sized arrays made up of items that where produced in a very short timespan. - /// Type of the items in the source sequence - /// Source sequence, that produces bursts of items, produced from the same page of results, before reading the next page. - /// Maximum number of items to return in a single window. If more items arrive at the same time, a new window will be opened with the rest of the items. - /// Sequence of batches, where all the items of a single batch arrived at the same time. A batch is closed once the next call to MoveNext() on the inner sequence does not complete immediately. Batches can be smaller than . - /// - /// This should only be called on bursty asynchronous sequences, and when you want to process items in batches, without incurring the cost of latency between two pages of results. 
- /// You should avoid using this operator on sequences where each call to MoveNext() is asynchronous, since it would only produce batchs with only a single item. - /// - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Window([NotNull] this IFdbAsyncEnumerable source, int maxWindowSize) - { - if (source == null) throw new ArgumentNullException("source"); - if (maxWindowSize <= 0) throw new ArgumentOutOfRangeException("maxWindowSize", maxWindowSize, "Window size must be at least one."); - - return new FdbWindowingAsyncIterator(source, maxWindowSize); - } - - /// Buffers the items of a source sequence, and outputs a sequence of fixed-sized arrays. - /// Type of the items in the source sequence - /// Source sequence that will be cut into chunks containing at most items. - /// Number of items per batch. The last batch may contain less items, but should never be empty. - /// Sequence of arrays of size , except the last batch which can have less items. - /// - /// This operator does not care about the latency of each item, and will always try to fill each batch completely, before outputing a result. - /// If you are working on an inner sequence that is bursty in nature, where items arrives in waves, you should use which attempts to minimize the latency by outputing incomplete batches if needed. - /// - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Batch([NotNull] this IFdbAsyncEnumerable source, int batchSize) - { - if (source == null) throw new ArgumentNullException("source"); - if (batchSize <= 0) throw new ArgumentOutOfRangeException("batchSize", batchSize, "Batch size must be at least one."); - - return new FdbBatchingAsyncIterator(source, batchSize); - } - - #endregion - - #region Distinct... - - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Distinct([NotNull] this IFdbAsyncEnumerable source, IEqualityComparer comparer = null) - { - if (source == null) throw new ArgumentNullException("source"); - comparer = comparer ?? 
EqualityComparer.Default; - - return new FdbDistinctAsyncIterator(source, comparer); - } - - #endregion - - #region OrderBy... - - [NotNull] - public static IFdbAsyncOrderedEnumerable OrderBy([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func keySelector, IComparer comparer = null) - { - if (source == null) throw new ArgumentNullException("source"); - if (keySelector == null) throw new ArgumentNullException("keySelector"); - comparer = comparer ?? Comparer.Default; - - return new OrderedSequence(source, keySelector, comparer, descending: false, parent: null); - } - - [NotNull] - public static IFdbAsyncOrderedEnumerable OrderByDescending([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func keySelector, IComparer comparer = null) - { - if (source == null) throw new ArgumentNullException("source"); - if (keySelector == null) throw new ArgumentNullException("keySelector"); - comparer = comparer ?? Comparer.Default; - - return new OrderedSequence(source, keySelector, comparer, descending: true, parent: null); - } - - [NotNull] - public static IFdbAsyncOrderedEnumerable ThenBy([NotNull] this IFdbAsyncOrderedEnumerable source, [NotNull] Func keySelector, IComparer comparer = null) - { - if (source == null) throw new ArgumentNullException("keySelector"); - return source.CreateOrderedEnumerable(keySelector, comparer, descending: false); - } - - [NotNull] - public static IFdbAsyncOrderedEnumerable ThenByDescending([NotNull] this IFdbAsyncOrderedEnumerable source, [NotNull] Func keySelector, IComparer comparer = null) - { - if (source == null) throw new ArgumentNullException("keySelector"); - return source.CreateOrderedEnumerable(keySelector, comparer, descending: true); - } - - #endregion - - // If you are bored, maybe consider adding: - // - DefaultIfEmpty - // - Zip - // - OrderBy and OrderBy - // - GroupBy - - #endregion - - #region Leaving the Monad... 
- - /// Execute an action for each element of an async sequence - public static Task ForEachAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Action action, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (action == null) throw new ArgumentNullException("action"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.ExecuteAsync(action, ct); - } - else - { - return Run(source, FdbAsyncMode.All, action, ct); - } - } - - /// Execute an async action for each element of an async sequence - public static Task ForEachAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func asyncAction, CancellationToken ct = default(CancellationToken)) - { - if (asyncAction == null) throw new ArgumentNullException("asyncAction"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.ExecuteAsync(TaskHelpers.WithCancellation(asyncAction), ct); - } - else - { - return ForEachAsync(source, TaskHelpers.WithCancellation(asyncAction), ct); - } - } - - /// Execute an async action for each element of an async sequence - public static Task ForEachAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func asyncAction, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncAction == null) throw new ArgumentNullException("asyncAction"); - - var iterator = source as FdbAsyncIterator; - if (iterator != null) - { - return iterator.ExecuteAsync(asyncAction, ct); - } - else - { - return Run(source, FdbAsyncMode.All, asyncAction, ct); - } - } - - /// Create a list from an async sequence. 
- public static Task> ToListAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - - return AggregateAsync( - source, - new Buffer(), - (buffer, x) => buffer.Add(x), - (buffer) => buffer.ToList(), - ct - ); - } - - /// Create an array from an async sequence. - public static Task ToArrayAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken cancellationToken = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - - return AggregateAsync( - source, - new Buffer(), - (buffer, x) => buffer.Add(x), - (buffer) => buffer.ToArray(), - cancellationToken - ); - } - - /// Create an array from an async sequence, knowing a rough estimation of the number of elements. - internal static Task ToArrayAsync([NotNull] this IFdbAsyncEnumerable source, int estimatedSize, CancellationToken cancellationToken = default(CancellationToken)) - { - Contract.Requires(source != null && estimatedSize >= 0); - - return AggregateAsync( - source, - new List(estimatedSize), - (buffer, x) => buffer.Add(x), - (buffer) => buffer.ToArray(), - cancellationToken - ); - } - - /// Creates a Dictionary from an async sequence according to a specified key selector function and key comparer. - public static Task> ToDictionaryAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func keySelector, IEqualityComparer comparer = null, CancellationToken cancellationToken = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (keySelector == null) throw new ArgumentNullException("keySelector"); - - return AggregateAsync( - source, - new Dictionary(comparer ?? 
EqualityComparer.Default), - (results, x) => { results[keySelector(x)] = x; }, - cancellationToken - ); - } - - /// Creates a Dictionary from an async sequence according to a specified key selector function, a comparer, and an element selector function. - public static Task> ToDictionaryAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func keySelector, [NotNull, InstantHandle] Func elementSelector, IEqualityComparer comparer = null, CancellationToken cancellationToken = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (keySelector == null) throw new ArgumentNullException("keySelector"); - if (elementSelector == null) throw new ArgumentNullException("elementSelector"); - - return AggregateAsync( - source, - new Dictionary(comparer ?? EqualityComparer.Default), - (results, x) => { results[keySelector(x)] = elementSelector(x); }, - cancellationToken - ); - } - - /// Creates a Dictionary from an async sequence of pairs of keys and values. - public static Task> ToDictionaryAsync([NotNull] this IFdbAsyncEnumerable> source, IEqualityComparer comparer = null, CancellationToken cancellationToken = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - cancellationToken.ThrowIfCancellationRequested(); - - return AggregateAsync( - source, - new Dictionary(comparer ?? EqualityComparer.Default), - (results, x) => { results[x.Key] = x.Value; }, - cancellationToken - ); - } - - /// Applies an accumulator function over an async sequence. 
- public static async Task AggregateAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func aggregator, CancellationToken cancellationToken = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (aggregator == null) throw new ArgumentNullException("aggregator"); - - cancellationToken.ThrowIfCancellationRequested(); - using (var iterator = source.GetEnumerator(FdbAsyncMode.All)) - { - Contract.Assert(iterator != null, "The sequence returned a null async iterator"); - - if (!(await iterator.MoveNext(cancellationToken).ConfigureAwait(false))) - { - throw new InvalidOperationException("The sequence was empty"); - } - - var item = iterator.Current; - while (await iterator.MoveNext(cancellationToken).ConfigureAwait(false)) - { - item = aggregator(item, iterator.Current); - } - - return item; - } - } - - /// Applies an accumulator function over an async sequence. - public static async Task AggregateAsync([NotNull] this IFdbAsyncEnumerable source, TAccumulate seed, [NotNull, InstantHandle] Func aggregator, CancellationToken cancellationToken = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (aggregator == null) throw new ArgumentNullException("aggregator"); - - var accumulate = seed; - await ForEachAsync(source, (x) => { accumulate = aggregator(accumulate, x); }, cancellationToken).ConfigureAwait(false); - return accumulate; - } - - /// Applies an accumulator function over an async sequence. 
- public static async Task AggregateAsync([NotNull] this IFdbAsyncEnumerable source, TAccumulate seed, [NotNull, InstantHandle] Action aggregator, CancellationToken cancellationToken = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (aggregator == null) throw new ArgumentNullException("aggregator"); - - var accumulate = seed; - await ForEachAsync(source, (x) => { aggregator(accumulate, x); }, cancellationToken).ConfigureAwait(false); - return accumulate; - } - - /// Applies an accumulator function over an async sequence. - public static async Task AggregateAsync([NotNull] this IFdbAsyncEnumerable source, TAccumulate seed, [NotNull, InstantHandle] Func aggregator, [NotNull, InstantHandle] Func resultSelector, CancellationToken cancellationToken = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (aggregator == null) throw new ArgumentNullException("aggregator"); - if (resultSelector == null) throw new ArgumentNullException("resultSelector"); - - var accumulate = seed; - await ForEachAsync(source, (x) => { accumulate = aggregator(accumulate, x); }, cancellationToken).ConfigureAwait(false); - return resultSelector(accumulate); - } - - /// Applies an accumulator function over an async sequence. 
- public static async Task AggregateAsync([NotNull] this IFdbAsyncEnumerable source, TAccumulate seed, [NotNull, InstantHandle] Action aggregator, [NotNull, InstantHandle] Func resultSelector, CancellationToken cancellationToken = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (aggregator == null) throw new ArgumentNullException("aggregator"); - if (resultSelector == null) throw new ArgumentNullException("resultSelector"); - - var accumulate = seed; - await ForEachAsync(source, (x) => aggregator(accumulate, x), cancellationToken); - return resultSelector(accumulate); - } - - /// Returns the first element of an async sequence, or an exception if it is empty - public static Task FirstAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - ct.ThrowIfCancellationRequested(); - - var rq = source as FdbRangeQuery; - if (rq != null) return rq.FirstAsync(); - - return Head(source, single: false, orDefault: false, ct: ct); - } - - /// Returns the first element of an async sequence, or the default value for the type if it is empty - public static Task FirstOrDefaultAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - ct.ThrowIfCancellationRequested(); - - var rq = source as FdbRangeQuery; - if (rq != null) return rq.FirstOrDefaultAsync(); - - return Head(source, single: false, orDefault: true, ct: ct); - } - - /// Returns the first and only element of an async sequence, or an exception if it is empty or have two or more elements - /// Will need to call MoveNext at least twice to ensure that there is no second element. 
- public static Task SingleAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - ct.ThrowIfCancellationRequested(); - - var rq = source as FdbRangeQuery; - if (rq != null) return rq.SingleAsync(); - - return Head(source, single: true, orDefault: false, ct: ct); - } - - /// Returns the first and only element of an async sequence, the default value for the type if it is empty, or an exception if it has two or more elements - /// Will need to call MoveNext at least twice to ensure that there is no second element. - public static Task SingleOrDefaultAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - ct.ThrowIfCancellationRequested(); - - var rq = source as FdbRangeQuery; - if (rq != null) return rq.SingleOrDefaultAsync(); - - return Head(source, single: true, orDefault: true, ct: ct); - } - - /// Returns the last element of an async sequence, or an exception if it is empty - public static async Task LastAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - ct.ThrowIfCancellationRequested(); - - var rq = source as FdbRangeQuery; - if (rq != null) return await rq.LastAsync(); - - bool found = false; - T last = default(T); - - await ForEachAsync(source, (x) => { found = true; last = x; }, ct).ConfigureAwait(false); - - if (!found) throw new InvalidOperationException("The sequence was empty"); - return last; - } - - /// Returns the last element of an async sequence, or the default value for the type if it is empty - public static async Task LastOrDefaultAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - 
ct.ThrowIfCancellationRequested(); - - var rq = source as FdbRangeQuery; - if (rq != null) return await rq.LastOrDefaultAsync(); - - bool found = false; - T last = default(T); - - await ForEachAsync(source, (x) => { found = true; last = x; }, ct).ConfigureAwait(false); - - return found ? last : default(T); - } - - /// Returns the element at a specific location of an async sequence, or an exception if there are not enough elements - public static async Task ElementAtAsync([NotNull] this IFdbAsyncEnumerable source, int index, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (index < 0) throw new ArgumentOutOfRangeException("index"); - ct.ThrowIfCancellationRequested(); - - var rq = source as FdbRangeQuery; - if (rq != null) return await rq.Skip(index).SingleAsync(); - - int counter = index; - T item = default(T); - await Run( - source, - FdbAsyncMode.All, - (x) => - { - if (counter-- == 0) { item = x; return false; } - return true; - }, - ct - ).ConfigureAwait(false); - - if (counter >= 0) throw new InvalidOperationException("The sequence was too small"); - return item; - } - - /// Returns the element at a specific location of an async sequence, or the default value for the type if it there are not enough elements - public static async Task ElementAtOrDefaultAsync([NotNull] this IFdbAsyncEnumerable source, int index, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (index < 0) throw new ArgumentOutOfRangeException("index"); - ct.ThrowIfCancellationRequested(); - - var rq = source as FdbRangeQuery; - if (rq != null) return await rq.Skip(index).SingleAsync(); - - int counter = index; - T item = default(T); - - //TODO: use ExecuteAsync() if the source is an Iterator! 
- await Run( - source, - FdbAsyncMode.All, - (x) => - { - if (counter-- == 0) { item = x; return false; } - return true; - }, - ct - ).ConfigureAwait(false); - - if (counter >= 0) return default(T); - return item; - } - - /// Returns the number of elements in an async sequence. - public static async Task CountAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - ct.ThrowIfCancellationRequested(); - - int count = 0; - - await ForEachAsync(source, (_) => { ++count; }, ct).ConfigureAwait(false); - - return count; - } - - /// Returns a number that represents how many elements in the specified async sequence satisfy a condition. - public static async Task CountAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (predicate == null) throw new ArgumentNullException("predicate"); - - int count = 0; - - await ForEachAsync(source, (x) => { if (predicate(x)) ++count; }, ct).ConfigureAwait(false); - - return count; - } - - /// Returns the sum of all elements in the specified async sequence. - public static async Task SumAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - - ulong sum = 0; - - await ForEachAsync(source, (x) => { sum += x; }, ct).ConfigureAwait(false); - - return sum; - } - - /// Returns the sum of all elements in the specified async sequence that satisfy a condition. 
- public static async Task SumAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (predicate == null) throw new ArgumentNullException("predicate"); - - ulong sum = 0; - - await ForEachAsync(source, (x) => { if (predicate(x)) sum += x; }, ct).ConfigureAwait(false); - - return sum; - } - - /// Returns the sum of all elements in the specified async sequence. - public static async Task SumAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - - long sum = 0; - - await ForEachAsync(source, (x) => { sum += x; }, ct).ConfigureAwait(false); - - return sum; - } - - /// Returns the sum of all elements in the specified async sequence that satisfy a condition. - public static async Task SumAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (predicate == null) throw new ArgumentNullException("predicate"); - - long sum = 0; - - await ForEachAsync(source, (x) => { if (predicate(x)) sum += x; }, ct).ConfigureAwait(false); - - return sum; - } - - /// Returns the smallest value in the specified async sequence - public static async Task MinAsync([NotNull] this IFdbAsyncEnumerable source, IComparer comparer = null, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - comparer = comparer ?? 
Comparer.Default; - - bool found = false; - T min = default(T); - - await ForEachAsync( - source, - (x) => - { - if (!found || comparer.Compare(x, min) < 0) - { - min = x; - found = true; - } - }, - ct - ).ConfigureAwait(false); - - if (!found) throw new InvalidOperationException("The sequence was empty"); - return min; - } - - /// Returns the largest value in the specified async sequence - public static async Task MaxAsync([NotNull] this IFdbAsyncEnumerable source, IComparer comparer = null, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - comparer = comparer ?? Comparer.Default; - - bool found = false; - T max = default(T); - - await ForEachAsync( - source, - (x) => - { - if (!found || comparer.Compare(x, max) > 0) - { - max = x; - found = true; - } - }, - ct - ).ConfigureAwait(false); - - if (!found) throw new InvalidOperationException("The sequence was empty"); - return max; - } - - /// Determines whether an async sequence contains any elements. - /// This is the logical equivalent to "source.Count() > 0" but can be better optimized by some providers - public static async Task AnyAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - ct.ThrowIfCancellationRequested(); - - using (var iterator = source.GetEnumerator(FdbAsyncMode.Head)) - { - return await iterator.MoveNext(ct).ConfigureAwait(false); - } - } - - /// Determines whether any element of an async sequence satisfies a condition. 
- public static async Task AnyAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (predicate == null) throw new ArgumentNullException("predicate"); - ct.ThrowIfCancellationRequested(); - - using (var iterator = source.GetEnumerator(FdbAsyncMode.Head)) - { - while (await iterator.MoveNext(ct).ConfigureAwait(false)) - { - if (predicate(iterator.Current)) return true; - } - } - return false; - } - - /// Determines wether an async sequence contains no elements at all. - /// This is the logical equivalent to "source.Count() == 0" or "!source.Any()" but can be better optimized by some providers - public static async Task NoneAsync([NotNull] this IFdbAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - ct.ThrowIfCancellationRequested(); - - using (var iterator = source.GetEnumerator(FdbAsyncMode.Head)) - { - return !(await iterator.MoveNext(ct).ConfigureAwait(false)); - } - } - - /// Determines whether none of the elements of an async sequence satisfies a condition. - public static async Task NoneAsync([NotNull] this IFdbAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) - { - if (source == null) throw new ArgumentNullException("source"); - if (predicate == null) throw new ArgumentNullException("predicate"); - ct.ThrowIfCancellationRequested(); - - using (var iterator = source.GetEnumerator(FdbAsyncMode.Head)) - { - while (await iterator.MoveNext(ct).ConfigureAwait(false)) - { - if (predicate(iterator.Current)) return false; - } - } - return true; - } - - #endregion - - #region Query Statistics... - - //TODO: move this somewhere else? 
- - public class QueryStatistics - { - public QueryStatistics() - { } - - public QueryStatistics(TData value) - { - this.Value = value; - } - - public TData Value { get; protected set; } - - public void Update(TData newValue) - { - this.Value = newValue; - } - } - - public class KeyValueSize - { - /// Total number of pairs of keys and values that have flowed through this point - public long Count { get; private set; } - - /// Total size of all keys and values combined - public long Size { get { return checked(this.KeySize + this.ValueSize); } } - - /// Total size of all keys combined - public long KeySize { get; private set; } - - /// Total size of all values combined - public long ValueSize { get; private set; } - - public void Add(int keySize, int valueSize) - { - this.Count++; - this.KeySize = checked(keySize + this.KeySize); - this.ValueSize = checked(valueSize + this.ValueSize); - } - } - - public class DataSize - { - /// Total number of items that have flowed through this point - public long Count { get; private set; } - - /// Total size of all items that have flowed through this point - public long Size { get; private set; } - - public void Add(int size) - { - this.Count++; - this.Size = checked(size + this.Size); - } - } - - /// Measure the number of items that pass through this point of the query - /// The values returned in are only safe to read once the query has ended - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable WithCountStatistics([NotNull] this IFdbAsyncEnumerable source, out QueryStatistics counter) - { - if (source == null) throw new ArgumentNullException("source"); - - var signal = new QueryStatistics(0); - counter = signal; - - // to count, we just increment the signal each type a value flows through here - Func wrapped = (x) => - { - signal.Update(checked(signal.Value + 1)); - return x; - }; - - return Select(source, wrapped); - } - - /// Measure the number and size of slices that pass through this point of the query - /// The 
values returned in are only safe to read once the query has ended - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable> WithSizeStatistics([NotNull] this IFdbAsyncEnumerable> source, out QueryStatistics statistics) - { - if (source == null) throw new ArgumentNullException("source"); - - var data = new KeyValueSize(); - statistics = new QueryStatistics(data); - - // to count, we just increment the signal each type a value flows through here - Func, KeyValuePair> wrapped = (kvp) => - { - data.Add(kvp.Key.Count, kvp.Value.Count); - return kvp; - }; - - return Select(source, wrapped); - } - - /// Measure the number and sizes of the keys and values that pass through this point of the query - /// The values returned in are only safe to read once the query has ended - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable WithSizeStatistics([NotNull] this IFdbAsyncEnumerable source, out QueryStatistics statistics) - { - if (source == null) throw new ArgumentNullException("source"); - - var data = new DataSize(); - statistics = new QueryStatistics(data); - - // to count, we just increment the signal each type a value flows through here - Func wrapped = (x) => - { - data.Add(x.Count); - return x; - }; - - return Select(source, wrapped); - } - - /// Execute an action on each item passing through the sequence, without modifying the original sequence - /// The is execute inline before passing the item down the line, and should not block - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Observe([NotNull] this IFdbAsyncEnumerable source, [NotNull] Action handler) - { - if (source == null) throw new ArgumentNullException("source"); - if (handler == null) throw new ArgumentNullException("handler"); - - return new FdbObserverIterator(source, new AsyncObserverExpression(handler)); - } - - /// Execute an action on each item passing through the sequence, without modifying the original sequence - /// The is execute inline before passing the item down the line, and 
should not block - [NotNull, LinqTunnel] - public static IFdbAsyncEnumerable Observe([NotNull] this IFdbAsyncEnumerable source, [NotNull] Func asyncHandler) - { - if (source == null) throw new ArgumentNullException("source"); - if (asyncHandler == null) throw new ArgumentNullException("asyncHandler"); - - return new FdbObserverIterator(source, new AsyncObserverExpression(asyncHandler)); - } - - #endregion - - } - -} diff --git a/FoundationDB.Client/Native/FdbFuture.cs b/FoundationDB.Client/Native/FdbFuture.cs deleted file mode 100644 index e53fb9500..000000000 --- a/FoundationDB.Client/Native/FdbFuture.cs +++ /dev/null @@ -1,518 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -// enable this to help debug Futures -#undef DEBUG_FUTURES - -namespace FoundationDB.Client.Native -{ - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.Collections.Concurrent; - using System.Collections.Generic; - using System.Diagnostics; - using System.Runtime.CompilerServices; - using System.Threading; - using System.Threading.Tasks; - - /// Helper class to create FDBFutures - internal static class FdbFuture - { - - public static class Flags - { - /// The future has completed (either success or failure) - public const int COMPLETED = 1; - - /// A completion/failure/cancellation has been posted on the thread pool - public const int HAS_POSTED_ASYNC_COMPLETION = 2; - - /// The future has been cancelled from an external source (manually, or via then CancellationTokeb) - public const int CANCELLED = 4; - - /// The resources allocated by this future have been released - public const int MEMORY_RELEASED = 8; - - /// The future has been constructed, and is listening for the callbacks - public const int READY = 64; - - /// Dispose has been called - public const int DISPOSED = 128; - } - - /// Create a new from an FDBFuture* pointer - /// Type of the result of the task - /// FDBFuture* pointer - /// Func that will be called to get the result once the future completes (and did not fail) - /// Optional cancellation token that can be used to cancel the future - /// Object that tracks the execution of the FDBFuture handle - [NotNull] 
- public static FdbFutureSingle FromHandle([NotNull] FutureHandle handle, [NotNull] Func selector, CancellationToken cancellationToken) - { - return new FdbFutureSingle(handle, selector, cancellationToken); - } - - /// Create a new from an array of FDBFuture* pointers - /// Type of the items of the arrayreturn by the task - /// Array of FDBFuture* pointers - /// Func that will be called for each future that complete (and did not fail) - /// Optional cancellation token that can be used to cancel the future - /// Object that tracks the execution of all the FDBFuture handles - [NotNull] - public static FdbFutureArray FromHandleArray([NotNull] FutureHandle[] handles, [NotNull] Func selector, CancellationToken cancellationToken) - { - return new FdbFutureArray(handles, selector, cancellationToken); - } - - /// Wrap a FDBFuture* pointer into a - /// Type of the result of the task - /// FDBFuture* pointer - /// Lambda that will be called once the future completes sucessfully, to extract the result from the future handle. - /// Optional cancellation token that can be used to cancel the future - /// Task that will either return the result of the continuation lambda, or an exception - public static Task CreateTaskFromHandle([NotNull] FutureHandle handle, [NotNull] Func continuation, CancellationToken cancellationToken) - { - return new FdbFutureSingle(handle, continuation, cancellationToken).Task; - } - - /// Wrap multiple handles into a single that returns an array of T - /// Type of the result of the task - /// Array of FDBFuture* pointers - /// Lambda that will be called once for each future that completes sucessfully, to extract the result from the future handle. - /// Optional cancellation token that can be used to cancel the future - /// Task that will either return all the results of the continuation lambdas, or an exception - /// If at least one future fails, the whole task will fail. 
- public static Task CreateTaskFromHandleArray([NotNull] FutureHandle[] handles, [NotNull] Func continuation, CancellationToken cancellationToken) - { - // Special case, because FdbFutureArray does not support empty arrays - //TODO: technically, there is no reason why FdbFutureArray would not accept an empty array. We should simplify this by handling the case in the ctor (we are already allocating something anyway...) - if (handles.Length == 0) return Task.FromResult(new T[0]); - - return new FdbFutureArray(handles, continuation, cancellationToken).Task; - } - - } - - /// Base class for all FDBFuture wrappers - /// Type of the Task's result - [DebuggerDisplay("Flags={m_flags}, State={this.Task.Status}")] - internal abstract class FdbFuture : TaskCompletionSource, IDisposable - { - - #region Private Members... - - /// Flags of the future (bit field of FLAG_xxx values) - private int m_flags; - - /// Future key in the callback dictionary - protected IntPtr m_key; - - /// Optionnal registration on the parent Cancellation Token - /// Is only valid if FLAG_HAS_CTR is set - protected CancellationTokenRegistration m_ctr; - - #endregion - - #region State Management... 
- - internal bool HasFlag(int flag) - { - return (Volatile.Read(ref m_flags) & flag) == flag; - } - - internal bool HasAnyFlags(int flags) - { - return (Volatile.Read(ref m_flags) & flags) != 0; - } - - protected void SetFlag(int flag) - { - var flags = m_flags; - Interlocked.MemoryBarrier(); - m_flags = flags | flag; - } - - protected bool TrySetFlag(int flag) - { - var wait = new SpinWait(); - while (true) - { - var flags = Volatile.Read(ref m_flags); - if ((flags & flag) != 0) - { - return false; - } - if (Interlocked.CompareExchange(ref m_flags, flags | flag, flags) == flags) - { - return true; - } - wait.SpinOnce(); - } - } - - protected bool TryCleanup() - { - // We try to cleanup the future handle as soon as possible, meaning as soon as we have the result, or an error, or a cancellation - - if (TrySetFlag(FdbFuture.Flags.COMPLETED)) - { - DoCleanup(); - return true; - } - return false; - } - - private void DoCleanup() - { - try - { - // unsubscribe from the parent cancellation token if there was one - UnregisterCancellationRegistration(); - - // ensure that the task always complete ! - // note: always defer the completion on the threadpool, because we don't want to dead lock here (we can be called by Dispose) - if (!this.Task.IsCompleted && TrySetFlag(FdbFuture.Flags.HAS_POSTED_ASYNC_COMPLETION)) - { - PostCancellationOnThreadPool(this); - } - - // The only surviving value after this would be a Task and an optional WorkItem on the ThreadPool that will signal it... - } - finally - { - CloseHandles(); - } - } - - /// Close all the handles managed by this future - protected abstract void CloseHandles(); - - /// Cancel all the handles managed by this future - protected abstract void CancelHandles(); - - /// Release all memory allocated by this future - protected abstract void ReleaseMemory(); - - /// Set the result of this future - /// Result of the future - /// If true, called from the network thread callback and will defer the operation on the ThreadPool. 
If false, may run the continuations inline. - protected void SetResult(T result, bool fromCallback) - { - if (!fromCallback) - { - this.TrySetResult(result); - } - else if (TrySetFlag(FdbFuture.Flags.HAS_POSTED_ASYNC_COMPLETION)) - { - PostCompletionOnThreadPool(this, result); - } - } - - /// Fault the future's Task - /// Error that will be the result of the task - /// If true, called from the network thread callback and will defer the operation on the ThreadPool. If false, may run the continuations inline. - protected void SetFaulted(Exception e, bool fromCallback) - { - if (!fromCallback) - { - this.TrySetException(e); - } - else if (TrySetFlag(FdbFuture.Flags.HAS_POSTED_ASYNC_COMPLETION)) - { - PostFailureOnThreadPool(this, e); - } - } - - /// Fault the future's Task - /// Error that will be the result of the task - /// If true, called from the network thread callback and will defer the operation on the ThreadPool. If false, may run the continuations inline. - protected void SetFaulted(IEnumerable errors, bool fromCallback) - { - if (!fromCallback) - { - this.TrySetException(errors); - } - else if (TrySetFlag(FdbFuture.Flags.HAS_POSTED_ASYNC_COMPLETION)) - { - PostFailureOnThreadPool(this, errors); - } - } - - /// Cancel the future's Task - /// If true, called from the network thread callback and will defer the operation on the ThreadPool. If false, may run the continuations inline. 
- protected void SetCanceled(bool fromCallback) - { - if (!fromCallback) - { - this.TrySetCanceled(); - } - else if (TrySetFlag(FdbFuture.Flags.HAS_POSTED_ASYNC_COMPLETION)) - { - PostCancellationOnThreadPool(this); - } - } - - /// Defer setting the result of a TaskCompletionSource on the ThreadPool - private static void PostCompletionOnThreadPool(TaskCompletionSource future, T result) - { - ThreadPool.UnsafeQueueUserWorkItem( - (_state) => - { - var prms = (Tuple, T>)_state; - prms.Item1.TrySetResult(prms.Item2); - }, - Tuple.Create(future, result) - ); - } - - /// Defer failing a TaskCompletionSource on the ThreadPool - private static void PostFailureOnThreadPool(TaskCompletionSource future, Exception error) - { - ThreadPool.UnsafeQueueUserWorkItem( - (_state) => - { - var prms = (Tuple, Exception>)_state; - prms.Item1.TrySetException(prms.Item2); - }, - Tuple.Create(future, error) - ); - } - - /// Defer failing a TaskCompletionSource on the ThreadPool - private static void PostFailureOnThreadPool(TaskCompletionSource future, IEnumerable errors) - { - ThreadPool.UnsafeQueueUserWorkItem( - (_state) => - { - var prms = (Tuple, IEnumerable>)_state; - prms.Item1.TrySetException(prms.Item2); - }, - Tuple.Create(future, errors) - ); - } - - /// Defer cancelling a TaskCompletionSource on the ThreadPool - private static void PostCancellationOnThreadPool(TaskCompletionSource future) - { - ThreadPool.UnsafeQueueUserWorkItem( - (_state) => ((TaskCompletionSource)_state).TrySetCanceled(), - future - ); - } - - #endregion - - #region Callbacks... 
- - /// List of all pending futures that have not yet completed - private static readonly ConcurrentDictionary> s_futures = new ConcurrentDictionary>(); - - /// Internal counter to generated a unique parameter value for each futures - private static long s_futureCounter; - - /// Register a future in the callback context and return the corresponding callback parameter - /// Future instance - /// Parameter that can be passed to FutureSetCallback and that uniquely identify this future. - /// The caller MUST call ClearCallbackHandler to ensure that the future instance is removed from the list - internal static IntPtr RegisterCallback([NotNull] FdbFuture future) - { - Contract.Requires(future != null); - - // generate a new unique id for this future, that will be use to lookup the future instance in the callback handler - long id = Interlocked.Increment(ref s_futureCounter); - var prm = new IntPtr(id); // note: we assume that we can only run in 64-bit mode, so it is safe to cast a long into an IntPtr - // critical region - try { } - finally - { - Volatile.Write(ref future.m_key, prm); -#if DEBUG_FUTURES - Contract.Assert(!s_futures.ContainsKey(prm)); -#endif - s_futures[prm.ToInt64()] = future; - Interlocked.Increment(ref DebugCounters.CallbackHandlesTotal); - Interlocked.Increment(ref DebugCounters.CallbackHandles); - } - return prm; - } - - /// Remove a future from the callback handler dictionary - /// Future that has just completed, or is being destroyed - internal static void UnregisterCallback([NotNull] FdbFuture future) - { - Contract.Requires(future != null); - - // critical region - try - { } - finally - { - var key = Interlocked.Exchange(ref future.m_key, IntPtr.Zero); - if (key != IntPtr.Zero) - { - FdbFuture _; - if (s_futures.TryRemove(key.ToInt64(), out _)) - { - Interlocked.Decrement(ref DebugCounters.CallbackHandles); - } - } - } - } - - internal static FdbFuture GetFutureFromCallbackParameter(IntPtr parameter) - { - FdbFuture future; - if 
(s_futures.TryGetValue(parameter.ToInt64(), out future)) - { - if (future != null && Volatile.Read(ref future.m_key) == parameter) - { - return future; - } -#if DEBUG_FUTURES - // If you breakpoint here, that means that a future callback fired but was not able to find a matching registration - // => either the FdbFuture was incorrectly disposed, or there is some problem in the callback dictionary - if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break(); -#endif - } - return null; - } - - #endregion - - #region Cancellation... - - protected void RegisterForCancellation(CancellationToken cancellationToken) - { - //note: if the token is already cancelled, the callback handler will run inline and any exception would bubble up here - //=> this is not a problem because the ctor already has a try/catch that will clean up everything - m_ctr = cancellationToken.Register( - (_state) => { CancellationHandler(_state); }, - this, - false - ); - } - - protected void UnregisterCancellationRegistration() - { - // unsubscribe from the parent cancellation token if there was one - m_ctr.Dispose(); - m_ctr = default(CancellationTokenRegistration); - } - - private static void CancellationHandler(object state) - { - var future = state as FdbFuture; - if (future != null) - { -#if DEBUG_FUTURES - Debug.WriteLine("Future<" + typeof(T).Name + ">.Cancel(0x" + future.m_handle.Handle.ToString("x") + ") was called on thread #" + Thread.CurrentThread.ManagedThreadId.ToString()); -#endif - future.Cancel(); - } - } - - #endregion - - /// Return true if the future has completed (successfully or not) - public bool IsReady - { - get { return this.Task.IsCompleted; } - } - - /// Make the Future awaitable - public TaskAwaiter GetAwaiter() - { - return this.Task.GetAwaiter(); - } - - /// Try to abort the task (if it is still running) - public void Cancel() - { - if (HasAnyFlags(FdbFuture.Flags.DISPOSED | FdbFuture.Flags.COMPLETED | FdbFuture.Flags.CANCELLED)) - { - return; - } - 
- if (TrySetFlag(FdbFuture.Flags.CANCELLED)) - { - bool fromCallback = Fdb.IsNetworkThread; - try - { - if (!this.Task.IsCompleted) - { - CancelHandles(); - SetCanceled(fromCallback); - } - } - finally - { - TryCleanup(); - } - } - } - - /// Free memory allocated by this future after it has completed. - /// This method provides no benefit to most application code, and should only be called when attempting to write thread-safe custom layers. - public void Clear() - { - if (HasFlag(FdbFuture.Flags.DISPOSED)) - { - return; - } - - if (!this.Task.IsCompleted) - { - throw new InvalidOperationException("Cannot release memory allocated by a future that has not yet completed"); - } - - if (TrySetFlag(FdbFuture.Flags.MEMORY_RELEASED)) - { - ReleaseMemory(); - } - } - - public void Dispose() - { - if (TrySetFlag(FdbFuture.Flags.DISPOSED)) - { - try - { - TryCleanup(); - } - finally - { - if (Volatile.Read(ref m_key) != IntPtr.Zero) UnregisterCallback(this); - } - } - GC.SuppressFinalize(this); - } - - } - -} diff --git a/FoundationDB.Client/Native/FdbFutureArray.cs b/FoundationDB.Client/Native/FdbFutureArray.cs deleted file mode 100644 index 0a7989478..000000000 --- a/FoundationDB.Client/Native/FdbFutureArray.cs +++ /dev/null @@ -1,323 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. 
- * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client.Native -{ - using JetBrains.Annotations; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Threading; - - /// FDBFuture[] wrapper - /// Type of result - internal sealed class FdbFutureArray : FdbFuture - { - // Wraps several FDBFuture* handles and return all the results at once - - #region Private Members... - - /// Value of the 'FDBFuture*' - private readonly FutureHandle[] m_handles; - - /// Counter of callbacks that still need to fire. - private int m_pending; - - /// Lambda used to extract the result of this FDBFuture - private readonly Func m_resultSelector; - - #endregion - - #region Constructors... 
- - internal FdbFutureArray([NotNull] FutureHandle[] handles, [NotNull] Func selector, CancellationToken cancellationToken) - { - if (handles == null) throw new ArgumentNullException("handles"); - if (handles.Length == 0) throw new ArgumentException("Handle array cannot be empty", "handles"); - if (selector == null) throw new ArgumentNullException("selector"); - - m_handles = handles; - m_resultSelector = selector; - - bool abortAllHandles = false; - - try - { - if (cancellationToken.IsCancellationRequested) - { // already cancelled, we must abort everything - - SetFlag(FdbFuture.Flags.COMPLETED); - abortAllHandles = true; - m_resultSelector = null; - this.TrySetCanceled(); - return; - } - - // add this instance to the list of pending futures - var prm = RegisterCallback(this); - - foreach (var handle in handles) - { - - if (FdbNative.FutureIsReady(handle)) - { // this handle is already done - continue; - } - - Interlocked.Increment(ref m_pending); - - // register the callback handler - var err = FdbNative.FutureSetCallback(handle, CallbackHandler, prm); - if (Fdb.Failed(err)) - { // uhoh - Debug.WriteLine("Failed to set callback for Future<" + typeof(T).Name + "> 0x" + handle.Handle.ToString("x") + " !!!"); - throw Fdb.MapToException(err); - } - } - - // allow the callbacks to handle completion - TrySetFlag(FdbFuture.Flags.READY); - - if (Volatile.Read(ref m_pending) == 0) - { // all callbacks have already fired (or all handles were already completed) - UnregisterCallback(this); - HandleCompletion(fromCallback: false); - m_resultSelector = null; - abortAllHandles = true; - SetFlag(FdbFuture.Flags.COMPLETED); - } - else if (cancellationToken.CanBeCanceled) - { // register for cancellation (if needed) - RegisterForCancellation(cancellationToken); - } - } - catch - { - // this is bad news, since we are in the constructor, we need to clear everything - SetFlag(FdbFuture.Flags.DISPOSED); - - UnregisterCancellationRegistration(); - - UnregisterCallback(this); - - 
abortAllHandles = true; - - // this is technically not needed, but just to be safe... - this.TrySetCanceled(); - - throw; - } - finally - { - if (abortAllHandles) - { - CloseHandles(handles); - } - } - GC.KeepAlive(this); - } - - #endregion - - protected override void CloseHandles() - { - CloseHandles(m_handles); - } - - protected override void CancelHandles() - { - CancelHandles(m_handles); - } - - protected override void ReleaseMemory() - { - var handles = m_handles; - if (handles != null) - { - foreach (var handle in handles) - { - if (handle != null && !handle.IsClosed && !handle.IsInvalid) - { - //REVIEW: there is a possibility of a race condition with Dispoe() that could potentially call FutureDestroy(handle) at the same time (not verified) - FdbNative.FutureReleaseMemory(handle); - } - } - } - } - - private static void CloseHandles(FutureHandle[] handles) - { - if (handles != null) - { - foreach (var handle in handles) - { - if (handle != null) - { - //note: Dispose() will be a no-op if already called - handle.Dispose(); - } - } - } - } - - private static void CancelHandles(FutureHandle[] handles) - { - if (handles != null) - { - foreach (var handle in handles) - { - if (handle != null && !handle.IsClosed && !handle.IsInvalid) - { - //REVIEW: there is a possibility of a race condition with Dispoe() that could potentially call FutureDestroy(handle) at the same time (not verified) - FdbNative.FutureCancel(handle); - } - } - } - } - - /// Cached delegate of the future completion callback handler - private static readonly FdbNative.FdbFutureCallback CallbackHandler = FutureCompletionCallback; - - /// Handler called when a FDBFuture becomes ready - /// Handle on the future that became ready - /// Paramter to the callback (unused) - private static void FutureCompletionCallback(IntPtr futureHandle, IntPtr parameter) - { -#if DEBUG_FUTURES - Debug.WriteLine("Future<" + typeof(T).Name + ">.Callback(0x" + futureHandle.ToString("x") + ", " + parameter.ToString("x") + 
") has fired on thread #" + Thread.CurrentThread.ManagedThreadId.ToString()); -#endif - - var future = (FdbFutureArray)GetFutureFromCallbackParameter(parameter); - - if (future != null && Interlocked.Decrement(ref future.m_pending) == 0) - { // the last future handle has fired, we can proceed to read all the results - - if (future.HasFlag(FdbFuture.Flags.READY)) - { - UnregisterCallback(future); - try - { - future.HandleCompletion(fromCallback: true); - } - catch(Exception) - { - //TODO ? - } - } - // else, the ctor will handle that - } - } - - /// Update the Task with the state of a ready Future - /// If true, the method is called from the network thread and must defer the continuations from the Thread Pool - /// True if we got a result, or false in case of error (or invalid state) - private void HandleCompletion(bool fromCallback) - { - if (HasAnyFlags(FdbFuture.Flags.DISPOSED | FdbFuture.Flags.COMPLETED)) - { - return; - } - -#if DEBUG_FUTURES - Debug.WriteLine("FutureArray<" + typeof(T).Name + ">.Callback(...) handling completion on thread #" + Thread.CurrentThread.ManagedThreadId.ToString()); -#endif - - try - { - UnregisterCancellationRegistration(); - - List errors = null; - bool cancellation = false; - var selector = m_resultSelector; - - var results = selector != null ? new T[m_handles.Length] : null; - - for (int i = 0; i < m_handles.Length; i++) - { - var handle = m_handles[i]; - - if (handle != null && !handle.IsClosed && !handle.IsInvalid) - { - FdbError err = FdbNative.FutureGetError(handle); - if (Fdb.Failed(err)) - { // it failed... - if (err != FdbError.OperationCancelled) - { // get the exception from the error code - var ex = Fdb.MapToException(err); - (errors ?? (errors = new List())).Add(ex); - } - else - { - cancellation = true; - break; - } - } - else - { // it succeeded... - // try to get the result... 
- if (selector != null) - { - //note: result selector will execute from network thread, but this should be our own code that only calls into some fdb_future_get_XXXX(), which should be safe... - results[i] = selector(handle); - } - } - } - } - - if (cancellation) - { // the transaction has been cancelled - SetCanceled(fromCallback); - } - else if (errors != null) - { // there was at least one error - SetFaulted(errors, fromCallback); - } - else - { // success - SetResult(results, fromCallback); - } - - } - catch (Exception e) - { // something went wrong - if (e is ThreadAbortException) - { - SetCanceled(fromCallback); - throw; - } - SetFaulted(e, fromCallback); - } - finally - { - TryCleanup(); - } - } - - } - -} diff --git a/FoundationDB.Client/Native/FdbFutureSingle.cs b/FoundationDB.Client/Native/FdbFutureSingle.cs deleted file mode 100644 index 48877f1ee..000000000 --- a/FoundationDB.Client/Native/FdbFutureSingle.cs +++ /dev/null @@ -1,267 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -#undef DEBUG_FUTURES - -namespace FoundationDB.Client.Native -{ - using JetBrains.Annotations; - using System; - using System.Diagnostics; - using System.Threading; - - /// FDBFuture wrapper - /// Type of result - internal sealed class FdbFutureSingle : FdbFuture - { - #region Private Members... - - /// Value of the 'FDBFuture*' - private readonly FutureHandle m_handle; - - /// Lambda used to extract the result of this FDBFuture - private readonly Func m_resultSelector; - - #endregion - - #region Constructors... - - internal FdbFutureSingle([NotNull] FutureHandle handle, [NotNull] Func selector, CancellationToken cancellationToken) - { - if (handle == null) throw new ArgumentNullException("handle"); - if (selector == null) throw new ArgumentNullException("selector"); - - m_handle = handle; - m_resultSelector = selector; - - try - { - - if (handle.IsInvalid) - { // it's dead, Jim ! 
- SetFlag(FdbFuture.Flags.COMPLETED); - m_resultSelector = null; - return; - } - - if (FdbNative.FutureIsReady(handle)) - { // either got a value or an error -#if DEBUG_FUTURES - Debug.WriteLine("Future<" + typeof(T).Name + "> 0x" + handle.Handle.ToString("x") + " was already ready"); -#endif - HandleCompletion(fromCallback: false); -#if DEBUG_FUTURES - Debug.WriteLine("Future<" + typeof(T).Name + "> 0x" + handle.Handle.ToString("x") + " completed inline"); -#endif - return; - } - - // register for cancellation (if needed) - if (cancellationToken.CanBeCanceled) - { - if (cancellationToken.IsCancellationRequested) - { // we have already been cancelled - -#if DEBUG_FUTURES - Debug.WriteLine("Future<" + typeof(T).Name + "> 0x" + handle.Handle.ToString("x") + " will complete later"); -#endif - - // Abort the future and simulate a Canceled task - SetFlag(FdbFuture.Flags.COMPLETED); - // note: we don't need to call fdb_future_cancel because fdb_future_destroy will take care of everything - handle.Dispose(); - // also, don't keep a reference on the callback because it won't be needed - m_resultSelector = null; - this.TrySetCanceled(); - return; - } - - // token still active - RegisterForCancellation(cancellationToken); - } - -#if DEBUG_FUTURES - Debug.WriteLine("Future<" + typeof(T).Name + "> 0x" + handle.Handle.ToString("x") + " will complete later"); -#endif - - TrySetFlag(FdbFuture.Flags.READY); - - // add this instance to the list of pending futures - var prm = RegisterCallback(this); - - // register the callback handler - var err = FdbNative.FutureSetCallback(handle, CallbackHandler, prm); - if (Fdb.Failed(err)) - { // uhoh -#if DEBUG_FUTURES - Debug.WriteLine("Failed to set callback for Future<" + typeof(T).Name + "> 0x" + handle.Handle.ToString("x") + " !!!"); -#endif - throw Fdb.MapToException(err); - } - } - catch - { - // this is bad news, since we are in the constructor, we need to clear everything - SetFlag(FdbFuture.Flags.DISPOSED); - 
UnregisterCancellationRegistration(); - UnregisterCallback(this); - - // kill the future handle - m_handle.Dispose(); - - // this is technically not needed, but just to be safe... - this.TrySetCanceled(); - - throw; - } - GC.KeepAlive(this); - } - - #endregion - - /// Cached delegate of the future completion callback handler - private static readonly FdbNative.FdbFutureCallback CallbackHandler = FutureCompletionCallback; - - /// Handler called when a FDBFuture becomes ready - /// Handle on the future that became ready - /// Paramter to the callback (unused) - private static void FutureCompletionCallback(IntPtr futureHandle, IntPtr parameter) - { -#if DEBUG_FUTURES - Debug.WriteLine("Future<" + typeof(T).Name + ">.Callback(0x" + futureHandle.ToString("x") + ", " + parameter.ToString("x") + ") has fired on thread #" + Thread.CurrentThread.ManagedThreadId.ToString()); -#endif - - var future = (FdbFutureSingle)GetFutureFromCallbackParameter(parameter); - if (future != null) - { - UnregisterCallback(future); - future.HandleCompletion(fromCallback: true); - } - } - - /// Update the Task with the state of a ready Future - /// If true, we are called from the network thread - /// True if we got a result, or false in case of error (or invalid state) - private void HandleCompletion(bool fromCallback) - { - // note: if fromCallback is true, we are running on the network thread - // this means that we have to signal the TCS from the threadpool, if not continuations on the task may run inline. 
- // this is very frequent when we are called with await, or ContinueWith(..., TaskContinuationOptions.ExecuteSynchronously) - - if (HasAnyFlags(FdbFuture.Flags.DISPOSED | FdbFuture.Flags.COMPLETED)) - { - return; - } - -#if DEBUG_FUTURES - var sw = Stopwatch.StartNew(); -#endif - try - { - var handle = m_handle; - if (handle != null && !handle.IsClosed && !handle.IsInvalid) - { - UnregisterCancellationRegistration(); - - FdbError err = FdbNative.FutureGetError(handle); - if (Fdb.Failed(err)) - { // it failed... -#if DEBUG_FUTURES - Debug.WriteLine("Future<" + typeof(T).Name + "> has FAILED: " + err); -#endif - if (err != FdbError.OperationCancelled) - { // get the exception from the error code - var ex = Fdb.MapToException(err); - SetFaulted(ex, fromCallback); - return; - } - //else: will be handle below - } - else - { // it succeeded... - // try to get the result... -#if DEBUG_FUTURES - Debug.WriteLine("Future<" + typeof(T).Name + "> has completed successfully"); -#endif - var selector = m_resultSelector; - if (selector != null) - { - //note: result selector will execute from network thread, but this should be our own code that only calls into some fdb_future_get_XXXX(), which should be safe... - var result = selector(handle); - SetResult(result, fromCallback); - return; - } - //else: it will be handled below - } - } - - // most probably the future was cancelled or we are shutting down... 
- SetCanceled(fromCallback); - } - catch (Exception e) - { // something went wrong - if (e is ThreadAbortException) - { - SetCanceled(fromCallback); - throw; - } - SetFaulted(e, fromCallback); - } - finally - { -#if DEBUG_FUTURES - sw.Stop(); - Debug.WriteLine("Future<" + typeof(T).Name + "> callback completed in " + sw.Elapsed.TotalMilliseconds.ToString() + " ms"); -#endif - TryCleanup(); - } - } - - protected override void CloseHandles() - { - var handle = m_handle; - if (handle != null) handle.Dispose(); - } - - protected override void CancelHandles() - { - var handle = m_handle; - //REVIEW: there is a possibility of a race condition with Dispose() that could potentially call FutureDestroy(handle) at the same time (not verified) - if (handle != null && !handle.IsClosed && !handle.IsInvalid) FdbNative.FutureCancel(handle); - } - - protected override void ReleaseMemory() - { - var handle = m_handle; - //REVIEW: there is a possibility of a race condition with Dispose() that could potentially call FutureDestroy(handle) at the same time (not verified) - if (handle != null && !handle.IsClosed && !handle.IsInvalid) FdbNative.FutureReleaseMemory(handle); - } - - } - -} diff --git a/FoundationDB.Client/Native/FdbKeyValue.cs b/FoundationDB.Client/Native/FdbKeyValue.cs index 38c997f62..93d343eee 100644 --- a/FoundationDB.Client/Native/FdbKeyValue.cs +++ b/FoundationDB.Client/Native/FdbKeyValue.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Client/Native/FdbNative.cs b/FoundationDB.Client/Native/FdbNative.cs index 4a63179c3..0b9df3e49 100644 --- a/FoundationDB.Client/Native/FdbNative.cs +++ b/FoundationDB.Client/Native/FdbNative.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -27,34 +27,35 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY #endregion // enable this to help debug native calls to fdbc.dll -#undef DEBUG_NATIVE_CALLS - -using FoundationDB.Client.Utils; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Runtime.CompilerServices; -using System.Runtime.ExceptionServices; -using System.Runtime.InteropServices; -using System.Text; +//#define DEBUG_NATIVE_CALLS namespace FoundationDB.Client.Native { + using System; + using System.Collections.Generic; + using System.IO; + using System.Runtime.CompilerServices; + using System.Runtime.ExceptionServices; + using System.Runtime.InteropServices; + using System.Text; + using System.Threading; + using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using FoundationDB.Client.Core; + internal static unsafe class FdbNative { public const int FDB_API_MIN_VERSION = 200; - public const int FDB_API_MAX_VERSION = 300; + public const int FDB_API_MAX_VERSION = 510; #if __MonoCS__ /// Name of the C API dll used for P/Invoking private const string FDB_C_DLL = "libfdb_c.so"; #else /// Name of the C API dll used for P/Invoking - private const string FDB_C_DLL = "fdb_c.dll"; + private const string FDB_C_DLL = "fdb_c"; #endif - /// Handle on the native FDB C API library private static readonly UnmanagedLibrary FdbCLib; @@ -97,7 +98,7 @@ internal static class NativeMethods // Cluster [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true)] - public static extern FutureHandle fdb_create_cluster([MarshalAs(UnmanagedType.LPStr)] string clusterFilePath); + public static extern IntPtr fdb_create_cluster([MarshalAs(UnmanagedType.LPStr)] string clusterFilePath); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] public static extern void fdb_cluster_destroy(IntPtr 
cluster); @@ -106,7 +107,7 @@ internal static class NativeMethods public static extern FdbError fdb_cluster_set_option(ClusterHandle cluster, FdbClusterOption option, byte* value, int valueLength); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true)] - public static extern FutureHandle fdb_cluster_create_database(ClusterHandle cluster, [MarshalAs(UnmanagedType.LPStr)] string dbName, int dbNameLength); + public static extern IntPtr fdb_cluster_create_database(ClusterHandle cluster, [MarshalAs(UnmanagedType.LPStr)] string dbName, int dbNameLength); // Database @@ -131,19 +132,19 @@ internal static class NativeMethods public static extern void fdb_transaction_set_read_version(TransactionHandle handle, long version); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FutureHandle fdb_transaction_get_read_version(TransactionHandle transaction); + public static extern IntPtr fdb_transaction_get_read_version(TransactionHandle transaction); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FutureHandle fdb_transaction_get(TransactionHandle transaction, byte* keyName, int keyNameLength, bool snapshot); + public static extern IntPtr fdb_transaction_get(TransactionHandle transaction, byte* keyName, int keyNameLength, bool snapshot); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FutureHandle fdb_transaction_get_addresses_for_key(TransactionHandle transaction, byte* keyName, int keyNameLength); + public static extern IntPtr fdb_transaction_get_addresses_for_key(TransactionHandle transaction, byte* keyName, int keyNameLength); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FutureHandle fdb_transaction_get_key(TransactionHandle transaction, byte* keyName, int keyNameLength, bool orEqual, int offset, bool snapshot); + 
public static extern IntPtr fdb_transaction_get_key(TransactionHandle transaction, byte* keyName, int keyNameLength, bool orEqual, int offset, bool snapshot); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FutureHandle fdb_transaction_get_range( + public static extern IntPtr fdb_transaction_get_range( TransactionHandle transaction, byte* beginKeyName, int beginKeyNameLength, bool beginOrEqual, int beginOffset, byte* endKeyName, int endKeyNameLength, bool endOrEqual, int endOffset, @@ -167,16 +168,19 @@ public static extern void fdb_transaction_clear_range( public static extern void fdb_transaction_atomic_op(TransactionHandle transaction, byte* keyName, int keyNameLength, byte* param, int paramLength, FdbMutationType operationType); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FutureHandle fdb_transaction_commit(TransactionHandle transaction); + public static extern IntPtr fdb_transaction_commit(TransactionHandle transaction); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] public static extern FdbError fdb_transaction_get_committed_version(TransactionHandle transaction, out long version); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FutureHandle fdb_transaction_watch(TransactionHandle transaction, byte* keyName, int keyNameLength); + public static extern IntPtr fdb_transaction_get_versionstamp(TransactionHandle transaction); + + [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] + public static extern IntPtr fdb_transaction_watch(TransactionHandle transaction, byte* keyName, int keyNameLength); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FutureHandle fdb_transaction_on_error(TransactionHandle transaction, FdbError error); + public static extern IntPtr fdb_transaction_on_error(TransactionHandle transaction, FdbError error); [DllImport(FDB_C_DLL, 
CallingConvention = CallingConvention.Cdecl)] public static extern void fdb_transaction_reset(TransactionHandle transaction); @@ -193,84 +197,114 @@ public static extern void fdb_transaction_clear_range( public static extern void fdb_future_destroy(IntPtr future); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern void fdb_future_cancel(FutureHandle future); + public static extern void fdb_future_cancel(IntPtr future); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern void fdb_future_release_memory(FutureHandle future); + public static extern void fdb_future_release_memory(IntPtr future); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FdbError fdb_future_block_until_ready(FutureHandle futureHandle); + public static extern FdbError fdb_future_block_until_ready(IntPtr futureHandle); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern bool fdb_future_is_ready(FutureHandle futureHandle); + public static extern bool fdb_future_is_ready(IntPtr futureHandle); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FdbError fdb_future_get_error(FutureHandle futureHandle); + public static extern FdbError fdb_future_get_error(IntPtr futureHandle); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FdbError fdb_future_set_callback(FutureHandle future, FdbFutureCallback callback, IntPtr callbackParameter); + public static extern FdbError fdb_future_set_callback(IntPtr future, FdbFutureCallback callback, IntPtr callbackParameter); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FdbError fdb_future_get_version(FutureHandle future, out long version); + public static extern FdbError fdb_future_get_version(IntPtr future, out long version); [DllImport(FDB_C_DLL, CallingConvention = 
CallingConvention.Cdecl)] - public static extern FdbError fdb_future_get_key(FutureHandle future, out byte* key, out int keyLength); + public static extern FdbError fdb_future_get_key(IntPtr future, out byte* key, out int keyLength); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FdbError fdb_future_get_cluster(FutureHandle future, out ClusterHandle cluster); + public static extern FdbError fdb_future_get_cluster(IntPtr future, out ClusterHandle cluster); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FdbError fdb_future_get_database(FutureHandle future, out DatabaseHandle database); + public static extern FdbError fdb_future_get_database(IntPtr future, out DatabaseHandle database); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FdbError fdb_future_get_value(FutureHandle future, out bool present, out byte* value, out int valueLength); + public static extern FdbError fdb_future_get_value(IntPtr future, out bool present, out byte* value, out int valueLength); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FdbError fdb_future_get_string_array(FutureHandle future, out byte** strings, out int count); + public static extern FdbError fdb_future_get_string_array(IntPtr future, out byte** strings, out int count); [DllImport(FDB_C_DLL, CallingConvention = CallingConvention.Cdecl)] - public static extern FdbError fdb_future_get_keyvalue_array(FutureHandle future, out FdbKeyValue* kv, out int count, out bool more); + public static extern FdbError fdb_future_get_keyvalue_array(IntPtr future, out FdbKeyValue* kv, out int count, out bool more); } static FdbNative() { - // Impact of NativeLibPath: - // - If null, don't preload the library, and let the CLR find the file using the default P/Invoke behavior - // - If String.Empty, call win32 LoadLibrary("fdb_c.dll") and let the os find the file (using the 
standard OS behavior) - // - Else, combine the path with "fdb_c.dll" and call LoadLibrary with the resulting (relative or absolute) path + var libraryPath = GetPreloadPath(); - var libraryPath = Fdb.Options.NativeLibPath; - if (libraryPath != null) + if (libraryPath == null) + { // PInvoke will load + return; + } + + try { - try + FdbCLib = UnmanagedLibrary.Load(libraryPath); + } + catch (Exception e) + { + if (FdbCLib != null) FdbCLib.Dispose(); + FdbCLib = null; + if (e is BadImageFormatException && IntPtr.Size == 4) { - if (libraryPath.Length == 0) - { // CLR will handle the search - libraryPath = FDB_C_DLL; - } - else if (!libraryPath.EndsWith(".dll", StringComparison.OrdinalIgnoreCase)) - { // add the file name - libraryPath = Path.Combine(Fdb.Options.NativeLibPath, FDB_C_DLL); - } - - FdbCLib = UnmanagedLibrary.Load(libraryPath); + e = new InvalidOperationException("The native FDB client is 64-bit only, and cannot be loaded in a 32-bit process.", e); } - catch (Exception e) + else { - if (FdbCLib != null) FdbCLib.Dispose(); - FdbCLib = null; - if (e is BadImageFormatException && IntPtr.Size == 4) - { - e = new InvalidOperationException("The native FDB client is 64-bit only, and cannot be loaded in a 32-bit process.", e); - } - else - { - e = new InvalidOperationException("An error occurred while loading the native FoundationDB library", e); - } - LibraryLoadError = ExceptionDispatchInfo.Capture(e); + e = new InvalidOperationException($"An error occurred while loading the native FoundationDB library: '{libraryPath}'.", e); } + LibraryLoadError = ExceptionDispatchInfo.Capture(e); } + + } + + private static string GetPreloadPath() + { + // we need to provide sensible defaults for loading the native library + // if this method returns null we'll let PInvoke deal + // otherwise - use explicit platform-specific dll loading + var libraryPath = Fdb.Options.NativeLibPath; + + // on non-windows, library loading by convention just works. 
+ // unless override is provided, just let PInvoke do the work + if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + if (string.IsNullOrEmpty(libraryPath)) + { + return null; + } + // otherwise just use the provided path + return libraryPath; + } + + // Impact of NativeLibPath on windows: + // - If null, don't preload the library, and let the CLR find the file using the default P/Invoke behavior + // - If String.Empty, call win32 LoadLibrary(FDB_C_DLL + ".dll") and let the os find the file (using the standard OS behavior) + // - If path is folder, append the FDB_C_DLL + var winDllWithExtension = FDB_C_DLL + ".dll"; + if (libraryPath == null) + { + return null; + } + if (libraryPath.Length == 0) + { + return winDllWithExtension; + } + var fileName = Path.GetFileName(libraryPath); + if (String.IsNullOrEmpty(fileName)) + { + libraryPath = Path.Combine(libraryPath, winDllWithExtension); + } + return libraryPath; } private static void EnsureLibraryIsLoaded() @@ -304,13 +338,13 @@ public static Slice ToNativeString(string value, bool nullTerminated) if (nullTerminated) { // NULL terminated ANSI string result = new byte[value.Length + 1]; - Slice.DefaultEncoding.GetBytes(value, 0, value.Length, result, 0); + Encoding.Default.GetBytes(value, 0, value.Length, result, 0); } else { - result = Slice.DefaultEncoding.GetBytes(value); + result = Encoding.Default.GetBytes(value); } - return new Slice(result, 0, result.Length); + return Slice.CreateUnsafe(result, 0, result.Length); } @@ -346,51 +380,54 @@ public static int GetMaxApiVersion() #region Futures... 
- public static bool FutureIsReady(FutureHandle futureHandle) + public static bool FutureIsReady(IntPtr futureHandle) { return NativeMethods.fdb_future_is_ready(futureHandle); } - public static void FutureDestroy(IntPtr futureHandle) + public static void FutureDestroy(IntPtr futureHandle, [CallerMemberName] string caller = null) { +#if DEBUG_FUTURES + Debug.WriteLine("Native.FutureDestroy(0x{0}) from {1}", (object)futureHandle.ToString("X"), caller); +#endif if (futureHandle != IntPtr.Zero) { NativeMethods.fdb_future_destroy(futureHandle); } } - public static void FutureCancel(FutureHandle futureHandle) + public static void FutureCancel(IntPtr futureHandle) { NativeMethods.fdb_future_cancel(futureHandle); } - public static void FutureReleaseMemory(FutureHandle futureHandle) + public static void FutureReleaseMemory(IntPtr futureHandle) { NativeMethods.fdb_future_release_memory(futureHandle); } - public static FdbError FutureGetError(FutureHandle future) + public static FdbError FutureGetError(IntPtr future) { return NativeMethods.fdb_future_get_error(future); } - public static FdbError FutureBlockUntilReady(FutureHandle future) + public static FdbError FutureBlockUntilReady(IntPtr future) { #if DEBUG_NATIVE_CALLS - Debug.WriteLine("calling fdb_future_block_until_ready(0x" + future.Handle.ToString("x") + ")..."); + Debug.WriteLine("calling fdb_future_block_until_ready(0x" + future.ToString("x") + ")..."); #endif var err = NativeMethods.fdb_future_block_until_ready(future); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_future_block_until_ready(0x" + future.Handle.ToString("x") + ") => err=" + err); + Debug.WriteLine("fdb_future_block_until_ready(0x" + future.ToString("x") + ") => err=" + err); #endif return err; } - public static FdbError FutureSetCallback(FutureHandle future, FdbFutureCallback callback, IntPtr callbackParameter) + public static FdbError FutureSetCallback(IntPtr future, FdbFutureCallback callback, IntPtr callbackParameter) { var err = 
NativeMethods.fdb_future_set_callback(future, callback, callbackParameter); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_future_set_callback(0x" + future.Handle.ToString("x") + ", 0x" + ptrCallback.ToString("x") + ") => err=" + err); + Debug.WriteLine("fdb_future_set_callback(0x" + future.ToString("x") + ", 0x" + callbackParameter.ToString("x") + ") => err=" + err); #endif return err; } @@ -427,12 +464,12 @@ public static FdbError StopNetwork() #region Clusters... - public static FutureHandle CreateCluster(string path) + public static IntPtr CreateCluster(string path) { var future = NativeMethods.fdb_create_cluster(path); - Contract.Assert(future != null); + Contract.Assert(future != IntPtr.Zero); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_create_cluster(" + path + ") => 0x" + future.Handle.ToString("x")); + Debug.WriteLine("fdb_create_cluster(" + path + ") => 0x" + future.ToString("x")); #endif return future; @@ -451,11 +488,11 @@ public static FdbError ClusterSetOption(ClusterHandle cluster, FdbClusterOption return NativeMethods.fdb_cluster_set_option(cluster, option, value, valueLength); } - public static FdbError FutureGetCluster(FutureHandle future, out ClusterHandle cluster) + public static FdbError FutureGetCluster(IntPtr future, out ClusterHandle cluster) { var err = NativeMethods.fdb_future_get_cluster(future, out cluster); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_future_get_cluster(0x" + future.Handle.ToString("x") + ") => err=" + err + ", handle=0x" + cluster.Handle.ToString("x")); + Debug.WriteLine("fdb_future_get_cluster(0x" + future.ToString("x") + ") => err=" + err + ", handle=0x" + cluster.Handle.ToString("x")); #endif //TODO: check if err == Success ? return err; @@ -465,11 +502,11 @@ public static FdbError FutureGetCluster(FutureHandle future, out ClusterHandle c #region Databases... 
- public static FdbError FutureGetDatabase(FutureHandle future, out DatabaseHandle database) + public static FdbError FutureGetDatabase(IntPtr future, out DatabaseHandle database) { var err = NativeMethods.fdb_future_get_database(future, out database); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_future_get_database(0x" + future.Handle.ToString("x") + ") => err=" + err + ", handle=0x" + database.Handle.ToString("x")); + Debug.WriteLine("fdb_future_get_database(0x" + future.ToString("x") + ") => err=" + err + ", handle=0x" + database.Handle.ToString("x")); #endif //TODO: check if err == Success ? return err; @@ -488,10 +525,10 @@ public static void DatabaseDestroy(IntPtr handle) } } - public static FutureHandle ClusterCreateDatabase(ClusterHandle cluster, string name) + public static IntPtr ClusterCreateDatabase(ClusterHandle cluster, string name) { var future = NativeMethods.fdb_cluster_create_database(cluster, name, name == null ? 0 : name.Length); - Contract.Assert(future != null); + Contract.Assert(future != IntPtr.Zero); #if DEBUG_NATIVE_CALLS Debug.WriteLine("fdb_cluster_create_database(0x" + cluster.Handle.ToString("x") + ", name: '" + name + "') => 0x" + cluster.Handle.ToString("x")); #endif @@ -524,37 +561,47 @@ public static FdbError DatabaseCreateTransaction(DatabaseHandle database, out Tr return err; } - public static FutureHandle TransactionCommit(TransactionHandle transaction) + public static IntPtr TransactionCommit(TransactionHandle transaction) { var future = NativeMethods.fdb_transaction_commit(transaction); + Contract.Assert(future != IntPtr.Zero); +#if DEBUG_NATIVE_CALLS + Debug.WriteLine("fdb_transaction_commit(0x" + transaction.Handle.ToString("x") + ") => 0x" + future.ToString("x")); +#endif + return future; + } + + public static IntPtr TransactionGetVersionStamp(TransactionHandle transaction) + { + var future = NativeMethods.fdb_transaction_get_versionstamp(transaction); Contract.Assert(future != null); #if DEBUG_NATIVE_CALLS - 
Debug.WriteLine("fdb_transaction_commit(0x" + transaction.Handle.ToString("x") + ") => 0x" + future.Handle.ToString("x")); + Debug.WriteLine("fdb_transaction_get_versionstamp(0x" + transaction.Handle.ToString("x") + ") => 0x" + future.Handle.ToString("x")); #endif return future; } - public static FutureHandle TransactionWatch(TransactionHandle transaction, Slice key) + public static IntPtr TransactionWatch(TransactionHandle transaction, Slice key) { if (key.IsNullOrEmpty) throw new ArgumentException("Key cannot be null or empty", "key"); fixed (byte* ptrKey = key.Array) { var future = NativeMethods.fdb_transaction_watch(transaction, ptrKey + key.Offset, key.Count); - Contract.Assert(future != null); + Contract.Assert(future != IntPtr.Zero); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_transaction_watch(0x" + transaction.Handle.ToString("x") + ", key: '" + FdbKey.Dump(key) + "') => 0x" + future.Handle.ToString("x")); + Debug.WriteLine("fdb_transaction_watch(0x" + transaction.Handle.ToString("x") + ", key: '" + FdbKey.Dump(key) + "') => 0x" + future.ToString("x")); #endif return future; } } - public static FutureHandle TransactionOnError(TransactionHandle transaction, FdbError errorCode) + public static IntPtr TransactionOnError(TransactionHandle transaction, FdbError errorCode) { var future = NativeMethods.fdb_transaction_on_error(transaction, errorCode); - Contract.Assert(future != null); + Contract.Assert(future != IntPtr.Zero); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_transaction_on_error(0x" + transaction.Handle.ToString("x") + ", " + errorCode + ") => 0x" + future.Handle.ToString("x")); + Debug.WriteLine("fdb_transaction_on_error(0x" + transaction.Handle.ToString("x") + ", " + errorCode + ") => 0x" + future.ToString("x")); #endif return future; } @@ -583,12 +630,12 @@ public static void TransactionSetReadVersion(TransactionHandle transaction, long NativeMethods.fdb_transaction_set_read_version(transaction, version); } - public static FutureHandle 
TransactionGetReadVersion(TransactionHandle transaction) + public static IntPtr TransactionGetReadVersion(TransactionHandle transaction) { var future = NativeMethods.fdb_transaction_get_read_version(transaction); - Contract.Assert(future != null); + Contract.Assert(future != IntPtr.Zero); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_transaction_get_read_version(0x" + transaction.Handle.ToString("x") + ") => 0x" + future.Handle.ToString("x")); + Debug.WriteLine("fdb_transaction_get_read_version(0x" + transaction.Handle.ToString("x") + ") => 0x" + future.ToString("x")); #endif return future; } @@ -601,15 +648,15 @@ public static FdbError TransactionGetCommittedVersion(TransactionHandle transact return NativeMethods.fdb_transaction_get_committed_version(transaction, out version); } - public static FdbError FutureGetVersion(FutureHandle future, out long version) + public static FdbError FutureGetVersion(IntPtr future, out long version) { #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_future_get_version(0x" + future.Handle.ToString("x") + ")"); + Debug.WriteLine("fdb_future_get_version(0x" + future.ToString("x") + ")"); #endif return NativeMethods.fdb_future_get_version(future, out version); } - public static FutureHandle TransactionGet(TransactionHandle transaction, Slice key, bool snapshot) + public static IntPtr TransactionGet(TransactionHandle transaction, Slice key, bool snapshot) { if (key.IsNull) throw new ArgumentException("Key cannot be null", "key"); @@ -619,15 +666,15 @@ public static FutureHandle TransactionGet(TransactionHandle transaction, Slice k fixed (byte* ptrKey = key.Array) { var future = NativeMethods.fdb_transaction_get(transaction, ptrKey + key.Offset, key.Count, snapshot); - Contract.Assert(future != null); + Contract.Assert(future != IntPtr.Zero); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_transaction_get(0x" + transaction.Handle.ToString("x") + ", key: '" + FdbKey.Dump(key) + "', snapshot: " + snapshot + ") => 0x" + 
future.Handle.ToString("x")); + Debug.WriteLine("fdb_transaction_get(0x" + transaction.Handle.ToString("x") + ", key: '" + FdbKey.Dump(key) + "', snapshot: " + snapshot + ") => 0x" + future.ToString("x")); #endif return future; } } - public static FutureHandle TransactionGetRange(TransactionHandle transaction, FdbKeySelector begin, FdbKeySelector end, int limit, int targetBytes, FdbStreamingMode mode, int iteration, bool snapshot, bool reverse) + public static IntPtr TransactionGetRange(TransactionHandle transaction, KeySelector begin, KeySelector end, int limit, int targetBytes, FdbStreamingMode mode, int iteration, bool snapshot, bool reverse) { fixed (byte* ptrBegin = begin.Key.Array) fixed (byte* ptrEnd = end.Key.Array) @@ -637,57 +684,57 @@ public static FutureHandle TransactionGetRange(TransactionHandle transaction, Fd ptrBegin + begin.Key.Offset, begin.Key.Count, begin.OrEqual, begin.Offset, ptrEnd + end.Key.Offset, end.Key.Count, end.OrEqual, end.Offset, limit, targetBytes, mode, iteration, snapshot, reverse); - Contract.Assert(future != null); + Contract.Assert(future != IntPtr.Zero); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_transaction_get_range(0x" + transaction.Handle.ToString("x") + ", begin: " + begin.PrettyPrint(FdbKey.PrettyPrintMode.Begin) + ", end: " + end.PrettyPrint(FdbKey.PrettyPrintMode.End) + ", " + snapshot + ") => 0x" + future.Handle.ToString("x")); + Debug.WriteLine("fdb_transaction_get_range(0x" + transaction.Handle.ToString("x") + ", begin: " + begin.PrettyPrint(FdbKey.PrettyPrintMode.Begin) + ", end: " + end.PrettyPrint(FdbKey.PrettyPrintMode.End) + ", " + snapshot + ") => 0x" + future.ToString("x")); #endif return future; } } - public static FutureHandle TransactionGetKey(TransactionHandle transaction, FdbKeySelector selector, bool snapshot) + public static IntPtr TransactionGetKey(TransactionHandle transaction, KeySelector selector, bool snapshot) { if (selector.Key.IsNull) throw new ArgumentException("Key cannot be null", 
"selector"); fixed (byte* ptrKey = selector.Key.Array) { var future = NativeMethods.fdb_transaction_get_key(transaction, ptrKey + selector.Key.Offset, selector.Key.Count, selector.OrEqual, selector.Offset, snapshot); - Contract.Assert(future != null); + Contract.Assert(future != IntPtr.Zero); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_transaction_get_key(0x" + transaction.Handle.ToString("x") + ", " + selector.ToString() + ", " + snapshot + ") => 0x" + future.Handle.ToString("x")); + Debug.WriteLine("fdb_transaction_get_key(0x" + transaction.Handle.ToString("x") + ", " + selector.ToString() + ", " + snapshot + ") => 0x" + future.ToString("x")); #endif return future; } } - public static FutureHandle TransactionGetAddressesForKey(TransactionHandle transaction, Slice key) + public static IntPtr TransactionGetAddressesForKey(TransactionHandle transaction, Slice key) { if (key.IsNullOrEmpty) throw new ArgumentException("Key cannot be null or empty", "key"); fixed (byte* ptrKey = key.Array) { var future = NativeMethods.fdb_transaction_get_addresses_for_key(transaction, ptrKey + key.Offset, key.Count); - Contract.Assert(future != null); + Contract.Assert(future != IntPtr.Zero); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_transaction_get_addresses_for_key(0x" + transaction.Handle.ToString("x") + ", key: '" + FdbKey.Dump(key) + "') => 0x" + future.Handle.ToString("x")); + Debug.WriteLine("fdb_transaction_get_addresses_for_key(0x" + transaction.Handle.ToString("x") + ", key: '" + FdbKey.Dump(key) + "') => 0x" + future.ToString("x")); #endif return future; } } - public static FdbError FutureGetValue(FutureHandle future, out bool valuePresent, out Slice value) + public static FdbError FutureGetValue(IntPtr future, out bool valuePresent, out Slice value) { byte* ptr; int valueLength; var err = NativeMethods.fdb_future_get_value(future, out valuePresent, out ptr, out valueLength); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_future_get_value(0x" + 
future.Handle.ToString("x") + ") => err=" + err + ", present=" + valuePresent + ", valueLength=" + valueLength); + Debug.WriteLine("fdb_future_get_value(0x" + future.ToString("x") + ") => err=" + err + ", present=" + valuePresent + ", valueLength=" + valueLength); #endif if (ptr != null && valueLength >= 0) { var bytes = new byte[valueLength]; Marshal.Copy(new IntPtr(ptr), bytes, 0, valueLength); - value = new Slice(bytes, 0, valueLength); + value = Slice.CreateUnsafe(bytes, 0, valueLength); } else { @@ -696,13 +743,13 @@ public static FdbError FutureGetValue(FutureHandle future, out bool valuePresent return err; } - public static FdbError FutureGetKey(FutureHandle future, out Slice key) + public static FdbError FutureGetKey(IntPtr future, out Slice key) { byte* ptr; int keyLength; var err = NativeMethods.fdb_future_get_key(future, out ptr, out keyLength); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_future_get_key(0x" + future.Handle.ToString("x") + ") => err=" + err + ", keyLength=" + keyLength); + Debug.WriteLine("fdb_future_get_key(0x" + future.ToString("x") + ") => err=" + err + ", keyLength=" + keyLength); #endif // note: fdb_future_get_key is allowed to return NULL for the empty key (not to be confused with a key that has an empty value) @@ -714,12 +761,12 @@ public static FdbError FutureGetKey(FutureHandle future, out Slice key) } else { - key = Slice.Create(ptr, keyLength); + key = Slice.Copy(ptr, keyLength); } return err; } - public static FdbError FutureGetKeyValueArray(FutureHandle future, out KeyValuePair[] result, out bool more) + public static FdbError FutureGetKeyValueArray(IntPtr future, out KeyValuePair[] result, out bool more) { result = null; @@ -728,7 +775,7 @@ public static FdbError FutureGetKeyValueArray(FutureHandle future, out KeyValueP var err = NativeMethods.fdb_future_get_keyvalue_array(future, out kvp, out count, out more); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_future_get_keyvalue_array(0x" + future.Handle.ToString("x") + ") => 
err=" + err + ", count=" + count + ", more=" + more); + Debug.WriteLine("fdb_future_get_keyvalue_array(0x" + future.ToString("x") + ") => err=" + err + ", count=" + count + ", more=" + more); #endif if (Fdb.Success(err)) @@ -776,8 +823,8 @@ public static FdbError FutureGetKeyValueArray(FutureHandle future, out KeyValueP Marshal.Copy(kvp[i].Value, page, p + kl, vl); result[i] = new KeyValuePair( - new Slice(page, p, kl), - new Slice(page, p + kl, vl) + page.AsSlice(p, kl), + page.AsSlice(p + kl, vl) ); p += kl + vl; @@ -789,7 +836,7 @@ public static FdbError FutureGetKeyValueArray(FutureHandle future, out KeyValueP return err; } - public static FdbError FutureGetStringArray(FutureHandle future, out string[] result) + public static FdbError FutureGetStringArray(IntPtr future, out string[] result) { result = null; @@ -798,7 +845,7 @@ public static FdbError FutureGetStringArray(FutureHandle future, out string[] re var err = NativeMethods.fdb_future_get_string_array(future, out strings, out count); #if DEBUG_NATIVE_CALLS - Debug.WriteLine("fdb_future_get_string_array(0x" + future.Handle.ToString("x") + ") => err=" + err + ", count=" + count); + Debug.WriteLine("fdb_future_get_string_array(0x" + future.ToString("x") + ") => err=" + err + ", count=" + count); #endif if (Fdb.Success(err)) @@ -825,6 +872,25 @@ public static FdbError FutureGetStringArray(FutureHandle future, out string[] re return err; } + public static FdbError FutureGetVersionStamp(IntPtr future, out VersionStamp stamp) + { + byte* ptr; + int keyLength; + var err = NativeMethods.fdb_future_get_key(future, out ptr, out keyLength); +#if DEBUG_NATIVE_CALLS + Debug.WriteLine("fdb_future_get_key(0x" + future.Handle.ToString("x") + ") => err=" + err + ", keyLength=" + keyLength); +#endif + + if (keyLength != 10 || ptr == null) + { + stamp = default; + return err; + } + + VersionStamp.ReadUnsafe(ptr, 10, out stamp); + return err; + } + public static void TransactionSet(TransactionHandle transaction, Slice key, 
Slice value) { fixed (byte* pKey = key.Array) @@ -886,6 +952,129 @@ public static FdbError TransactionAddConflictRange(TransactionHandle transaction #endregion + #region Global Future Context... + + internal static readonly GlobalNativeContext GlobalContext = new GlobalNativeContext(); + + internal sealed class GlobalNativeContext : FdbFutureContext + { + + public Task CreateClusterAsync(string clusterFile, CancellationToken ct) + { + return RunAsync( + (arg) => FdbNative.CreateCluster((string)arg), + clusterFile, + (h, _) => + { + ClusterHandle cluster; + var err = FdbNative.FutureGetCluster(h, out cluster); + if (err != FdbError.Success) + { + cluster.Dispose(); + throw Fdb.MapToException(err); + } + var handler = new FdbNativeCluster(cluster); + return (IFdbClusterHandler)handler; + }, + null, //unused + ct + ); + } + + public void WatchKeyAsync(ref FdbWatch watch, TransactionHandle handle, Slice key, CancellationToken ct) + { + throw new NotImplementedException(); +#if false + IntPtr h = IntPtr.Zero; + bool mustDispose = true; + try + { + IntPtr cookie = IntPtr.Zero; //TODO!! + + RuntimeHelpers.PrepareConstrainedRegions(); + try + { } + finally + { + h = FdbNative.TransactionWatch(handle, key); + } + Contract.Assert(h != IntPtr.Zero); + if (h == IntPtr.Zero) throw new InvalidOperationException("FIXME: failed to create a watch handle");//TODO: message? 
+ + var f = new FdbWatchFuture(key, cookie, "WatchKeyAsync", null); + watch = new FdbWatch(f, key, Slice.Nil); + + if (FdbNative.FutureIsReady(h)) + { + f.OnReady(); + mustDispose = false; + return; + } + } + finally + { + if (mustDispose && h != IntPtr.Zero) + { + FdbNative.FutureDestroy(h); + } + } +#endif + } + + internal sealed class FdbWatchFuture : FdbFuture + { + private IntPtr m_handle; + + private readonly object m_lock = new object(); + + public FdbWatchFuture(Slice key, IntPtr cookie, string label, object state) + : base(cookie, label, state) + { + this.Key = key; + } + + public Slice Key { get; private set; } + + public override bool Visit(IntPtr handle) + { + Contract.Assert(handle == m_handle || m_handle == IntPtr.Zero); + return true; + } + + protected override void OnCancel() + { + throw new NotImplementedException(); + } + + public override void OnReady() + { + IntPtr handle = IntPtr.Zero; + try + { + handle = Interlocked.Exchange(ref m_handle, IntPtr.Zero); + if (handle == IntPtr.Zero) return; + + var err = FdbNative.FutureGetError(m_handle); + + if (err == FdbError.Success) + { + PublishResult(this.Key); + } + else + { + PublishError(null, err); + } + } + finally + { + if (handle != IntPtr.Zero) FdbNative.FutureDestroy(handle); + } + } + } + + } + +#endregion } } diff --git a/FoundationDB.Client/Native/FdbNativeCluster.cs b/FoundationDB.Client/Native/FdbNativeCluster.cs index 1a8831418..9570d3a7e 100644 --- a/FoundationDB.Client/Native/FdbNativeCluster.cs +++ b/FoundationDB.Client/Native/FdbNativeCluster.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,47 +28,29 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client.Native { - using FoundationDB.Async; - using FoundationDB.Client.Core; - using FoundationDB.Client.Utils; using System; using System.Diagnostics; + using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using FoundationDB.Client.Core; /// Wraps a native FDBCluster* handle - internal sealed class FdbNativeCluster : IFdbClusterHandler + internal sealed class FdbNativeCluster : FdbFutureContext, IFdbClusterHandler { - private readonly ClusterHandle m_handle; + //private readonly ClusterHandle m_handle; public FdbNativeCluster(ClusterHandle handle) + : base(handle) { - Contract.Requires(handle != null); - m_handle = handle; } - public static Task CreateClusterAsync(string clusterFile, CancellationToken cancellationToken) + public static Task CreateClusterAsync(string clusterFile, CancellationToken ct) { - var future = FdbNative.CreateCluster(clusterFile); - return FdbFuture.CreateTaskFromHandle(future, - (h) => - { - ClusterHandle cluster; - var err = FdbNative.FutureGetCluster(h, out cluster); - if (err != FdbError.Success) - { - cluster.Dispose(); - throw Fdb.MapToException(err); - } - var handler = new FdbNativeCluster(cluster); - return (IFdbClusterHandler) handler; - }, - cancellationToken - ); + return FdbNative.GlobalContext.CreateClusterAsync(clusterFile, ct); } - internal ClusterHandle Handle { get { return m_handle; } } - public bool IsInvalid { get { return m_handle.IsInvalid; } } public bool IsClosed { get { return m_handle.IsClosed; } } @@ -93,14 +75,14 @@ public void SetOption(FdbClusterOption option, Slice data) } } - public Task OpenDatabaseAsync(string databaseName, CancellationToken cancellationToken) + public Task OpenDatabaseAsync(string databaseName, CancellationToken ct) { - if (cancellationToken.IsCancellationRequested) return TaskHelpers.FromCancellation(cancellationToken); + if 
(ct.IsCancellationRequested) return Task.FromCanceled(ct); - var future = FdbNative.ClusterCreateDatabase(m_handle, databaseName); - return FdbFuture.CreateTaskFromHandle( - future, - (h) => + return RunAsync( + (handle, state) => FdbNative.ClusterCreateDatabase(handle, state), + databaseName, + (h, state) => { DatabaseHandle database; var err = FdbNative.FutureGetDatabase(h, out database); @@ -109,18 +91,14 @@ public Task OpenDatabaseAsync(string databaseName, Cancella database.Dispose(); throw Fdb.MapToException(err); } - var handler = new FdbNativeDatabase(database); - return (IFdbDatabaseHandler) handler; + var handler = new FdbNativeDatabase(database, (string)state); + return (IFdbDatabaseHandler) handler; }, - cancellationToken + databaseName, + ct ); } - public void Dispose() - { - if (m_handle != null) m_handle.Dispose(); - } - } diff --git a/FoundationDB.Client/Native/FdbNativeDatabase.cs b/FoundationDB.Client/Native/FdbNativeDatabase.cs index 2ba7e47c5..37c9ecec4 100644 --- a/FoundationDB.Client/Native/FdbNativeDatabase.cs +++ b/FoundationDB.Client/Native/FdbNativeDatabase.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -27,7 +27,7 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY #endregion // enable this to capture the stacktrace of the ctor, when troubleshooting leaked database handles -#undef CAPTURE_STACKTRACES +//#define CAPTURE_STACKTRACES namespace FoundationDB.Client.Native { @@ -37,43 +37,20 @@ namespace FoundationDB.Client.Native /// Wraps a native FDBDatabase* handle [DebuggerDisplay("Handle={m_handle}, Closed={m_handle.IsClosed}")] - internal sealed class FdbNativeDatabase : IFdbDatabaseHandler + internal sealed class FdbNativeDatabase : FdbFutureContext, IFdbDatabaseHandler { - /// Handle that wraps the native FDB_DATABASE* - private readonly DatabaseHandle m_handle; - -#if CAPTURE_STACKTRACES - private readonly StackTrace m_stackTrace; -#endif - - public FdbNativeDatabase(DatabaseHandle handle) - { - if (handle == null) throw new ArgumentNullException("handle"); - - m_handle = handle; -#if CAPTURE_STACKTRACES - m_stackTrace = new StackTrace(); -#endif - } - - //REVIEW: do we really need a destructor ? The handle is a SafeHandle, and will take care of itself... 
- ~FdbNativeDatabase() + public FdbNativeDatabase(DatabaseHandle handle, string name) + : base(handle) { -#if CAPTURE_STACKTRACES - Trace.WriteLine("A database handle (" + m_handle + ") was leaked by " + m_stackTrace); -#endif -#if DEBUG - // If you break here, that means that a native database handler was leaked by a FdbDatabase instance (or that the database instance was leaked) - if (Debugger.IsAttached) Debugger.Break(); -#endif - Dispose(false); + this.Name = name; } - public bool IsInvalid { get { return m_handle.IsInvalid; } } public bool IsClosed { get { return m_handle.IsClosed; } } + public string Name { get; private set; } + public void SetOption(FdbDatabaseOption option, Slice data) { Fdb.EnsureNotOnNetworkThread(); @@ -113,19 +90,6 @@ public IFdbTransactionHandler CreateTransaction(FdbOperationContext context) } } - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } - - private void Dispose(bool disposing) - { - if (disposing) - { - if (m_handle != null) m_handle.Dispose(); - } - } } } diff --git a/FoundationDB.Client/Native/FdbNativeTransaction.cs b/FoundationDB.Client/Native/FdbNativeTransaction.cs index 63317a1b1..4d2b35cdf 100644 --- a/FoundationDB.Client/Native/FdbNativeTransaction.cs +++ b/FoundationDB.Client/Native/FdbNativeTransaction.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -27,67 +27,42 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY #endregion // enable this to help debug Transactions -#undef DEBUG_TRANSACTIONS +//#define DEBUG_TRANSACTIONS // enable this to capture the stacktrace of the ctor, when troubleshooting leaked transaction handles -#undef CAPTURE_STACKTRACES +//#define CAPTURE_STACKTRACES namespace FoundationDB.Client.Native { - using FoundationDB.Client.Core; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using FoundationDB.Client.Core; + using JetBrains.Annotations; /// Wraps a native FDB_TRANSACTION handle [DebuggerDisplay("Handle={m_handle}, Size={m_payloadBytes}, Closed={m_handle.IsClosed}")] - internal class FdbNativeTransaction : IFdbTransactionHandler + internal class FdbNativeTransaction : FdbFutureContext, IFdbTransactionHandler { + private readonly FdbNativeDatabase m_database; - /// FDB_TRANSACTION* handle - private readonly TransactionHandle m_handle; /// Estimated current size of the transaction private int m_payloadBytes; -#if CAPTURE_STACKTRACES - private StackTrace m_stackTrace; -#endif - - public FdbNativeTransaction(FdbNativeDatabase db, TransactionHandle handle) + public FdbNativeTransaction([NotNull] FdbNativeDatabase db, [NotNull] TransactionHandle handle) + : base(handle) { if (db == null) throw new ArgumentNullException("db"); - if (handle == null) throw new ArgumentNullException("handle"); m_database = db; - m_handle = handle; -#if CAPTURE_STACKTRACES - m_stackTrace = new StackTrace(); -#endif - } - - //REVIEW: do we really need a destructor ? The handle is a SafeHandle, and will take care of itself... 
- ~FdbNativeTransaction() - { -#if CAPTURE_STACKTRACES - Trace.WriteLine("A transaction handle (" + m_handle + ", " + m_payloadBytes + " bytes written) was leaked by " + m_stackTrace); -#endif -#if DEBUG - // If you break here, that means that a native transaction handler was leaked by a FdbTransaction instance (or that the transaction instance was leaked) - if (Debugger.IsAttached) Debugger.Break(); -#endif - Dispose(false); } #region Properties... public bool IsClosed { get { return m_handle.IsClosed; } } - /// Native FDB_TRANSACTION* handle - public TransactionHandle Handle { get { return m_handle; } } - /// Database handler that owns this transaction public FdbNativeDatabase Database { get { return m_database; } } @@ -131,21 +106,23 @@ public void SetOption(FdbTransactionOption option, Slice data) #region Reading... - public Task GetReadVersionAsync(CancellationToken cancellationToken) + public Task GetReadVersionAsync(CancellationToken ct) { - var future = FdbNative.TransactionGetReadVersion(m_handle); - return FdbFuture.CreateTaskFromHandle(future, - (h) => + return RunAsync( + (handle, state) => FdbNative.TransactionGetReadVersion(handle), + default(object), + (future, state) => { long version; - var err = FdbNative.FutureGetVersion(h, out version); + var err = FdbNative.FutureGetVersion(future, out version); #if DEBUG_TRANSACTIONS Debug.WriteLine("FdbTransaction[" + m_id + "].GetReadVersion() => err=" + err + ", version=" + version); #endif Fdb.DieOnError(err); return version; }, - cancellationToken + default(object), + ct ); } @@ -154,10 +131,8 @@ public void SetReadVersion(long version) FdbNative.TransactionSetReadVersion(m_handle, version); } - private static bool TryGetValueResult(FutureHandle h, out Slice result) + private static bool TryGetValueResult(IntPtr h, out Slice result) { - Contract.Requires(h != null); - bool present; var err = FdbNative.FutureGetValue(h, out present, out result); #if DEBUG_TRANSACTIONS @@ -167,48 +142,47 @@ private static 
bool TryGetValueResult(FutureHandle h, out Slice result) return present; } - private static Slice GetValueResultBytes(FutureHandle h) + private static Slice GetValueResultBytes(IntPtr h) { - Contract.Requires(h != null); - Slice result; - if (!TryGetValueResult(h, out result)) - { - return Slice.Nil; - } - return result; + return !TryGetValueResult(h, out result) ? Slice.Nil : result; } - public Task GetAsync(Slice key, bool snapshot, CancellationToken cancellationToken) + public Task GetAsync(Slice key, bool snapshot, CancellationToken ct) { - var future = FdbNative.TransactionGet(m_handle, key, snapshot); - return FdbFuture.CreateTaskFromHandle(future, (h) => GetValueResultBytes(h), cancellationToken); + return RunAsync( + (handle, state) => FdbNative.TransactionGet(handle, state.Item1, state.Item2), + (key, snapshot), + (future, state) => GetValueResultBytes(future), + null, + ct + ); } - public Task GetValuesAsync(Slice[] keys, bool snapshot, CancellationToken cancellationToken) + public Task GetValuesAsync(Slice[] keys, bool snapshot, CancellationToken ct) { Contract.Requires(keys != null); - if (keys.Length == 0) return Task.FromResult(Slice.EmptySliceArray); + if (keys.Length == 0) return Task.FromResult(Array.Empty()); - var futures = new FutureHandle[keys.Length]; - try - { - for (int i = 0; i < keys.Length; i++) - { - futures[i] = FdbNative.TransactionGet(m_handle, keys[i], snapshot); - } - } - catch - { - for (int i = 0; i < keys.Length; i++) + return RunAsync( + keys.Length, + (handle, state, futures) => { - if (futures[i] == null) break; - futures[i].Dispose(); - } - throw; - } - return FdbFuture.CreateTaskFromHandleArray(futures, (h) => GetValueResultBytes(h), cancellationToken); + var _keys = state.Item1; + var _snapshot = state.Item2; + for (int i = 0; i < _keys.Length; i++) + { + var h = FdbNative.TransactionGet(handle, _keys[i], _snapshot); + if (h == IntPtr.Zero) throw new FdbException(FdbError.OperationFailed); + futures[i] = h; + } + }, + 
(keys, snapshot), + (future, state) => GetValueResultBytes(future), + default(object), //TODO: buffer for the slices + ct + ); } /// Extract a chunk of result from a completed Future @@ -216,7 +190,7 @@ public Task GetValuesAsync(Slice[] keys, bool snapshot, CancellationTok /// Receives true if there are more result, or false if all results have been transmited /// Array of key/value pairs, or an exception [NotNull] - private static KeyValuePair[] GetKeyValueArrayResult(FutureHandle h, out bool more) + private static KeyValuePair[] GetKeyValueArrayResult(IntPtr h, out bool more) { KeyValuePair[] result; var err = FdbNative.FutureGetKeyValueArray(h, out result, out more); @@ -228,33 +202,31 @@ private static KeyValuePair[] GetKeyValueArrayResult(FutureHandle /// Asynchronously fetch a new page of results /// True if Chunk contains a new page of results. False if all results have been read. - public Task GetRangeAsync(FdbKeySelector begin, FdbKeySelector end, FdbRangeOptions options, int iteration, bool snapshot, CancellationToken cancellationToken) + public Task GetRangeAsync(KeySelector begin, KeySelector end, FdbRangeOptions options, int iteration, bool snapshot, CancellationToken ct) { Contract.Requires(options != null); bool reversed = options.Reverse ?? false; - var future = FdbNative.TransactionGetRange(m_handle, begin, end, options.Limit ?? 0, options.TargetBytes ?? 0, options.Mode ?? FdbStreamingMode.Iterator, iteration, snapshot, reversed); - return FdbFuture.CreateTaskFromHandle( - future, - (h) => + + return RunAsync( + (handle, _) => FdbNative.TransactionGetRange(handle, begin, end, options.Limit ?? 0, options.TargetBytes ?? 0, options.Mode ?? FdbStreamingMode.Iterator, iteration, snapshot, reversed), + default(object), //TODO: pass options & co? 
+ (future, state) => { // TODO: quietly return if disposed - bool hasMore; - var chunk = GetKeyValueArrayResult(h, out hasMore); + var chunk = GetKeyValueArrayResult(future, out bool hasMore); return new FdbRangeChunk(hasMore, chunk, iteration, reversed); }, - cancellationToken + default(object), //TODO: pass options & co? + ct ); } - private static Slice GetKeyResult(FutureHandle h) + private static Slice GetKeyResult(IntPtr h) { - Contract.Requires(h != null); - - Slice result; - var err = FdbNative.FutureGetKey(h, out result); + var err = FdbNative.FutureGetKey(h, out Slice result); #if DEBUG_TRANSACTIONS Debug.WriteLine("FdbTransaction[].GetKeyResult() => err=" + err + ", result=" + result.ToString()); #endif @@ -262,39 +234,41 @@ private static Slice GetKeyResult(FutureHandle h) return result; } - public Task GetKeyAsync(FdbKeySelector selector, bool snapshot, CancellationToken cancellationToken) + public Task GetKeyAsync(KeySelector selector, bool snapshot, CancellationToken ct) { - var future = FdbNative.TransactionGetKey(m_handle, selector, snapshot); - return FdbFuture.CreateTaskFromHandle( - future, - (h) => GetKeyResult(h), - cancellationToken + return RunAsync( + (handle, state) => FdbNative.TransactionGetKey(handle, state.Selector, state.Snapshot), + (Selector: selector, Snapshot: snapshot), + (future, state) => GetKeyResult(future), + default(object), + ct ); } - public Task GetKeysAsync(FdbKeySelector[] selectors, bool snapshot, CancellationToken cancellationToken) + public Task GetKeysAsync(KeySelector[] selectors, bool snapshot, CancellationToken ct) { Contract.Requires(selectors != null); - var futures = new FutureHandle[selectors.Length]; - try - { - for (int i = 0; i < selectors.Length; i++) - { - futures[i] = FdbNative.TransactionGetKey(m_handle, selectors[i], snapshot); - } - } - catch - { - for (int i = 0; i < selectors.Length; i++) - { - if (futures[i] == null) break; - futures[i].Dispose(); - } - throw; - } - return 
FdbFuture.CreateTaskFromHandleArray(futures, (h) => GetKeyResult(h), cancellationToken); + if (selectors.Length == 0) return Task.FromResult(Array.Empty()); //REVIEW: PERF: maybe we could cache the emtpy array task? + return RunAsync( + selectors.Length, + (handle, state, futures) => + { + var _selectors = state.Selectors; + var _snapshot = state.Snapshot; + for (int i = 0; i < _selectors.Length; i++) + { + var h = FdbNative.TransactionGetKey(handle, _selectors[i], _snapshot); + if (h == IntPtr.Zero) throw new FdbException(FdbError.OperationFailed); + futures[i] = h; + } + }, + (Selectors: selectors, Snapshot: snapshot), + (future, state) => GetKeyResult(future), + default(object), //TODO: buffer for the slices + ct + ); } #endregion @@ -340,10 +314,8 @@ public void AddConflictRange(Slice beginKeyInclusive, Slice endKeyExclusive, Fdb } [NotNull] - private static string[] GetStringArrayResult(FutureHandle h) + private static string[] GetStringArrayResult(IntPtr h) { - Contract.Requires(h != null); - string[] result; var err = FdbNative.FutureGetStringArray(h, out result); #if DEBUG_TRANSACTIONS @@ -354,13 +326,14 @@ private static string[] GetStringArrayResult(FutureHandle h) return result; } - public Task GetAddressesForKeyAsync(Slice key, CancellationToken cancellationToken) + public Task GetAddressesForKeyAsync(Slice key, CancellationToken ct) { - var future = FdbNative.TransactionGetAddressesForKey(m_handle, key); - return FdbFuture.CreateTaskFromHandle( - future, - (h) => GetStringArrayResult(h), - cancellationToken + return RunAsync( + (handle, state) => FdbNative.TransactionGetAddressesForKey(handle, state), + key, + (future, state) => GetStringArrayResult(future), + default(object), + ct ); } @@ -368,14 +341,14 @@ public Task GetAddressesForKeyAsync(Slice key, CancellationToken cance #region Watches... 
- public FdbWatch Watch(Slice key, CancellationToken cancellationToken) + public FdbWatch Watch(Slice key, CancellationToken ct) { - var future = FdbNative.TransactionWatch(m_handle, key); - return new FdbWatch( - FdbFuture.FromHandle(future, (h) => key, cancellationToken), - key, - Slice.Nil - ); + // a Watch will outlive the transaction, so we can attach it to the current FutureContext (which will be disposed once the transaction goes away) + // => we will store at them to the GlobalContext + + + + throw new NotImplementedException("FIXME: Future refactoring in progress! I owe you a beer (*) if I ever forget to remove this before committing! (*: if you come get it in person!)"); } #endregion @@ -384,8 +357,7 @@ public FdbWatch Watch(Slice key, CancellationToken cancellationToken) public long GetCommittedVersion() { - long version; - var err = FdbNative.TransactionGetCommittedVersion(m_handle, out version); + var err = FdbNative.TransactionGetCommittedVersion(m_handle, out long version); #if DEBUG_TRANSACTIONS Debug.WriteLine("FdbTransaction[" + m_id + "].GetCommittedVersion() => err=" + err + ", version=" + version); #endif @@ -393,6 +365,30 @@ public long GetCommittedVersion() return version; } + public Task GetVersionStampAsync(CancellationToken ct) + { + return RunAsync( + (handle, state) => FdbNative.TransactionGetVersionStamp(handle), + default(object), + (future, state) => GetVersionStampResult(future), + default(object), + ct + ); + } + + private static VersionStamp GetVersionStampResult(IntPtr h) + { + Contract.Requires(h != null); + var err = FdbNative.FutureGetVersionStamp(h, out VersionStamp stamp); +#if DEBUG_TRANSACTIONS + Debug.WriteLine("FdbTransaction[" + m_id + "].FutureGetVersionStamp() => err=" + err + ", vs=" + stamp + ")"); +#endif + Fdb.DieOnError(err); + + return stamp; + } + + /// /// Attempts to commit the sets and clears previously applied to the database snapshot represented by this transaction to the actual database. 
/// The commit may or may not succeed – in particular, if a conflicting transaction previously committed, then the commit must fail in order to preserve transactional isolation. @@ -400,16 +396,30 @@ public long GetCommittedVersion() /// /// Task that succeeds if the transaction was comitted successfully, or fails if the transaction failed to commit. /// As with other client/server databases, in some failure scenarios a client may be unable to determine whether a transaction succeeded. In these cases, CommitAsync() will throw CommitUnknownResult error. The OnErrorAsync() function treats this error as retryable, so retry loops that don’t check for CommitUnknownResult could execute the transaction twice. In these cases, you must consider the idempotence of the transaction. - public Task CommitAsync(CancellationToken cancellationToken) + public Task CommitAsync(CancellationToken ct) { - var future = FdbNative.TransactionCommit(m_handle); - return FdbFuture.CreateTaskFromHandle(future, (h) => null, cancellationToken); + return RunAsync( + (handle, state) => FdbNative.TransactionCommit(handle), + default(object), + (future, state) => state, + default(object), //TODO:? + ct + ); } - public Task OnErrorAsync(FdbError code, CancellationToken cancellationToken) + public Task OnErrorAsync(FdbError code, CancellationToken ct) { - var future = FdbNative.TransactionOnError(m_handle, code); - return FdbFuture.CreateTaskFromHandle(future, (h) => { ResetInternal(); return null; }, cancellationToken); + return RunAsync( + (handle, state) => FdbNative.TransactionOnError(handle, state), + code, + (h, state) => + { + ((FdbNativeTransaction)state).ResetInternal(); + return default(object); + }, + this, + ct + ); } public void Reset() @@ -430,25 +440,6 @@ private void ResetInternal() #endregion - #region IDisposable... 
- - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } - - private void Dispose(bool disposing) - { - if (disposing) - { - // Dispose of the handle - if (!m_handle.IsClosed) m_handle.Dispose(); - } - } - - #endregion - } } diff --git a/FoundationDB.Client/Native/Futures/FdbFuture.cs b/FoundationDB.Client/Native/Futures/FdbFuture.cs new file mode 100644 index 000000000..4e944ae9c --- /dev/null +++ b/FoundationDB.Client/Native/Futures/FdbFuture.cs @@ -0,0 +1,121 @@ +#region BSD Licence +/* Copyright (c) 2013-2015, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +// enable this to help debug Futures +#undef DEBUG_FUTURES + +using System.Diagnostics.Contracts; + +namespace FoundationDB.Client.Native +{ + using System; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using System.Threading; + using System.Threading.Tasks; + + /// Base class for all FDBFuture wrappers + /// Type of the Task's result + [DebuggerDisplay("Label={Label}, Cookie={Cookie}, State={Task.Status}")] + internal abstract class FdbFuture : TaskCompletionSource, IFdbFuture + { + + #region Private Members... + + /// Optionnal registration on the parent Cancellation Token + /// Is only valid if FLAG_HAS_CTR is set + internal CancellationTokenRegistration m_ctr; + + protected FdbFuture(IntPtr cookie, string label, object state) + : base(state) + { + this.Cookie = cookie; + this.Label = label; + } + + public IntPtr Cookie { get; private set; } + + public string Label { get; private set; } + + #endregion + + #region Cancellation... + + #endregion + + public abstract bool Visit(IntPtr handle); + + public abstract void OnReady(); + + /// Return true if the future has completed (successfully or not) + public bool IsReady + { + get { return this.Task.IsCompleted; } + } + + /// Make the Future awaitable + public TaskAwaiter GetAwaiter() + { + return this.Task.GetAwaiter(); + } + + /// Try to abort the task (if it is still running) + public void Cancel() + { + if (this.Task.IsCanceled) return; + + OnCancel(); + } + + protected abstract void OnCancel(); + + internal void PublishResult(T result) + { + TrySetResult(result); + } + + internal void PublishError(Exception error, FdbError code) + { + if (error != null) + { + TrySetException(error); + } + else if (FdbFutureContext.ClassifyErrorSeverity(code) == FdbFutureContext.CATEGORY_CANCELLED) + { + TrySetCanceled(); + } + else + { + Contract.Assert(code != FdbError.Success); + TrySetException(Fdb.MapToException(code)); + } + } + + } + +} diff --git 
a/FoundationDB.Client/Native/Futures/FdbFutureArray.cs b/FoundationDB.Client/Native/Futures/FdbFutureArray.cs new file mode 100644 index 000000000..60897bf2c --- /dev/null +++ b/FoundationDB.Client/Native/Futures/FdbFutureArray.cs @@ -0,0 +1,195 @@ +#region BSD Licence +/* Copyright (c) 2013-2015, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace FoundationDB.Client.Native +{ + using JetBrains.Annotations; + using System; + using System.Diagnostics; + using System.Threading; + + /// FDBFuture[] wrapper + /// Type of result + internal sealed class FdbFutureArray : FdbFuture + { + // This future encapsulate multiple FDBFuture* handles and use ref-counting to detect when all the handles have fired + // The ref-counting is handled by the network thread, and invokation of future.OnReady() is deferred to the ThreadPool once the counter reaches zero + // The result array is computed once all FDBFuture are ready, from the ThreadPool. + // If at least one of the FDBFuture fails, the Task fails, using the most "serious" error found (ie: Non-Retryable > Cancelled > Retryable) + + #region Private Members... + + /// Encapsulated handles + // May contains IntPtr.Zero handles if there was a problem when setting up the callbacks. + // Atomically set to null by the first thread that needs to destroy all the handles + [CanBeNull] + private IntPtr[] m_handles; + + /// Number of handles that haven't fired yet + private int m_pending; + + /// Lambda used to extract the result of one handle + // the first argument is the FDBFuture handle that must be ready and not failed + // the second argument is a state that is passed by the caller. + [NotNull] + private readonly Func m_resultSelector; + + #endregion + + internal FdbFutureArray([NotNull] IntPtr[] handles, [NotNull] Func selector, object state, IntPtr cookie, string label) + : base(cookie, label, state) + { + m_handles = handles; + m_pending = handles.Length; + m_resultSelector = selector; + } + + public override bool Visit(IntPtr handle) + { + return 0 == Interlocked.Decrement(ref m_pending); + } + + public override void OnReady() + { + //README: + // - This callback will fire either from the ThreadPool (async ops) or inline form the ctor of the future (non-async ops, or ops that where served from some cache). 
+ // - The method *MUST* dispose the future handle before returning, and *SHOULD* do so before signaling the task. + // => This is because continuations may run inline, and start new futures from there, while we still have our original future handle opened. + + IntPtr[] handles = null; + try + { + // make sure that nobody can destroy our handles while we are using them. + handles = Interlocked.Exchange(ref m_handles, null); + if (handles == null) return; // already disposed? + +#if DEBUG_FUTURES + Debug.WriteLine("FutureArray.{0}<{1}[]>.OnReady([{2}])", this.Label, typeof(T).Name, handles.Length); +#endif + + T[] results = new T[handles.Length]; + FdbError code = FdbError.Success; + int severity = 0; + Exception error = null; + + if (this.Task.IsCompleted) + { // task has already been handled by someone else + return; + } + + var state = this.Task.AsyncState; + for (int i = 0; i < results.Length; i++) + { + var handle = handles[i]; + var err = FdbNative.FutureGetError(handle); + if (err == FdbError.Success) + { + if (code != FdbError.Success) + { // there's been at least one error before, so there is no point in computing the result, it would be discarded anyway + continue; + } + + try + { + results[i] = m_resultSelector(handle, state); + } + catch (AccessViolationException e) + { // trouble in paradise! + +#if DEBUG_FUTURES + Debug.WriteLine("EPIC FAIL: " + e.ToString()); +#endif + + // => THIS IS VERY BAD! 
We have no choice but to terminate the process immediately, because any new call to any method to the binding may end up freezing the whole process (best case) or sending corrupted data to the cluster (worst case) + if (Debugger.IsAttached) Debugger.Break(); + + Environment.FailFast("FIXME: FDB done goofed!", e); + } + catch (Exception e) + { +#if DEBUG_FUTURES + Debug.WriteLine("FAIL: " + e.ToString()); +#endif + code = FdbError.InternalError; + error = e; + break; + } + } + else if (code != err) + { + int cur = FdbFutureContext.ClassifyErrorSeverity(err); + if (cur > severity) + { // error is more serious than before + severity = cur; + code = err; + } + } + } + + // since continuations may fire inline, make sure to release all the memory used by this handle first + FdbFutureContext.DestroyHandles(ref handles); + + if (code == FdbError.Success) + { + PublishResult(results); + } + else + { + PublishError(error, code); + } + } + catch (Exception e) + { // we must not blow up the TP or the parent, so make sure to propagate all exceptions to the task + TrySetException(e); + } + finally + { + if (handles != null) FdbFutureContext.DestroyHandles(ref handles); + GC.KeepAlive(this); + } + } + + protected override void OnCancel() + { + var handles = Volatile.Read(ref m_handles); + //TODO: we probably need locking to prevent concurrent destroy and cancel calls + if (handles != null) + { + foreach (var handle in handles) + { + if (handle != IntPtr.Zero) + { + FdbNative.FutureCancel(handle); + } + } + } + } + + } + +} \ No newline at end of file diff --git a/FoundationDB.Client/Native/Futures/FdbFutureContext.cs b/FoundationDB.Client/Native/Futures/FdbFutureContext.cs new file mode 100644 index 000000000..fb93b9e32 --- /dev/null +++ b/FoundationDB.Client/Native/Futures/FdbFutureContext.cs @@ -0,0 +1,756 @@ +#region BSD Licence +/* Copyright (c) 2013-2014, Doxense SAS +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +// enable this to capture the stacktrace of the ctor, when troubleshooting leaked transaction handles +//#define CAPTURE_STACKTRACES + +namespace FoundationDB.Client.Native +{ + using JetBrains.Annotations; + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using System.Threading; + using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; + + internal class FdbFutureContext : IDisposable + { + + #region Private Constants... 
+ + private const int FUTURE_COOKIE_SIZE = 32; + + private const int FUTURE_COOKIE_SHIFT = 0; + + private const ulong FUTURE_COOKIE_MASK = (1UL << FUTURE_COOKIE_SIZE) - 1; + + private const int CONTEXT_COOKIE_SIZE = 32; + + private const ulong CONTEXT_COOKIE_MASK = (1UL << CONTEXT_COOKIE_SIZE) - 1; + + private const int CONTEXT_COOKIE_SHIFT = FUTURE_COOKIE_SIZE; + + #endregion + + #region Static Stuff.... + + /// Counter used to generate the cookie values for each unique context + private static int s_globalCookieCounter; + + private static readonly Dictionary s_contexts = new Dictionary(); + + private static IntPtr MakeCallbackCookie(uint contextId, uint futureId) + { + ulong cookie = (contextId & CONTEXT_COOKIE_MASK) << CONTEXT_COOKIE_SHIFT; + cookie |= (futureId & FUTURE_COOKIE_MASK) << FUTURE_COOKIE_SHIFT; + return new IntPtr((long)cookie); + } + + private static uint GetContextIdFromCookie(IntPtr cookie) + { + return (uint) (((ulong) cookie.ToInt64() >> CONTEXT_COOKIE_SHIFT) & CONTEXT_COOKIE_MASK); + } + + private static uint GetFutureIdFromCookie(IntPtr cookie) + { + return (uint)(((ulong)cookie.ToInt64() >> FUTURE_COOKIE_SHIFT) & FUTURE_COOKIE_MASK); + } + + /// Delegate that will be called by fdb_c to notify us that a future as completed + /// It is important to make sure that this delegate will NOT be garbaged collected before the last future callback has fired! + private static readonly FdbNative.FdbFutureCallback GlobalCallback = FutureCallbackHandler; + + private static void FutureCallbackHandler(IntPtr handle, IntPtr cookie) + { + // cookie is the value that will help us find the corresponding context (upper 32 bits) and future within this context (lower 32 bits) that matches with this future handle. 
+ +#if DEBUG_FUTURES + Debug.WriteLine("FutureCallbackHandler(0x{0}, {1:X8} | {2:X8}) called from {3} [{4}]", handle.ToString("X"), cookie.ToInt64() >> 32, cookie.ToInt64() & uint.MaxValue, Thread.CurrentThread.ManagedThreadId, Thread.CurrentThread.Name); +#endif + bool fromNetworkThread = Fdb.IsNetworkThread; + + if (!fromNetworkThread) + { // most probably, we have been called inline from fdb_future_set_callback + // => The caller is holding a lock, so we have to defer to the ThreadPool and return as soon as possible! + try + { + ThreadPool.UnsafeQueueUserWorkItem( + (state) => + { + var args = (Tuple) state; + ProcessFutureCallback(args.Item1, args.Item2, false); + }, + Tuple.Create(handle, cookie) + ); + return; + } + catch (Exception) + { // unable to defer to the TP? + // we can't rethrow the exception if FDB_C is calling us (it won't know about it), + // so we will continue running inline. Hopefully this should never happen. + + // => eat the exception and continue + } + } + + ProcessFutureCallback(handle, cookie, fromNetworkThread); + } + + private static void ProcessFutureCallback(IntPtr handle, IntPtr cookie, bool fromNetworkThread) + { +#if DEBUG_FUTURES + Debug.WriteLine("ProcessFutureCallback(0x{0}, {1:X8} | {2:X8}, {3}) called from {4} [{5}]", handle.ToString("X"), cookie.ToInt64() >> 32, cookie.ToInt64() & uint.MaxValue, fromNetworkThread, Thread.CurrentThread.ManagedThreadId, Thread.CurrentThread.Name); +#endif + // we are called by FDB_C, from the thread that runs the Event Loop + bool keepAlive = false; + try + { + // extract the upper 32 bits which contain the ID of the corresponding future context + uint contextId = GetContextIdFromCookie(cookie); + + FdbFutureContext context; + lock (s_contexts) // there will only be contentions on this lock if other a lot of threads are creating new contexts (ie: new transactions) + { + s_contexts.TryGetValue(contextId, out context); + } + + if (context != null) + { + //TODO: if the context is marked as "dead" 
we need to refcount the pending futures down to 0, and then remove the context from the list + + Contract.Assert(context.m_contextId == contextId); + bool purgeContext; + keepAlive = context.OnFutureReady(handle, cookie, fromNetworkThread, out purgeContext); + + if (purgeContext) + { // the context was disposed and saw the last pending future going by, we have to remove it from the list + lock (s_contexts) + { + s_contexts.Remove(contextId); + } + } + } + } + finally + { + if (!keepAlive) DestroyHandle(ref handle); + } + } + + #endregion + + private const int STATE_DEFAULT = 0; + + private const int STATE_DEAD = 1; + + // this flag must only be used under the lock + private int m_flags; + + /// Cookie for this context + /// Makes the 32-bits upper bits of the future callback parameter + private readonly uint m_contextId = (uint) Interlocked.Increment(ref s_globalCookieCounter); + + /// Counter used to generated the cookie for all futures created from this context + private int m_localCookieCounter; + + /// Dictionary used to store all the pending Futures for this context + /// All methods should take a lock on this instance before manipulating the state + private readonly Dictionary m_futures = new Dictionary(); + +#if CAPTURE_STACKTRACES + private readonly StackTrace m_stackTrace; +#endif + + #region Constructors... + + protected FdbFutureContext() + { + //REVIEW: is this a good idea to do this in the constructor? (we could start observing a context that hasn't been fully constructed yet + lock (s_contexts) + { + s_contexts[m_contextId] = this; + } +#if CAPTURE_STACKTRACES + m_stackTrace = new StackTrace(); +#endif + } + +#if NOT_NEEDED + //REVIEW: do we really need a destructor ? The handle is a SafeHandle, and will take care of itself... 
+ ~FdbFutureContext() + { + if (!AppDomain.CurrentDomain.IsFinalizingForUnload()) + { +#if CAPTURE_STACKTRACES + Debug.WriteLine("A future context ({0}) was leaked by {1}", this, m_stackTrace); +#endif +#if DEBUG + // If you break here, that means that a native transaction handler was leaked by a FdbTransaction instance (or that the transaction instance was leaked) + if (Debugger.IsAttached) Debugger.Break(); +#endif + Dispose(false); + } + } +#endif + + #endregion + + #region IDisposable... + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + // + +#if DEBUG_FUTURES + Debug.WriteLine("Disposing context {0}#{1} with {2} pending future(s) ({3} total)", this.GetType().Name, m_contextId, m_futures.Count, m_localCookieCounter); +#endif + bool purge; + lock (m_futures) + { + if (m_flags == STATE_DEAD) + { // already dead! + return; + } + m_flags = STATE_DEAD; + purge = m_futures.Count == 0; + } + + if (purge) + { // no pending futures, we can remove ourselves from the global list + lock (s_contexts) + { + s_contexts.Remove(m_contextId); +#if DEBUG_FUTURES + Debug.WriteLine("Dumping all remaining contexts: {0}", s_contexts.Count); + foreach (var ctx in s_contexts) + { + Debug.WriteLine("- {0}#{1} : {2} ({3})", ctx.Value.GetType().Name, ctx.Key, ctx.Value.m_futures.Count, ctx.Value.m_localCookieCounter); + } +#endif + } + } + //else: we have to wait for all callbacks to fire. 
The last one will remove this context from the global list + } + } + + #endregion + + /// A callback has fire for a future handled by this context + private bool OnFutureReady(IntPtr handle, IntPtr cookie, bool fromNetworkThread, out bool purgeContext) + { + uint futureId = GetFutureIdFromCookie(cookie); + + purgeContext = false; + IFdbFuture future; + lock (m_futures) + { + if (m_flags == STATE_DEAD) + { // we are just waiting for all callbacks to fire + m_futures.Remove(futureId); + purgeContext = m_futures.Count == 0; + return false; + } + + m_futures.TryGetValue(futureId, out future); + } + + if (future != null && future.Cookie == cookie) + { + if (future.Visit(handle)) + { // future is ready to process all the results + + lock (m_futures) + { + m_futures.Remove(futureId); + } + + if (fromNetworkThread) + { + ThreadPool.UnsafeQueueUserWorkItem((state) => ((IFdbFuture)state).OnReady(), future); + //TODO: what happens if TP.UQUWI() fails? + } + else + { + future.OnReady(); + } + } + // else: expecting more handles + + // handles will be destroyed when the future completes + return true; + } + return false; + } + + /// Add a new future handle to this context + /// + /// Handle of the newly created future + /// Flag set to true if the future must be disposed by the caller (in case of error), or false if the future will be disposed by some other thread. + /// Method called when the future completes successfully + /// State that will be passed as the second argument to + /// TODO: remove this? 
+ /// Type of future (name of the caller) + /// + protected Task RegisterFuture( + IntPtr handle, + ref bool mustDispose, + [NotNull] Func selector, + object state, + CancellationToken ct, + string label + ) + { + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); + + FdbFutureSingle future = null; + IntPtr cookie = IntPtr.Zero; + uint futureId = (uint)Interlocked.Increment(ref m_localCookieCounter); + + try + { + cookie = MakeCallbackCookie(m_contextId, futureId); + + future = new FdbFutureSingle(handle, selector, state, cookie, label); + + if (FdbNative.FutureIsReady(handle)) + { // the result is already computed +#if DEBUG_FUTURES + Debug.WriteLine("FutureSingle.{0} 0x{1} already completed!", label, handle.ToString("X")); +#endif + cookie = IntPtr.Zero; + mustDispose = false; + future.OnReady(); + return future.Task; + } + + if (ct.CanBeCanceled) + { + if (ct.IsCancellationRequested) + { + future.TrySetCanceled(); + cookie = IntPtr.Zero; + return future.Task; + } + + // note that the cancellation handler can fire inline, but it will only mark the future as cancelled + // this means that we will still wait for the future callback to fire and set the task state in there. 
+ future.m_ctr = RegisterForCancellation(future, ct); + } + + lock (m_futures) + { + m_futures[futureId] = future; + + // note: if the future just got ready, the callback will fire inline (as of v3.0) + // => if this happens, the callback defer the execution to the ThreadPool and returns immediately + var err = FdbNative.FutureSetCallback(handle, GlobalCallback, cookie); + if (!Fdb.Success(err)) + { // the callback will not fire, so we have to abort the future immediately + future.PublishError(null, err); + } + else + { + mustDispose = false; + } + } + return future.Task; + } + catch (Exception e) + { + if (future != null) + { + future.PublishError(e, FdbError.UnknownError); + return future.Task; + } + throw; + } + finally + { + if (mustDispose && cookie != IntPtr.Zero) + { // make sure that we never leak a failed future ! + lock (m_futures) + { + m_futures.Remove(futureId); + } + } + } + } + + /// Add a new future handle to this context + /// + /// Handles of the newly created future + /// Flag set to true if the future must be disposed by the caller (in case of error), or false if the future will be disposed by some other thread. + /// Method called when the future completes successfully + /// State that will be passed as the second argument to + /// TODO: remove this? 
+ /// Type of future (name of the caller) + /// + protected Task RegisterFutures( + [NotNull] IntPtr[] handles, + ref bool mustDispose, + [NotNull] Func selector, + object state, + CancellationToken ct, + string label + ) + { + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); + + FdbFutureArray future = null; + IntPtr cookie = IntPtr.Zero; + uint futureId = (uint) Interlocked.Increment(ref m_localCookieCounter); + try + { + cookie = MakeCallbackCookie(m_contextId, futureId); + + // make a copy because we may diverge from the caller if we partially fail to register the callbacks below + var tmp = new IntPtr[handles.Length]; + handles.CopyTo(tmp, 0); + future = new FdbFutureArray(tmp, selector, state, cookie, label); + + // check the case where all futures are already ready (served from cache?) + bool ready = true; + foreach (var handle in tmp) + { + if (!FdbNative.FutureIsReady(handle)) + { + ready = false; + break; + } + } + if (ready) + { +#if DEBUG_FUTURES + Debug.WriteLine("FutureArray.{0} [{1}] already completed!", label, tmp.Length); +#endif + cookie = IntPtr.Zero; + mustDispose = false; + future.OnReady(); + return future.Task; + } + + if (ct.CanBeCanceled) + { + future.m_ctr = RegisterForCancellation(future, ct); + if (future.Task.IsCompleted) + { // cancellation ran inline + future.TrySetCanceled(); + return future.Task; + } + } + + lock (m_futures) + { + m_futures[futureId] = future; + + // since the callbacks can fire inline, we have to make sure that we finish setting everything up under the lock + for (int i = 0; i < handles.Length; i++) + { + FdbError err = FdbNative.FutureSetCallback(handles[i], GlobalCallback, cookie); + if (Fdb.Success(err)) + { + handles[i] = IntPtr.Zero; + continue; + } + + // we have to cleanup everything, and mute this future + lock (m_futures) + { + m_futures.Remove(futureId); + for (int j = i + 1; j < handles.Length; j++) + { + tmp[j] = IntPtr.Zero; + } + } + + throw Fdb.MapToException(err); + } + } + mustDispose 
= false; + return future.Task; + } + catch (Exception e) + { + if (future != null) + { + future.PublishError(e, FdbError.UnknownError); + return future.Task; + } + throw; + } + finally + { + if (mustDispose && cookie != IntPtr.Zero) + { // make sure that we never leak a failed future ! + lock (m_futures) + { + m_futures.Remove(futureId); + } + } + + } + } + + /// Start a new async operation + /// Result of the operation + /// Lambda called to produce the future handle + /// Argument passed to . It will not be used after the handle has been constructed + /// Lambda called once the future completes (successfully) + /// State object passed to . It will be stored in the future has long as it is active. + /// Optional cancellation token used to cancel the task from an external source. + /// Optional label, used for logging and troubleshooting purpose (by default the name of the caller) + /// + protected Task RunAsync( + [NotNull] Func generator, + object argument, + [NotNull] Func selector, + object state, + CancellationToken ct, + [CallerMemberName] string label = null + ) + { + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); + + bool mustDispose = true; + IntPtr h = IntPtr.Zero; + try + { + RuntimeHelpers.PrepareConstrainedRegions(); + try + { } + finally + { + h = generator(argument); + } + + return RegisterFuture(h, ref mustDispose, selector, state, ct, label); + } + finally + { + if (mustDispose && h != IntPtr.Zero) + { + FdbNative.FutureDestroy(h); + } + } + } + + internal static CancellationTokenRegistration RegisterForCancellation(IFdbFuture future, CancellationToken cancellationToken) + { + //note: if the token is already cancelled, the callback handler will run inline and any exception would bubble up here + //=> this is not a problem because the ctor already has a try/catch that will clean up everything + return cancellationToken.RegisterWithoutEC( + (_state) => { CancellationHandler(_state); }, + future + ); + } + + private static void 
CancellationHandler(object state) + { + var future = (IFdbFuture)state; + Contract.Assert(state != null); +#if DEBUG_FUTURES + Debug.WriteLine("CancellationHandler for " + future + " was called on thread #" + Thread.CurrentThread.ManagedThreadId.ToString()); +#endif + future.Cancel(); + } + + internal static void DestroyHandle(ref IntPtr handle) + { + if (handle != IntPtr.Zero) + { + FdbNative.FutureDestroy(handle); + handle = IntPtr.Zero; + } + } + + internal static void DestroyHandles(ref IntPtr[] handles) + { + if (handles != null) + { + foreach (var handle in handles) + { + if (handle != IntPtr.Zero) FdbNative.FutureDestroy(handle); + } + handles = null; + } + } + + internal const int CATEGORY_SUCCESS = 0; + internal const int CATEGORY_RETRYABLE = 1; + internal const int CATEGORY_CANCELLED = 2; + internal const int CATEGORY_FAILURE = 3; + + internal static int ClassifyErrorSeverity(FdbError error) + { + switch (error) + { + case FdbError.Success: + { + return CATEGORY_SUCCESS; + } + case FdbError.PastVersion: + case FdbError.FutureVersion: + case FdbError.NotCommitted: + case FdbError.CommitUnknownResult: + { + return CATEGORY_RETRYABLE; + } + + case FdbError.OperationCancelled: // happens if a future is cancelled (probably a watch) + case FdbError.TransactionCancelled: // happens if a transaction is cancelled (via its own parent CT, or via tr.Cancel()) + { + return CATEGORY_CANCELLED; + } + + default: + { + return CATEGORY_FAILURE; + } + } + } + } + + internal class FdbFutureContext : FdbFutureContext + where THandle : FdbSafeHandle + { + + protected readonly THandle m_handle; + + protected FdbFutureContext([NotNull] THandle handle) + { + if (handle == null) throw new ArgumentNullException("handle"); + m_handle = handle; + } + + public THandle Handle { [NotNull] get { return m_handle; } } + + protected override void Dispose(bool disposing) + { + try + { + base.Dispose(disposing); + } + finally + { + if (disposing) + { + lock (this.Handle) + { + if 
(!this.Handle.IsClosed) this.Handle.Dispose(); + } + } + } + } + + /// Start a new async operation + /// Type of the result of the operation + /// Type of the argument passed to the generator + /// Lambda called to produce the future handle + /// Argument passed to . It will not be used after the handle has been constructed + /// Lambda called once the future completes (successfully) + /// State object passed to . It will be stored in the future has long as it is active. + /// Optional cancellation token used to cancel the task from an external source. + /// Optional label, used for logging and troubleshooting purpose (by default the name of the caller) + /// + protected Task RunAsync( + [NotNull] Func generator, + TArg argument, + [NotNull] Func selector, + object state, + CancellationToken ct, + [CallerMemberName] string label = null + ) + { + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); + + bool mustDispose = true; + IntPtr h = IntPtr.Zero; + try + { + lock (this.Handle) + { + if (this.Handle.IsClosed) throw new ObjectDisposedException(this.GetType().Name); + h = generator(m_handle, argument); + } + return RegisterFuture(h, ref mustDispose, selector, state, ct, label); + } + finally + { + if (mustDispose && h != IntPtr.Zero) + { + FdbNative.FutureDestroy(h); + } + } + } + + protected Task RunAsync( + int count, + Action generator, + TArg arg, + Func selector, + object state, + CancellationToken ct, + [CallerMemberName] string label = null + + ) + { + bool mustDispose = true; + var handles = new IntPtr[count]; + try + { + lock (this.Handle) + { + if (this.Handle.IsClosed) throw new ObjectDisposedException(this.GetType().Name); + generator(m_handle, arg, handles); + } + return RegisterFutures(handles, ref mustDispose, selector, state, ct, label); + } + catch + { + foreach (var future in handles) + { + if (future != IntPtr.Zero) FdbNative.FutureDestroy(future); + } + throw; + } + } + + } + +} diff --git 
a/FoundationDB.Client/Native/Futures/FdbFutureSingle.cs b/FoundationDB.Client/Native/Futures/FdbFutureSingle.cs new file mode 100644 index 000000000..d07fa8d66 --- /dev/null +++ b/FoundationDB.Client/Native/Futures/FdbFutureSingle.cs @@ -0,0 +1,159 @@ +#region BSD Licence +/* Copyright (c) 2013-2015, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace FoundationDB.Client.Native +{ + using System; + using System.Diagnostics; + using System.Runtime.ExceptionServices; + using System.Threading; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; + + /// FDBFuture wrapper + /// Type of result + internal sealed class FdbFutureSingle : FdbFuture + { + #region Private Members... + + /// Value of the 'FDBFuture*' + private IntPtr m_handle; + + /// Lambda used to extract the result of this FDBFuture + private readonly Func m_resultSelector; + + #endregion + + internal FdbFutureSingle(IntPtr handle, [NotNull] Func selector, object state, IntPtr cookie, string label) + : base(cookie, label, state) + { + if (handle == IntPtr.Zero) throw new ArgumentException("Invalid future handle", nameof(handle)); + if (selector == null) throw new ArgumentNullException(nameof(selector)); + + m_handle = handle; + m_resultSelector = selector; + } + + public override bool Visit(IntPtr handle) + { +#if DEBUG_FUTURES + Debug.WriteLine("FutureSingle.{0}<{1}>.Visit(0x{2})", this.Label, typeof(T).Name, handle.ToString("X8")); +#endif + Contract.Requires(handle == m_handle, this.Label); + return true; + } + + [HandleProcessCorruptedStateExceptions] // to be able to handle Access Violations and terminate the process + public override void OnReady() + { + IntPtr handle = IntPtr.Zero; + + //README: + // - This callback will fire either from the ThreadPool (async ops) or inline form the ctor of the future (non-async ops, or ops that where served from some cache). + // - The method *MUST* dispose the future handle before returning, and *SHOULD* do so before signaling the task. + // => This is because continuations may run inline, and start new futures from there, while we still have our original future handle opened. + + try + { + handle = Interlocked.Exchange(ref m_handle, IntPtr.Zero); + if (handle == IntPtr.Zero) return; // already disposed? 
+ +#if DEBUG_FUTURES + Debug.WriteLine("FutureSingle.{0}<{1}>.OnReady(0x{2})", this.Label, typeof(T).Name, handle.ToString("X8")); +#endif + + if (this.Task.IsCompleted) + { // task has already been handled by someone else + return; + } + + var result = default(T); + var error = default(Exception); + + var code = FdbNative.FutureGetError(handle); + if (code == FdbError.Success) + { + try + { + result = m_resultSelector(handle, this.Task.AsyncState); + } + catch (AccessViolationException e) + { // trouble in paradise! + +#if DEBUG_FUTURES + Debug.WriteLine("EPIC FAIL: " + e.ToString()); +#endif + + // => THIS IS VERY BAD! We have no choice but to terminate the process immediately, because any new call to any method to the binding may end up freezing the whole process (best case) or sending corrupted data to the cluster (worst case) + if (Debugger.IsAttached) Debugger.Break(); + + Environment.FailFast("FIXME: FDB done goofed!", e); + } + catch (Exception e) + { +#if DEBUG_FUTURES + Debug.WriteLine("FAIL: " + e.ToString()); +#endif + code = FdbError.InternalError; + error = e; + } + } + + // since continuations may fire inline, make sure to release all the memory used by this handle first + FdbFutureContext.DestroyHandle(ref handle); + + if (code == FdbError.Success) + { + PublishResult(result); + } + else + { + PublishError(error, code); + } + } + catch (Exception e) + { // we must not blow up the TP or the parent, so make sure to propagate all exceptions to the task + TrySetException(e); + } + finally + { + if (handle != IntPtr.Zero) FdbFutureContext.DestroyHandle(ref handle); + GC.KeepAlive(this); + } + } + + protected override void OnCancel() + { + IntPtr handle = Volatile.Read(ref m_handle); + //TODO: we probably need locking to prevent concurrent destroy and cancel calls + if (handle != IntPtr.Zero) FdbNative.FutureCancel(handle); + } + + } + +} diff --git a/FoundationDB.Client/Layers/Tuples/TypeSystem/TupleKeyEncoding.cs 
b/FoundationDB.Client/Native/Futures/IFdbFuture.cs similarity index 64% rename from FoundationDB.Client/Layers/Tuples/TypeSystem/TupleKeyEncoding.cs rename to FoundationDB.Client/Native/Futures/IFdbFuture.cs index 9db59e1f7..7ec3fc921 100644 --- a/FoundationDB.Client/Layers/Tuples/TypeSystem/TupleKeyEncoding.cs +++ b/FoundationDB.Client/Native/Futures/IFdbFuture.cs @@ -1,4 +1,4 @@ -#region BSD Licence +#region BSD Licence /* Copyright (c) 2013-2015, Doxense SAS All rights reserved. @@ -26,37 +26,31 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion +// enable this to help debug Futures +#undef DEBUG_FUTURES -namespace FoundationDB.Layers.Tuples +namespace FoundationDB.Client.Native { using System; - using FoundationDB.Client; - public sealed class TupleKeyEncoding : IFdbKeyEncoding + internal interface IFdbFuture { - public IDynamicKeyEncoder GetDynamicEncoder() - { - return TupleKeyEncoder.Instance; - } - - public IKeyEncoder GetEncoder() - { - return KeyValueEncoders.Tuples.Key(); - } - - public ICompositeKeyEncoder GetEncoder() - { - return KeyValueEncoders.Tuples.CompositeKey(); - } - - public ICompositeKeyEncoder GetEncoder() - { - return KeyValueEncoders.Tuples.CompositeKey(); - } - - public ICompositeKeyEncoder GetEncoder() - { - return KeyValueEncoders.Tuples.CompositeKey(); - } + /// Unique identifier of this future + IntPtr Cookie { get; } + + /// Label of the future (usually the name of the operation) + string Label { get; } + + /// Cancel the future, if it hasen't completed yet + void Cancel(); + + /// Test if this was the last pending handle for this future, or not + /// Handle that completed + /// True if this was the last handle and can be called, or False if more handles need to fire first. 
+ bool Visit(IntPtr handle); + + /// Called when all handles tracked by this future have fired + void OnReady(); } -} \ No newline at end of file + +} diff --git a/FoundationDB.Client/Native/Handles/ClusterHandle.cs b/FoundationDB.Client/Native/Handles/ClusterHandle.cs index 9f5b74abe..925a46a6d 100644 --- a/FoundationDB.Client/Native/Handles/ClusterHandle.cs +++ b/FoundationDB.Client/Native/Handles/ClusterHandle.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Client/Native/Handles/DatabaseHandle.cs b/FoundationDB.Client/Native/Handles/DatabaseHandle.cs index d74aa286c..23507c172 100644 --- a/FoundationDB.Client/Native/Handles/DatabaseHandle.cs +++ b/FoundationDB.Client/Native/Handles/DatabaseHandle.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Client/Native/Handles/FdbSafeHandle.cs b/FoundationDB.Client/Native/Handles/FdbSafeHandle.cs index eb49d9af8..9863a6ed6 100644 --- a/FoundationDB.Client/Native/Handles/FdbSafeHandle.cs +++ b/FoundationDB.Client/Native/Handles/FdbSafeHandle.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -#undef DEBUG_HANDLES +//#define DEBUG_HANDLES namespace FoundationDB.Client.Native { diff --git a/FoundationDB.Client/Native/Handles/FutureHandle.cs b/FoundationDB.Client/Native/Handles/FutureHandle.cs index cf5c18674..fc6f5fcb2 100644 --- a/FoundationDB.Client/Native/Handles/FutureHandle.cs +++ b/FoundationDB.Client/Native/Handles/FutureHandle.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,6 +26,8 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion +#if REFACTORED + namespace FoundationDB.Client.Native { using FoundationDB.Client.Utils; @@ -62,3 +64,5 @@ public override string ToString() } } + +#endif diff --git a/FoundationDB.Client/Native/Handles/TransactionHandle.cs b/FoundationDB.Client/Native/Handles/TransactionHandle.cs index e4f80d0c3..643c34da4 100644 --- a/FoundationDB.Client/Native/Handles/TransactionHandle.cs +++ b/FoundationDB.Client/Native/Handles/TransactionHandle.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Client/Native/UnmanagedLibrary.cs b/FoundationDB.Client/Native/UnmanagedLibrary.cs index 4696f3298..968045b0a 100644 --- a/FoundationDB.Client/Native/UnmanagedLibrary.cs +++ b/FoundationDB.Client/Native/UnmanagedLibrary.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -72,23 +72,24 @@ protected override bool ReleaseHandle() [SuppressUnmanagedCodeSecurity] private static class NativeMethods { -#if __MonoCS__ - const string KERNEL = "dl"; + const string LIBDL = "dl"; + - [DllImport(KERNEL)] + [DllImport(LIBDL)] public static extern SafeLibraryHandle dlopen(string fileName, int flags); - [DllImport(KERNEL, SetLastError = true)] + + [DllImport(LIBDL, SetLastError = true)] [return: MarshalAs(UnmanagedType.Bool)] public static extern int dlclose(IntPtr hModule); - public static SafeLibraryHandle LoadLibrary(string fileName) - { +#if __MonoCS__ + public static SafeLibraryHandle LoadPlatformLibrary(string fileName) + { return dlopen(fileName, 1); - } - public static bool FreeLibrary(IntPtr hModule) { return dlclose(hModule) == 0; } + public static bool FreePlatformLibrary(IntPtr hModule) { return dlclose(hModule) == 0; } #else const string KERNEL = "kernel32"; @@ -100,6 +101,24 @@ public static SafeLibraryHandle LoadLibrary(string fileName) [DllImport(KERNEL, SetLastError = true)] [return: MarshalAs(UnmanagedType.Bool)] public static extern bool FreeLibrary(IntPtr hModule); + + public static SafeLibraryHandle LoadPlatformLibrary(string fileName) + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + return LoadLibrary(fileName); + } + return dlopen(fileName, 1); + } + + public static bool FreePlatformLibrary(IntPtr hModule) + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + return FreeLibrary(hModule); + } + return dlclose(hModule) == 0; + } #endif } @@ -112,7 +131,7 @@ public static UnmanagedLibrary Load(string path) { if (path == null) throw new ArgumentNullException("path"); - var handle = NativeMethods.LoadLibrary(path); + var handle = NativeMethods.LoadPlatformLibrary(path); if (handle == null || handle.IsInvalid) { var ex = Marshal.GetExceptionForHR(Marshal.GetHRForLastWin32Error()); diff --git 
a/FoundationDB.Client/Properties/AssemblyInfo.cs b/FoundationDB.Client/Properties/AssemblyInfo.cs index faab1f0f1..c17316435 100644 --- a/FoundationDB.Client/Properties/AssemblyInfo.cs +++ b/FoundationDB.Client/Properties/AssemblyInfo.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -30,10 +30,6 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY using System.Runtime.CompilerServices; using System.Runtime.InteropServices; -[assembly: AssemblyTitle("FoundationDB.Client")] -[assembly: AssemblyDescription(".NET Binding for FoundationDB")] -[assembly: AssemblyConfiguration("")] - [assembly: ComVisible(false)] [assembly: Guid("0fce138d-cb61-49fd-bb0a-a0ecb37abe78")] @@ -41,4 +37,4 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY [assembly: InternalsVisibleTo("FoundationDB.Layers.Common, PublicKey=0024000004800000940000000602000000240000525341310004000001000100a9e653303024d91e3e98cdb33228897aebc9aeb0dd5e0890a2362ff08231643525d86e955d52a9be450a9602eedbc1c0eb463d227320a6b6ad1c7129f21353b2b28242d712a0e7b3aaff55c0ab1019c92bea6806b9cf64e93d976143dc57e0a8e73a65c03422ab2624c1220d84f7e88c5a5c3c9edefcf4a76969d458348403ce")] [assembly: InternalsVisibleTo("FoundationDB.Layers.Experimental, PublicKey=0024000004800000940000000602000000240000525341310004000001000100a9e653303024d91e3e98cdb33228897aebc9aeb0dd5e0890a2362ff08231643525d86e955d52a9be450a9602eedbc1c0eb463d227320a6b6ad1c7129f21353b2b28242d712a0e7b3aaff55c0ab1019c92bea6806b9cf64e93d976143dc57e0a8e73a65c03422ab2624c1220d84f7e88c5a5c3c9edefcf4a76969d458348403ce")] [assembly: InternalsVisibleTo("FoundationDB.Linq.Providers, 
PublicKey=0024000004800000940000000602000000240000525341310004000001000100a9e653303024d91e3e98cdb33228897aebc9aeb0dd5e0890a2362ff08231643525d86e955d52a9be450a9602eedbc1c0eb463d227320a6b6ad1c7129f21353b2b28242d712a0e7b3aaff55c0ab1019c92bea6806b9cf64e93d976143dc57e0a8e73a65c03422ab2624c1220d84f7e88c5a5c3c9edefcf4a76969d458348403ce")] -[assembly: InternalsVisibleTo("FoundationDB.Tests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100a9e653303024d91e3e98cdb33228897aebc9aeb0dd5e0890a2362ff08231643525d86e955d52a9be450a9602eedbc1c0eb463d227320a6b6ad1c7129f21353b2b28242d712a0e7b3aaff55c0ab1019c92bea6806b9cf64e93d976143dc57e0a8e73a65c03422ab2624c1220d84f7e88c5a5c3c9edefcf4a76969d458348403ce")] \ No newline at end of file +[assembly: InternalsVisibleTo("FoundationDB.Tests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100a9e653303024d91e3e98cdb33228897aebc9aeb0dd5e0890a2362ff08231643525d86e955d52a9be450a9602eedbc1c0eb463d227320a6b6ad1c7129f21353b2b28242d712a0e7b3aaff55c0ab1019c92bea6806b9cf64e93d976143dc57e0a8e73a65c03422ab2624c1220d84f7e88c5a5c3c9edefcf4a76969d458348403ce")] diff --git a/FoundationDB.Client/Async/AsyncBuffer.cs b/FoundationDB.Client/Shared/Async/AsyncBuffer.cs similarity index 68% rename from FoundationDB.Client/Async/AsyncBuffer.cs rename to FoundationDB.Client/Shared/Async/AsyncBuffer.cs index bbcb29564..b034b0421 100644 --- a/FoundationDB.Client/Async/AsyncBuffer.cs +++ b/FoundationDB.Client/Shared/Async/AsyncBuffer.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,69 +28,69 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY //#define FULL_DEBUG -namespace FoundationDB.Async +namespace Doxense.Async { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Runtime.ExceptionServices; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Buffer that holds a fixed number of items and can rate-limit the producer - /// - /// - public class AsyncBuffer : AsyncProducerConsumerQueue, IAsyncSource + /// + /// + public class AsyncBuffer : AsyncProducerConsumerQueue, IAsyncSource { #region Private Members... /// Transformation applied on the values - private readonly Func m_transform; + private readonly Func m_transform; /// Queue that holds items produced but not yet consumed /// The queue can sometime go over the limit because the Complete/Error message are added without locking - private readonly Queue> m_queue = new Queue>(); + private readonly Queue> m_queue = new Queue>(); #endregion #region Constructors... - public AsyncBuffer([NotNull] Func transform, int capacity) + public AsyncBuffer([NotNull] Func transform, int capacity) : base(capacity) { - if (transform == null) throw new ArgumentNullException("transform"); + Contract.NotNull(transform, nameof(transform)); m_transform = transform; } #endregion - #region IFdbAsyncTarget... + #region IAsyncTarget... 
- public override Task OnNextAsync(T value, CancellationToken cancellationToken) + public override Task OnNextAsync(TInput value, CancellationToken ct) { - if (cancellationToken.IsCancellationRequested) return TaskHelpers.FromCancellation(cancellationToken); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); LogProducer("Received new value"); Task wait; lock (m_lock) { - if (m_done) return TaskHelpers.FromException(new InvalidOperationException("Cannot send any more values because this buffer has already completed")); + if (m_done) return Task.FromException(new InvalidOperationException("Cannot send any more values because this buffer has already completed")); if (m_queue.Count < m_capacity) { // quick path Enqueue_NeedsLocking(Maybe.Return(value)); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } // we are blocked, we will need to wait ! - wait = MarkProducerAsBlocked_NeedsLocking(cancellationToken); + wait = MarkProducerAsBlocked_NeedsLocking(ct); } // slow path - return WaitForNextFreeSlotThenEnqueueAsync(value, wait, cancellationToken); + return WaitForNextFreeSlotThenEnqueueAsync(value, wait, ct); } public override void OnCompleted() @@ -101,26 +101,12 @@ public override void OnCompleted() { LogProducer("Completion received"); m_done = true; - m_queue.Enqueue(Maybe.Nothing()); + m_queue.Enqueue(Maybe.Nothing()); WakeUpBlockedConsumer_NeedsLocking(); } } } -#if NET_4_0 - public override void OnError(Exception error) - { - lock (m_lock) - { - if (!m_done) - { - LogProducer("Error received: " + error.Message); - m_queue.Enqueue(Maybe.Error(error)); - WakeUpBlockedConsumer_NeedsLocking(); - } - } - } -#else public override void OnError(ExceptionDispatchInfo error) { lock (m_lock) @@ -128,14 +114,13 @@ public override void OnError(ExceptionDispatchInfo error) if (!m_done) { LogProducer("Error received: " + error.SourceException.Message); - m_queue.Enqueue(Maybe.Error(error)); + m_queue.Enqueue(Maybe.Error(error)); 
WakeUpBlockedConsumer_NeedsLocking(); } } } -#endif - private void Enqueue_NeedsLocking(Maybe value) + private void Enqueue_NeedsLocking(Maybe value) { m_queue.Enqueue(value); @@ -145,7 +130,7 @@ private void Enqueue_NeedsLocking(Maybe value) } } - private async Task WaitForNextFreeSlotThenEnqueueAsync(T value, Task wait, CancellationToken ct) + private async Task WaitForNextFreeSlotThenEnqueueAsync(TInput value, Task wait, CancellationToken ct) { ct.ThrowIfCancellationRequested(); @@ -162,16 +147,16 @@ private async Task WaitForNextFreeSlotThenEnqueueAsync(T value, Task wait, Cance #endregion - #region IFdbAsyncSource... + #region IAsyncSource... - public Task> ReceiveAsync(CancellationToken ct) + public Task> ReceiveAsync(CancellationToken ct) { - if (ct.IsCancellationRequested) return TaskHelpers.FromCancellation>(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled>(ct); LogConsumer("Looking for next value..."); Task wait = null; - Maybe item; + Maybe item; lock (m_lock) { if (m_queue.Count > 0) @@ -183,12 +168,12 @@ public Task> ReceiveAsync(CancellationToken ct) else if (m_done) { LogConsumer("The queue was complete"); - item = Maybe.Nothing(); + item = Maybe.Nothing(); } else { wait = MarkConsumerAsBlocked_NeedsLocking(ct); - item = default(Maybe); // needed to please the compiler + item = default(Maybe); // needed to please the compiler } } @@ -200,26 +185,26 @@ public Task> ReceiveAsync(CancellationToken ct) return Task.FromResult(ProcessResult(item)); } - private Maybe ProcessResult(Maybe item) + private Maybe ProcessResult(Maybe item) { if (item.IsEmpty) { // that was the last one ! 
m_receivedLast = true; LogConsumer("Received last item"); - return Maybe.Nothing(); + return Maybe.Nothing(); } LogConsumer("Applying transform on item"); - return Maybe.Apply(item, m_transform); + return Maybe.Apply(item, m_transform); } - private async Task> WaitForNextItemAsync(Task wait, CancellationToken ct) + private async Task> WaitForNextItemAsync(Task wait, CancellationToken ct) { await wait.ConfigureAwait(false); LogConsumer("Wake up because one item arrived"); - Maybe item; + Maybe item; lock(m_lock) { ct.ThrowIfCancellationRequested(); diff --git a/FoundationDB.Client/Async/AsyncCancellableMutex.cs b/FoundationDB.Client/Shared/Async/AsyncCancellableMutex.cs similarity index 87% rename from FoundationDB.Client/Async/AsyncCancellableMutex.cs rename to FoundationDB.Client/Shared/Async/AsyncCancellableMutex.cs index 2e4a16990..fb11c9e98 100644 --- a/FoundationDB.Client/Async/AsyncCancellableMutex.cs +++ b/FoundationDB.Client/Shared/Async/AsyncCancellableMutex.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,12 +26,14 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Async { using System; using System.Diagnostics; using System.Threading; using System.Threading.Tasks; + using Doxense.Threading.Tasks; + using JetBrains.Annotations; /// Implements a async mutex that supports cancellation [DebuggerDisplay("Status={this.Task.Status}, CancellationState=({m_state}, {m_ct.IsCancellationRequested?\"alive\":\"cancelled\"})")] @@ -43,7 +45,6 @@ public class AsyncCancelableMutex : TaskCompletionSource // note: this is not really a mutex because there is no "Reset()" method (not possible to reset a TCS)... 
private static readonly Action s_cancellationCallback = CancellationHandler; - private static readonly AsyncCancelableMutex s_alreadyCompleted = CreateAlreadyDone(); /// Returns an already completed, new mutex instance private static AsyncCancelableMutex CreateAlreadyDone() @@ -53,10 +54,8 @@ private static AsyncCancelableMutex CreateAlreadyDone() return mtx; } - public static AsyncCancelableMutex AlreadyDone - { - get { return s_alreadyCompleted; } - } + /// Mutex that has already completed + public static AsyncCancelableMutex AlreadyDone { [NotNull] get; } = CreateAlreadyDone(); private const int STATE_NONE = 0; private const int STATE_SET = 1; @@ -83,7 +82,7 @@ public AsyncCancelableMutex(CancellationToken ct) { if (ct.CanBeCanceled) { - m_ctr = ct.Register(s_cancellationCallback, new WeakReference(this), useSynchronizationContext: false); + m_ctr = ct.RegisterWithoutEC(s_cancellationCallback, new WeakReference(this)); } GC.SuppressFinalize(this); } @@ -130,12 +129,12 @@ public bool Abort(bool async = false) private static void SetDefered(AsyncCancelableMutex mutex) { - ThreadPool.QueueUserWorkItem((state) => ((AsyncCancelableMutex)state).TrySetResult(null), mutex); + ThreadPool.UnsafeQueueUserWorkItem((state) => ((AsyncCancelableMutex)state).TrySetResult(null), mutex); } private static void CancelDefered(AsyncCancelableMutex mutex) { - ThreadPool.QueueUserWorkItem((state) => ((AsyncCancelableMutex)state).TrySetCanceled(), mutex); + ThreadPool.UnsafeQueueUserWorkItem((state) => ((AsyncCancelableMutex)state).TrySetCanceled(), mutex); } } diff --git a/FoundationDB.Client/Async/AsyncHelpers.cs b/FoundationDB.Client/Shared/Async/AsyncHelpers.cs similarity index 53% rename from FoundationDB.Client/Async/AsyncHelpers.cs rename to FoundationDB.Client/Shared/Async/AsyncHelpers.cs index 9bc4dd899..235ae3953 100644 --- a/FoundationDB.Client/Async/AsyncHelpers.cs +++ b/FoundationDB.Client/Shared/Async/AsyncHelpers.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 
2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,26 +26,23 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Async { - using FoundationDB.Client.Utils; using System; using System.Collections.Generic; using System.Runtime.ExceptionServices; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using Doxense.Threading.Tasks; /// Helper methods for creating and manipulating async sequences. public static class AsyncHelpers { internal static readonly Action NoOpCompletion = () => { }; -#if NET_4_0 - internal static readonly Action NoOpError = (e) => { }; - internal static readonly Action RethrowError = (e) => { throw e; }; -#else internal static readonly Action NoOpError = (e) => { }; internal static readonly Action RethrowError = (e) => { e.Throw(); }; -#endif #region Targets... 
@@ -53,11 +50,7 @@ public static class AsyncHelpers public static IAsyncTarget CreateTarget( Func onNextAsync, Action onCompleted = null, -#if NET_4_0 - Action onError = null -#else Action onError = null -#endif ) { return new AnonymousAsyncTarget(onNextAsync, onCompleted, onError); @@ -67,11 +60,7 @@ public static IAsyncTarget CreateTarget( public static IAsyncTarget CreateTarget( Action onNext, Action onCompleted = null, -#if NET_4_0 - Action onError = null -#else Action onError = null -#endif ) { return new AnonymousTarget(onNext, onCompleted, onError); @@ -82,26 +71,22 @@ public static Task Publish(this IAsyncTarget target, Maybe result, Canc { Contract.Requires(target != null); - if (ct.IsCancellationRequested) return TaskHelpers.FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); if (result.HasValue) - { + { // we have the next value return target.OnNextAsync(result.Value, ct); } - else if (result.HasFailed) - { -#if NET_4_0 - target.OnError(result.Error); -#else + + if (result.Failed) + { // we have failed target.OnError(result.CapturedError); -#endif - return TaskHelpers.CompletedTask; - } - else - { - target.OnCompleted(); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } + + // this is the end of the stream + target.OnCompleted(); + return Task.CompletedTask; } /// Wrapper class for use with async lambda callbacks @@ -112,20 +97,12 @@ internal sealed class AnonymousAsyncTarget : IAsyncTarget private readonly Action m_onCompleted; -#if NET_4_0 - private readonly Action m_onError; -#else private readonly Action m_onError; -#endif public AnonymousAsyncTarget( Func onNextAsync, Action onCompleted, -#if NET_4_0 - Action onError -#else Action onError -#endif ) { m_onNextAsync = onNextAsync; @@ -133,9 +110,9 @@ Action onError m_onError = onError; } - public Task OnNextAsync(T value, CancellationToken cancellationToken) + public Task OnNextAsync(T value, CancellationToken ct) { - return m_onNextAsync(value, 
cancellationToken); + return m_onNextAsync(value, ct); } public void OnCompleted() @@ -143,11 +120,7 @@ public void OnCompleted() m_onCompleted(); } -#if NET_4_0 - public void OnError(Exception error) -#else public void OnError(ExceptionDispatchInfo error) -#endif { m_onError(error); } @@ -161,51 +134,31 @@ internal sealed class AnonymousTarget : IAsyncTarget private readonly Action m_onCompleted; -#if NET_4_0 - private readonly Action m_onError; -#else private readonly Action m_onError; -#endif public AnonymousTarget( Action onNext, Action onCompleted, -#if NET_4_0 - Action onError -#else Action onError -#endif ) { - if (onNext == null) throw new ArgumentNullException("onNext"); + Contract.NotNull(onNext, nameof(onNext)); m_onNext = onNext; m_onCompleted = onCompleted; m_onError = onError; } - public Task OnNextAsync(T value, CancellationToken cancellationToken) + public Task OnNextAsync(T value, CancellationToken ct) { - return TaskHelpers.Inline(m_onNext, value, cancellationToken, cancellationToken); + return TaskHelpers.Inline(m_onNext, value, ct, ct); } public void OnCompleted() { - if (m_onCompleted != null) - { - m_onCompleted(); - } + m_onCompleted?.Invoke(); } -#if NET_4_0 - public void OnError(Exception error) - { - if (m_onError != null) - m_onError(error); - else - throw error; - } -#else public void OnError(ExceptionDispatchInfo error) { if (m_onError != null) @@ -213,35 +166,113 @@ public void OnError(ExceptionDispatchInfo error) else error.Throw(); } -#endif } #endregion #region Pumps... 
- public static async Task PumpToAsync(this IAsyncSource source, IAsyncTarget target, CancellationToken cancellationToken) + /// Consumes all the elements of the source, and publish them to the target, one by one and in order + /// Source that produces elements asynchronously + /// Target that consumes elements asynchronously + /// Cancellation token + /// Task that completes when all the elements of the source have been published to the target, or fails if on the first error, or the token is cancelled unexpectedly + /// The pump will only read one element at a time, and wait for it to be published to the target, before reading the next element. + public static async Task PumpToAsync(this IAsyncSource source, IAsyncTarget target, CancellationToken ct) { - if (cancellationToken.IsCancellationRequested) cancellationToken.ThrowIfCancellationRequested(); + ct.ThrowIfCancellationRequested(); + + bool notifiedCompletion = false; + bool notifiedError = false; - using (var pump = new AsyncPump(source, target)) + try { - await pump.PumpAsync(stopOnFirstError: true, cancellationToken: cancellationToken).ConfigureAwait(false); + //LogPump("Starting pump"); + + while (!ct.IsCancellationRequested) + { + //LogPump("Waiting for next"); + + var current = await source.ReceiveAsync(ct).ConfigureAwait(false); + + //LogPump("Received " + (current.HasValue ? "value" : current.Failed ? "error" : "completion") + ", publishing... " + current); + if (ct.IsCancellationRequested) + { + // REVIEW: should we notify the target? + // REVIEW: if the item is IDisposble, who will clean up? + break; + } + + // push the data/error/completion on to the target, which will triage and update its state accordingly + await target.Publish(current, ct).ConfigureAwait(false); + + if (current.Failed) + { // bounce the error back to the caller + //REVIEW: SHOULD WE? We poush the error to the target, and the SAME error to the caller... who should be responsible for handling it? 
+ // => target should know about the error (to cancel something) + // => caller should maybe also know that the pump failed unexpectedly.... + notifiedError = true; + current.ThrowForNonSuccess(); // throws an exception right here + return; // should not be reached + } + else if (current.IsEmpty) + { // the source has completed, stop the pump + //LogPump("Completed"); + notifiedCompletion = true; + return; + } + } + + // notify cancellation if it happend while we were pumping + if (ct.IsCancellationRequested) + { + //LogPump("We were cancelled!"); + throw new OperationCanceledException(ct); + } + } + catch (Exception e) + { + //LogPump("Failed: " + e); + + if (!notifiedCompletion && !notifiedError) + { // notify the target that we crashed while fetching the next + try + { + //LogPump("Push error down to target: " + e.Message); + target.OnError(ExceptionDispatchInfo.Capture(e)); + notifiedError = true; + } + catch (Exception x) when (!x.IsFatalError()) + { + //LogPump("Failed to notify target of error: " + x.Message); + } + } + + throw; + } + finally + { + if (!notifiedCompletion) + { // we must be sure to complete the target if we haven't done so yet! 
+ //LogPump("Notify target of completion due to unexpected conditions"); + target.OnCompleted(); + } + //LogPump("Stopped pump"); } } /// Pump the content of a source into a list - public static async Task> PumpToListAsync(this IAsyncSource source, CancellationToken cancellationToken) + public static async Task> PumpToListAsync(this IAsyncSource source, CancellationToken ct) { - if (cancellationToken.IsCancellationRequested) cancellationToken.ThrowIfCancellationRequested(); + if (ct.IsCancellationRequested) ct.ThrowIfCancellationRequested(); - var buffer = new FoundationDB.Linq.FdbAsyncEnumerable.Buffer(); + var buffer = new Buffer(); var target = CreateTarget( (x, _) => buffer.Add(x) ); - await PumpToAsync(source, target, cancellationToken).ConfigureAwait(false); + await PumpToAsync(source, target, ct).ConfigureAwait(false); return buffer.ToList(); } @@ -265,24 +296,24 @@ public static AsyncTaskBuffer CreateUnorderedAsyncBuffer(int capacity) #region Transforms... - public static AsyncTransform CreateAsyncTransform(Func> transform, IAsyncTarget> target, TaskScheduler scheduler = null) + public static AsyncTransform CreateAsyncTransform(Func> transform, IAsyncTarget> target, TaskScheduler scheduler = null) { - return new AsyncTransform(transform, target, scheduler); + return new AsyncTransform(transform, target, scheduler); } - public static async Task> TransformToListAsync(IAsyncSource source, Func> transform, CancellationToken cancellationToken, int? maxConcurrency = null, TaskScheduler scheduler = null) + public static async Task> TransformToListAsync(IAsyncSource source, Func> transform, CancellationToken ct, int? maxConcurrency = null, TaskScheduler scheduler = null) { - cancellationToken.ThrowIfCancellationRequested(); + ct.ThrowIfCancellationRequested(); - using (var queue = CreateOrderPreservingAsyncBuffer(maxConcurrency ?? 32)) + using (var queue = CreateOrderPreservingAsyncBuffer(maxConcurrency ?? 
32)) { - using (var pipe = CreateAsyncTransform(transform, queue, scheduler)) + using (var pipe = CreateAsyncTransform(transform, queue, scheduler)) { // start the output pump - var output = PumpToListAsync(queue, cancellationToken); + var output = PumpToListAsync(queue, ct); // start the intput pump - var input = PumpToAsync(source, pipe, cancellationToken); + var input = PumpToAsync(source, pipe, ct); await Task.WhenAll(input, output).ConfigureAwait(false); diff --git a/FoundationDB.Client/Async/AsyncOrderingMode.cs b/FoundationDB.Client/Shared/Async/AsyncOrderingMode.cs similarity index 96% rename from FoundationDB.Client/Async/AsyncOrderingMode.cs rename to FoundationDB.Client/Shared/Async/AsyncOrderingMode.cs index 52c5ef327..402ae2b23 100644 --- a/FoundationDB.Client/Async/AsyncOrderingMode.cs +++ b/FoundationDB.Client/Shared/Async/AsyncOrderingMode.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Async { using System; diff --git a/FoundationDB.Client/Async/AsyncProducerConsumerQueue.cs b/FoundationDB.Client/Shared/Async/AsyncProducerConsumerQueue.cs similarity index 90% rename from FoundationDB.Client/Async/AsyncProducerConsumerQueue.cs rename to FoundationDB.Client/Shared/Async/AsyncProducerConsumerQueue.cs index 0ad6fb39e..cbcaeea3f 100644 --- a/FoundationDB.Client/Async/AsyncProducerConsumerQueue.cs +++ b/FoundationDB.Client/Shared/Async/AsyncProducerConsumerQueue.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,7 +28,7 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY //#define FULL_DEBUG -namespace FoundationDB.Async +namespace Doxense.Async { using System; using System.Diagnostics; @@ -60,20 +60,16 @@ public abstract class AsyncProducerConsumerQueue : IAsyncTarget, IDisposab protected AsyncProducerConsumerQueue(int capacity) { - if (capacity <= 0) throw new ArgumentOutOfRangeException("capacity", "Capacity must be greater than zero"); + if (capacity <= 0) throw new ArgumentOutOfRangeException(nameof(capacity), "Capacity must be greater than zero"); m_capacity = capacity; } - public abstract Task OnNextAsync(T value, CancellationToken cancellationToken); + public abstract Task OnNextAsync(T value, CancellationToken ct); public abstract void OnCompleted(); -#if NET_4_0 - public abstract void OnError(Exception error); -#else public abstract void OnError(ExceptionDispatchInfo error); -#endif /// Delcare the producer as beeing blocked on a full queue /// @@ -82,7 +78,7 @@ protected Task MarkProducerAsBlocked_NeedsLocking(CancellationToken ct) { if (ct.IsCancellationRequested) { - return TaskHelpers.FromCancellation(ct); + return Task.FromCanceled(ct); } if (m_producerLock.IsCompleted) { @@ -108,7 +104,7 @@ protected Task MarkConsumerAsBlocked_NeedsLocking(CancellationToken ct) { if (ct.IsCancellationRequested) { - return TaskHelpers.FromCancellation(ct); + return Task.FromCanceled(ct); } if (m_consumerLock.IsCompleted) { @@ -123,7 +119,11 @@ protected void WakeUpBlockedConsumer_NeedsLocking() { if (m_consumerLock.Set(async: true)) { - LogProducer("Woke up blocked consumer"); + LogProducer("Woke up blocked consumer"); + } + else + { + LogProducer("Consumer was already unblocked?"); } } @@ -145,13 +145,17 @@ public void Dispose() [Conditional("FULL_DEBUG")] protected void LogProducer(string msg, [CallerMemberName] string caller = null) { +#if FULL_DEBUG Console.WriteLine("@@@ [producer#{0}] {1} [{2}]", Thread.CurrentThread.ManagedThreadId, msg, caller); +#endif } [Conditional("FULL_DEBUG")] 
protected void LogConsumer(string msg, [CallerMemberName] string caller = null) { +#if FULL_DEBUG Console.WriteLine("@@@ [consumer#{0}] {1} [{2}]", Thread.CurrentThread.ManagedThreadId, msg, caller); +#endif } #endregion diff --git a/FoundationDB.Client/Async/AsyncTaskBuffer.cs b/FoundationDB.Client/Shared/Async/AsyncTaskBuffer.cs similarity index 81% rename from FoundationDB.Client/Async/AsyncTaskBuffer.cs rename to FoundationDB.Client/Shared/Async/AsyncTaskBuffer.cs index 7750b8ed8..5f86164bf 100644 --- a/FoundationDB.Client/Async/AsyncTaskBuffer.cs +++ b/FoundationDB.Client/Shared/Async/AsyncTaskBuffer.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,15 +28,15 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY //#define FULL_DEBUG -namespace FoundationDB.Async +namespace Doxense.Async { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Runtime.ExceptionServices; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Buffer that holds a fixed number of Tasks, output them in arrival or completion order, and can rate-limit the producer /// @@ -62,25 +62,25 @@ public class AsyncTaskBuffer : AsyncProducerConsumerQueue>, IAsyncSou public AsyncTaskBuffer(AsyncOrderingMode mode, int capacity) : base(capacity) { - if (mode != AsyncOrderingMode.ArrivalOrder && mode != AsyncOrderingMode.CompletionOrder) throw new ArgumentOutOfRangeException("mode", "Unsupported ordering mode"); + if (mode != AsyncOrderingMode.ArrivalOrder && mode != AsyncOrderingMode.CompletionOrder) throw new ArgumentOutOfRangeException(nameof(mode), "Unsupported ordering mode"); m_mode = mode; } #endregion - #region IFdbAsyncTarget... + #region IAsyncTarget... 
- public override Task OnNextAsync(Task task, CancellationToken cancellationToken) + public override Task OnNextAsync(Task task, CancellationToken ct) { - if (cancellationToken.IsCancellationRequested) return TaskHelpers.FromCancellation(cancellationToken); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); LogProducer("Received task #" + task.Id + " (" + task.Status + ")"); Task wait; lock (m_lock) { - if (m_done) return TaskHelpers.FromException(new InvalidOperationException("Cannot send any more values because this buffer has already completed")); + if (m_done) return Task.FromException(new InvalidOperationException("Cannot send any more values because this buffer has already completed")); if (m_queue.Count < m_capacity) { // quick path @@ -100,15 +100,15 @@ public override Task OnNextAsync(Task task, CancellationToken cancellationTok } } - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } // we are blocked, we will need to wait ! - wait = MarkProducerAsBlocked_NeedsLocking(cancellationToken); + wait = MarkProducerAsBlocked_NeedsLocking(ct); } // slow path - return WaitForNextFreeSlotThenEnqueueAsync(task, wait, cancellationToken); + return WaitForNextFreeSlotThenEnqueueAsync(task, wait, ct); } private void NotifyConsumerOfTaskCompletion_NeedsLocking() @@ -124,7 +124,7 @@ private void NotifyConsumerOfTaskCompletion_NeedsLocking() /// Observe the completion of a task to wake up the consumer private void ObserveTaskCompletion([NotNull] Task task) { - var _ = task.ContinueWith( + task.ContinueWith( (t, state) => { LogProducer("Task #" + t.Id + " " + t.Status); @@ -154,21 +154,6 @@ public override void OnCompleted() } } -#if NET_4_0 - public override void OnError(Exception error) - { - lock (m_lock) - { - if (!m_done) - { - LogProducer("Error received: " + error.Message); - m_queue.AddLast(new LinkedListNode>(TaskHelpers.FromException(error))); - WakeUpBlockedConsumer_NeedsLocking(); - if (m_mode == AsyncOrderingMode.CompletionOrder) 
NotifyConsumerOfTaskCompletion_NeedsLocking(); - } - } - } -#else public override void OnError(ExceptionDispatchInfo error) { lock (m_lock) @@ -176,13 +161,12 @@ public override void OnError(ExceptionDispatchInfo error) if (!m_done) { LogProducer("Error received: " + error.SourceException.Message); - m_queue.AddLast(new LinkedListNode>(TaskHelpers.FromException(error.SourceException))); + m_queue.AddLast(new LinkedListNode>(Task.FromException(error.SourceException))); WakeUpBlockedConsumer_NeedsLocking(); if (m_mode == AsyncOrderingMode.CompletionOrder) NotifyConsumerOfTaskCompletion_NeedsLocking(); } } } -#endif private void Enqueue_NeedsLocking(Task task) { @@ -196,22 +180,29 @@ private async Task WaitForNextFreeSlotThenEnqueueAsync(Task task, [NotNull] T await wait.ConfigureAwait(false); - LogProducer("Wake up because one slot got freed"); + LogProducer("Woke up because one slot got freed"); lock (m_lock) { Contract.Assert(m_queue.Count < m_capacity); Enqueue_NeedsLocking(task); + + if (m_mode == AsyncOrderingMode.CompletionOrder) + { // we need to observe task completion to wake up the consumer as soon as one is ready ! + LogConsumer("Task still pending after wait, and must be observed"); + ObserveTaskCompletion(task); + } + } } #endregion - #region IFdbAsyncSource... + #region IAsyncSource... 
public Task> ReceiveAsync(CancellationToken ct) { - if (ct.IsCancellationRequested) return TaskHelpers.FromCancellation>(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled>(ct); LogConsumer("Looking for next value..."); @@ -228,7 +219,8 @@ public Task> ReceiveAsync(CancellationToken ct) throw new InvalidOperationException("Last item has already been received"); } - var current = m_queue.First; + var queue = m_queue; + var current = queue.First; if (current != null) { if (m_mode == AsyncOrderingMode.ArrivalOrder) @@ -236,7 +228,7 @@ public Task> ReceiveAsync(CancellationToken ct) if (current.Value == null || current.Value.IsCompleted) { // it's ready - m_queue.RemoveFirst(); + queue.RemoveFirst(); LogConsumer("First task #" + current.Value.Id + " was already " + current.Value.Status); return CompleteTask(current.Value); } @@ -247,21 +239,22 @@ public Task> ReceiveAsync(CancellationToken ct) else { // note: if one is already completed, it will be return immediately ! - while(current != null) + while (current != null) { - if (current.Value != null && current.Value.IsCompleted) + var t = current.Value; + if (t != null && t.IsCompleted) { - m_queue.Remove(current); - LogConsumer("Found task #" + current.Value.Id + " that was already " + current.Value.Status); - return CompleteTask(current.Value); + queue.Remove(current); + LogConsumer("Found task #" + t.Id + " that was already " + t.Status); + return CompleteTask(t); } current = current.Next; } // in case of completion, it would be the last - if (m_queue.First == m_queue.Last && m_queue.First.Value == null) + if (queue.First == queue.Last && queue.First.Value == null) { // last one - m_queue.Clear(); + queue.Clear(); m_receivedLast = true; LogConsumer("Received completion notification"); return CompleteTask(null); @@ -314,11 +307,7 @@ private async Task> WaitForTaskToCompleteAsync([NotNull] Task task, catch(Exception e) { LogConsumer("Notified that task #" + task + " failed"); -#if NET_4_0 - return 
Maybe.Error(e); -#else return Maybe.Error(ExceptionDispatchInfo.Capture(e)); -#endif } } @@ -336,13 +325,15 @@ protected Task MarkConsumerAsAwaitingCompletion_NeedsLocking(CancellationToken c { Contract.Requires(m_mode == AsyncOrderingMode.CompletionOrder); - if (m_completionLock.IsCompleted) + var cl = m_completionLock; + if (cl.IsCompleted) { LogConsumer("Creating new task completion lock"); - m_completionLock = new AsyncCancelableMutex(ct); + cl = new AsyncCancelableMutex(ct); + m_completionLock = cl; } LogConsumer("marked as waiting for task completion"); - return m_completionLock.Task; + return cl.Task; } #endregion @@ -353,6 +344,7 @@ protected override void Dispose(bool disposing) { if (disposing) { + LogConsumer("Disposing consumer!"); lock (m_lock) { m_done = true; @@ -360,6 +352,7 @@ protected override void Dispose(bool disposing) m_consumerLock.Abort(); m_completionLock.Abort(); m_queue.Clear(); + LogConsumer("Consumer has been disposed"); } } } diff --git a/FoundationDB.Client/Async/AsyncTransform.cs b/FoundationDB.Client/Shared/Async/AsyncTransform.cs similarity index 68% rename from FoundationDB.Client/Async/AsyncTransform.cs rename to FoundationDB.Client/Shared/Async/AsyncTransform.cs index db4eca663..36fa36241 100644 --- a/FoundationDB.Client/Async/AsyncTransform.cs +++ b/FoundationDB.Client/Shared/Async/AsyncTransform.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,28 +26,27 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Async { using JetBrains.Annotations; using System; using System.Runtime.ExceptionServices; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; /// Pump that takes items from a source, transform them, and outputs them - /// - /// - public sealed class AsyncTransform : IAsyncTarget, IDisposable + public sealed class AsyncTransform : IAsyncTarget, IDisposable { - private readonly IAsyncTarget> m_target; - private readonly Func> m_transform; + private readonly IAsyncTarget> m_target; + private readonly Func> m_transform; private readonly TaskScheduler m_scheduler; private bool m_done; - public AsyncTransform([NotNull] Func> transform, [NotNull] IAsyncTarget> target, TaskScheduler scheduler = null) + public AsyncTransform([NotNull] Func> transform, [NotNull] IAsyncTarget> target, TaskScheduler scheduler = null) { - if (transform == null) throw new ArgumentNullException("transform"); - if (target == null) throw new ArgumentNullException("target"); + Contract.NotNull(transform, nameof(transform)); + Contract.NotNull(target, nameof(target)); m_transform = transform; m_target = target; @@ -55,16 +54,16 @@ public AsyncTransform([NotNull] Func> transform, [ } /// Target of the transform - public IAsyncTarget> Target { get { return m_target; } } + public IAsyncTarget> Target { get { return m_target; } } /// Optional scheduler used to run the tasks public TaskScheduler Scheduler { get { return m_scheduler; } } #region IAsyncTarget... 
- public Task OnNextAsync(T value, CancellationToken cancellationToken) + public Task OnNextAsync(TInput value, CancellationToken ct) { - if (cancellationToken.IsCancellationRequested) return TaskHelpers.CompletedTask; + if (ct.IsCancellationRequested) return Task.CompletedTask; if (m_done) throw new InvalidOperationException("Cannot send any more values because this transform has already completed"); @@ -73,57 +72,40 @@ public Task OnNextAsync(T value, CancellationToken cancellationToken) // we start the task here, but do NOT wait for its completion! // It is the job of the target to handle that (and ordering) - Task task; + Task task; if (m_scheduler == null) { // execute inline - task = m_transform(value, cancellationToken); + task = m_transform(value, ct); } else { // execute in a scheduler task = Task.Factory.StartNew( (state) => { - var prms = (Tuple, T, CancellationToken>)state; + var prms = (Tuple, TInput, CancellationToken>)state; return prms.Item1.m_transform(prms.Item2, prms.Item3); }, - Tuple.Create(this, value, cancellationToken), - cancellationToken, + Tuple.Create(this, value, ct), + ct, TaskCreationOptions.PreferFairness, m_scheduler ).Unwrap(); } - return m_target.OnNextAsync(task, cancellationToken); + return m_target.OnNextAsync(task, ct); } catch(Exception e) { -#if NET_4_0 - m_target.OnError(e); -#else m_target.OnError(ExceptionDispatchInfo.Capture(e)); -#endif - return TaskHelpers.FromException(e); + return Task.FromException(e); } } public void OnCompleted() { - if (!m_done) - { - m_done = true; - m_target.OnCompleted(); - } + Dispose(); } -#if NET_4_0 - public void OnError(Exception e) - { - if (!m_done) - { - m_target.OnError(e); - } - } -#else public void OnError(ExceptionDispatchInfo e) { if (!m_done) @@ -131,7 +113,6 @@ public void OnError(ExceptionDispatchInfo e) m_target.OnError(e); } } -#endif #endregion diff --git a/FoundationDB.Client/Async/AsyncTransformQueue.cs b/FoundationDB.Client/Shared/Async/AsyncTransformQueue.cs similarity 
index 85% rename from FoundationDB.Client/Async/AsyncTransformQueue.cs rename to FoundationDB.Client/Shared/Async/AsyncTransformQueue.cs index 0784d4363..de67cadbd 100644 --- a/FoundationDB.Client/Async/AsyncTransformQueue.cs +++ b/FoundationDB.Client/Shared/Async/AsyncTransformQueue.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,21 +26,21 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Async { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; using System.Runtime.ExceptionServices; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Implements an async queue that asynchronously transform items, outputing them in arrival order, while throttling the producer /// Type of the input elements (from the inner async iterator) /// Type of the output elements (produced by an async lambda) - internal class AsyncTransformQueue : IAsyncBuffer + public class AsyncTransformQueue : IAsyncBuffer { private readonly Func> m_transform; private readonly Queue>> m_queue = new Queue>>(); @@ -53,24 +53,22 @@ internal class AsyncTransformQueue : IAsyncBuffer> transform, int capacity, TaskScheduler scheduler) { - if (transform == null) throw new ArgumentNullException("transform"); - if (capacity <= 0) throw new ArgumentOutOfRangeException("capacity", "Capacity must be greater than zero"); + Contract.NotNull(transform, nameof(transform)); + if (capacity <= 0) throw new ArgumentOutOfRangeException(nameof(capacity), "Capacity must be greater than zero"); m_transform = transform; m_capacity = capacity; m_scheduler = scheduler ?? TaskScheduler.Default; } - #region IFdbAsyncBuffer... 
+ #region IAsyncBuffer... /// Returns the current number of items in the queue public int Count { get { -#if !CORE_CLR Debugger.NotifyOfCrossThreadDependency(); -#endif lock (m_lock) { return m_queue.Count; @@ -79,22 +77,17 @@ public int Count } /// Returns the maximum capacity of the queue - public int Capacity - { - get { return m_capacity; } - } + public int Capacity => m_capacity; /// Returns true if the producer is blocked (queue is full) public bool IsConsumerBlocked { get { -#if !CORE_CLR Debugger.NotifyOfCrossThreadDependency(); -#endif lock (m_lock) { - return m_blockedConsumer != null && m_blockedConsumer.Task.IsCompleted; + return m_blockedConsumer?.Task.IsCompleted == true; } } } @@ -104,12 +97,10 @@ public bool IsProducerBlocked { get { -#if !CORE_CLR Debugger.NotifyOfCrossThreadDependency(); -#endif lock (m_lock) { - return m_blockedProducer != null && m_blockedProducer.Task.IsCompleted; + return m_blockedProducer?.Task.IsCompleted == true; } } } @@ -121,7 +112,7 @@ public Task DrainAsync() #endregion - #region IFdbAsyncTarget... + #region IAsyncTarget... 
private static async Task> ProcessItemHandler(object state) { @@ -137,19 +128,15 @@ private static async Task> ProcessItemHandler(object state) } catch (Exception e) { -#if NET_4_0 - return Maybe.Error(e); -#else return Maybe.Error(ExceptionDispatchInfo.Capture(e)); -#endif } } private static readonly Func>> s_processItemHandler = ProcessItemHandler; - public async Task OnNextAsync(TInput value, CancellationToken cancellationToken) + public async Task OnNextAsync(TInput value, CancellationToken ct) { - while (!cancellationToken.IsCancellationRequested) + while (!ct.IsCancellationRequested) { AsyncCancelableMutex waiter; lock (m_lock) @@ -160,8 +147,8 @@ public async Task OnNextAsync(TInput value, CancellationToken cancellationToken) { var t = Task.Factory.StartNew( s_processItemHandler, - Tuple.Create(this, value, cancellationToken), - cancellationToken, + Tuple.Create(this, value, ct), + ct, TaskCreationOptions.PreferFairness, m_scheduler ).Unwrap(); @@ -184,14 +171,14 @@ public async Task OnNextAsync(TInput value, CancellationToken cancellationToken) } // no luck, we need to wait for the queue to become non-full - waiter = new AsyncCancelableMutex(cancellationToken); + waiter = new AsyncCancelableMutex(ct); m_blockedProducer = waiter; } await waiter.Task.ConfigureAwait(false); } - cancellationToken.ThrowIfCancellationRequested(); + ct.ThrowIfCancellationRequested(); } public void OnCompleted() @@ -208,13 +195,7 @@ public void OnCompleted() } } - public void OnError( -#if NET_4_0 - Exception error -#else - ExceptionDispatchInfo error -#endif - ) + public void OnError(ExceptionDispatchInfo error) { lock(m_lock) { @@ -230,26 +211,26 @@ ExceptionDispatchInfo error #endregion - #region IFdbAsyncBatchTarget... + #region IAsyncBatchTarget... 
- public async Task OnNextBatchAsync([NotNull] TInput[] batch, CancellationToken cancellationToken) + public async Task OnNextBatchAsync([NotNull] TInput[] batch, CancellationToken ct) { - if (batch == null) throw new ArgumentNullException("batch"); + Contract.NotNull(batch, nameof(batch)); if (batch.Length == 0) return; - if (cancellationToken.IsCancellationRequested) cancellationToken.ThrowIfCancellationRequested(); + if (ct.IsCancellationRequested) ct.ThrowIfCancellationRequested(); //TODO: optimized version ! foreach (var item in batch) { - await OnNextAsync(item, cancellationToken).ConfigureAwait(false); + await OnNextAsync(item, ct).ConfigureAwait(false); } } #endregion - #region IFdbAsyncSource... + #region IAsyncSource... public Task> ReceiveAsync(CancellationToken ct) { @@ -308,11 +289,7 @@ private async Task> ReceiveWhenDoneAsync(Task> tas } catch(Exception e) { -#if NET_4_0 - return Maybe.Error(e); -#else return Maybe.Error(ExceptionDispatchInfo.Capture(e)); -#endif } finally { @@ -366,7 +343,7 @@ private async Task> ReceiveSlowAsync(Task waiter, CancellationTok #endregion - #region IFdbAsyncBatchSource... + #region IAsyncBatchSource... 
public Task[]> ReceiveBatchAsync(int count, CancellationToken ct) { @@ -450,7 +427,7 @@ private bool DrainItems_NeedsLocking([NotNull] List> buffer, int private Task WaitForNextItem_NeedsLocking(CancellationToken ct) { - if (m_done) return TaskHelpers.CompletedTask; + if (m_done) return Task.CompletedTask; Contract.Requires(m_blockedConsumer == null || m_blockedConsumer.Task.IsCompleted); @@ -462,21 +439,15 @@ private Task WaitForNextItem_NeedsLocking(CancellationToken ct) private void WakeUpProducer_NeedsLocking() { var waiter = Interlocked.Exchange(ref m_blockedProducer, null); - if (waiter != null) - { - waiter.Set(async: true); - } + waiter?.Set(async: true); } private void WakeUpConsumer_NeedLocking() { var waiter = Interlocked.Exchange(ref m_blockedConsumer, null); - if (waiter != null) - { - waiter.Set(async: true); - } + waiter?.Set(async: true); } - + } } diff --git a/FoundationDB.Client/Async/IAsyncBuffer.cs b/FoundationDB.Client/Shared/Async/IAsyncBuffer.cs similarity index 93% rename from FoundationDB.Client/Async/IAsyncBuffer.cs rename to FoundationDB.Client/Shared/Async/IAsyncBuffer.cs index 4aa5e345f..ea9b89361 100644 --- a/FoundationDB.Client/Async/IAsyncBuffer.cs +++ b/FoundationDB.Client/Shared/Async/IAsyncBuffer.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Async { using System; using System.Threading.Tasks; @@ -34,7 +34,7 @@ namespace FoundationDB.Async /// Defines a producer/consumer buffer queue that can hold several items before blocking the producer /// Type of elements entering the buffer /// Type of elements exiting the buffer. Can be different from if the buffer also transforms the elements. 
- interface IAsyncBuffer : IAsyncTarget, IAsyncSource + public interface IAsyncBuffer : IAsyncTarget, IAsyncSource { /// Returns the current number of items in the buffer int Count { get; } diff --git a/FoundationDB.Client/Async/IAsyncSource.cs b/FoundationDB.Client/Shared/Async/IAsyncSource.cs similarity index 90% rename from FoundationDB.Client/Async/IAsyncSource.cs rename to FoundationDB.Client/Shared/Async/IAsyncSource.cs index 514b03273..d9ffc7211 100644 --- a/FoundationDB.Client/Async/IAsyncSource.cs +++ b/FoundationDB.Client/Shared/Async/IAsyncSource.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Async { using System; using System.Threading; @@ -39,9 +39,9 @@ public interface IAsyncSource //note: T cannot be covariant because Task<..> is not covariant :( /// Consume a new value from the source - /// Token used to cancel the operation + /// Token used to cancel the operation /// Task that will return a new value, nothing (if it has completed) or on exception - Task> ReceiveAsync(CancellationToken cancellationToken); + Task> ReceiveAsync(CancellationToken ct); } } diff --git a/FoundationDB.Client/Async/IAsyncTarget.cs b/FoundationDB.Client/Shared/Async/IAsyncTarget.cs similarity index 89% rename from FoundationDB.Client/Async/IAsyncTarget.cs rename to FoundationDB.Client/Shared/Async/IAsyncTarget.cs index 9142ac5c0..b185ff360 100644 --- a/FoundationDB.Client/Async/IAsyncTarget.cs +++ b/FoundationDB.Client/Shared/Async/IAsyncTarget.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Async { using System; using System.Runtime.ExceptionServices; @@ -42,20 +42,16 @@ public interface IAsyncTarget /// Push a new item onto the target, if it can accept one /// New value that is being published - /// Cancellation token that is used to abort the call if the target is blocked + /// Cancellation token that is used to abort the call if the target is blocked /// Task that completes once the target has accepted the new value (or fails if the cancellation token fires) - Task OnNextAsync(T value, CancellationToken cancellationToken); + Task OnNextAsync(T value, CancellationToken ct); /// Notifies the target that the producer is done and that no more values will be published void OnCompleted(); /// Notifies the target that tere was an exception, and that no more values will be published /// The error that occurred -#if NET_4_0 - void OnError(Exception error); -#else void OnError(ExceptionDispatchInfo error); -#endif } } diff --git a/FoundationDB.Client/Shared/Async/Maybe.cs b/FoundationDB.Client/Shared/Async/Maybe.cs new file mode 100644 index 000000000..7702cd3ec --- /dev/null +++ b/FoundationDB.Client/Shared/Async/Maybe.cs @@ -0,0 +1,756 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense +{ + using System; + using System.Collections.Generic; + using System.Runtime; + using System.Runtime.CompilerServices; + using System.Runtime.ExceptionServices; + using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; + + public readonly struct Maybe : IEquatable>, IEquatable, IComparable>, IComparable, IFormattable + { + /// Réprésente un résultat vide (no computation) + public static readonly Maybe Nothing = new Maybe(); + + /// Représente un résultat correspondant à la valeur par défaut du type (0, false, null) + public static readonly Maybe Default = new Maybe(default(T)); + + /// Cached completed Task that always return an empty value + public static readonly Task> EmptyTask = Task.FromResult(default(Maybe)); + + #region Private Fields... 
+ + // ================================================================================== + // m_hasValue | m_value | m_error | description + // ================================================================================== + // True | Resultat | null | Le calcul a produit un résultat (qui peut etre le défaut du type, mais qui n'est pas "vide") + // False | - | null | Le calcul n'a pas produit de résultat + // False | - | Exception | Le calcul a provoqué une exception + + /// If true, there is a value. If false, either no value or an exception + private readonly T m_value; + + /// If HasValue is true, holds the value. Else, contains default(T) + private readonly bool m_hasValue; + + /// If HasValue is false optinally holds an error that was captured + private readonly object m_errorContainer; // either an Exception, or an ExceptionDispatchInfo + + #endregion + + public Maybe(T value) + { + m_hasValue = true; + m_value = value; + m_errorContainer = null; + } + + internal Maybe(bool hasValue, T value, object errorContainer) + { + Contract.Requires(errorContainer == null || (errorContainer is Exception) || (errorContainer is ExceptionDispatchInfo)); + + m_hasValue = hasValue; + m_value = value; + m_errorContainer = errorContainer; + } + + /// There is a value + /// !(IsEmpty || HasFailed) + public bool HasValue + { + [TargetedPatchingOptOut("Performance critical to inline this type of method across NGen image boundaries")] + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => m_hasValue; + } + + /// Returns the value if the computation succeeded + /// If the value is empty + /// If the value has failed to compute + public T Value + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => m_hasValue ? 
m_value : ThrowInvalidState(); + } + + /// Returns the value if the computation succeeded, or default() in all other cases + [TargetedPatchingOptOut("Performance critical to inline this type of method across NGen image boundaries")] + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public T GetValueOrDefault() + { + return m_value; + } + + public Exception Error + { + [TargetedPatchingOptOut("Performance critical to inline this type of method across NGen image boundaries")] + [Pure] + get + { + return m_errorContainer is ExceptionDispatchInfo edi + ? edi.SourceException + : m_errorContainer as Exception; + } + } + + /// Return the captured error context, or null if there wasn't any + public ExceptionDispatchInfo CapturedError => m_errorContainer is Exception exception ? ExceptionDispatchInfo.Capture(exception) : m_errorContainer as ExceptionDispatchInfo; + + /// The value failed to compute + /// !(HasValue || IsEmpty) + public bool Failed + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return m_errorContainer != null; } + } + + /// Rethrows any captured error, if there was one. 
+ public void ThrowForNonSuccess() + { + if (m_errorContainer != null) + { + if (!(m_errorContainer is Exception exception)) + { + ((ExceptionDispatchInfo) m_errorContainer).Throw(); + return; // never reached, but helps with code analysis + } + throw exception; + } + } + + internal object ErrorContainer + { + [Pure] + get => m_errorContainer; + } + + /// No value was returned + /// !(HasValue || Failed) + public bool IsEmpty + { + [Pure] + get => !m_hasValue && m_errorContainer == null; + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.NoInlining)] + private T ThrowInvalidState() + { + if (m_errorContainer != null) throw new AggregateException("A computation has triggered an exception.", this.Error); + if (!m_hasValue) throw new InvalidOperationException("This computation has no value."); + throw new InvalidOperationException("This computation already has a value."); + } + + [Pure, NotNull] + public static Func, Maybe> Return([NotNull] Func computation) + { + return Bind(x => new Maybe(computation(x))); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe Failure(Exception error) + { + return new Maybe(false, default(T), error); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe Failure(ExceptionDispatchInfo error) + { + return new Maybe(false, default(T), error); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static explicit operator T(Maybe m) + { + return m.Value; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator Maybe(T value) + { + return new Maybe(value); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator Maybe(Exception error) + { + return Failure(error); + } + + public bool Equals(Maybe other) + { + if (m_hasValue) return other.m_hasValue && EqualityComparer.Default.Equals(m_value, other.m_value); + if (m_errorContainer != null) return !m_hasValue 
&& m_errorContainer.Equals(other.m_errorContainer); + return !other.m_hasValue & other.m_errorContainer == null; + } + public bool Equals(T other) + { + return m_hasValue && EqualityComparer.Default.Equals(m_value, other); + } + + public override bool Equals(object obj) + { + if (obj == null) return !m_hasValue; + if (obj is T value) return Equals(value); + if (obj is Maybe maybe) return Equals(maybe); + if (obj is Exception err) return !m_hasValue && err.Equals(m_errorContainer); + return false; + } + + public override int GetHashCode() + { + return m_hasValue ? EqualityComparer.Default.GetHashCode(m_value) : m_errorContainer?.GetHashCode() ?? -1; + } + + public int CompareTo(Maybe other) + { + // in order: "nothing", then values, then errors + + if (m_hasValue) + { // Some + if (other.m_hasValue) return Comparer.Default.Compare(m_value, other.m_value); + if (other.m_errorContainer != null) return -1; // values come before errors + return +1; // values come after nothing + } + + if (m_errorContainer != null) + { // Error + if (other.m_hasValue | other.m_errorContainer == null) return +1; // errors come after everything except errors + //note: this is tricky, because we cannot realy sort Exceptions, so this sort may not be stable :( + // => the "only" way would be to compare their hash codes! + return ReferenceEquals(m_errorContainer, other.m_errorContainer) ? 0 : m_errorContainer.GetHashCode().CompareTo(other.m_errorContainer.GetHashCode()); + } + + // Nothing comes before everything except nothing + return other.m_hasValue | other.m_errorContainer != null ? -1 : 0; + } + + public int CompareTo(T other) + { + // in order: "nothing", then values, then errors + if (!m_hasValue) + { + return m_errorContainer != null ? 
+1 : -1; + } + return Comparer.Default.Compare(m_value, other); + } + + public string ToString(string format, IFormatProvider formatProvider) + { + if (this.Failed) return ""; + if (!this.HasValue) return ""; + if (this.Value == null) return ""; //REVIEW: => "" ? + if (this.Value is IFormattable fmt) return fmt.ToString(format, formatProvider); + return this.Value.ToString(); + } + + public override string ToString() + { + return ToString(null, null); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator ==(Maybe left, T right) + { + return left.Equals(right); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator !=(Maybe left, T right) + { + return !left.Equals(right); + } + + public static bool operator >(Maybe left, T right) + { + return left.CompareTo(right) > 0; + } + + public static bool operator >=(Maybe left, T right) + { + return left.CompareTo(right) >= 0; + } + + public static bool operator <(Maybe left, T right) + { + return left.CompareTo(right) < 0; + } + + public static bool operator <=(Maybe left, T right) + { + return left.CompareTo(right) <= 0; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator ==(Maybe left, Maybe right) + { + return left.Equals(right); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator !=(Maybe left, Maybe right) + { + return !left.Equals(right); + } + + public static bool operator >(Maybe left, Maybe right) + { + return left.CompareTo(right) > 0; + } + + public static bool operator >=(Maybe left, Maybe right) + { + return left.CompareTo(right) >= 0; + } + + public static bool operator <(Maybe left, Maybe right) + { + return left.CompareTo(right) < 0; + } + + public static bool operator <=(Maybe left, Maybe right) + { + return left.CompareTo(right) <= 0; + } + + #region Function Binding... 
+ + public static Func, Maybe> Bind(Func> computation) + { + return (x) => + { + if (x.m_errorContainer != null) return new Maybe(false, default(TResult), x.m_errorContainer); + if (!x.m_hasValue) return Maybe.Nothing; + + try + { + return computation(x.m_value); + } + catch (Exception e) + { + return Maybe.Failure(e); + } + }; + } + + public static Func, Maybe, Maybe> Bind(Func> computation) + { + return (x, y) => + { + if (x.m_errorContainer != null || y.m_errorContainer != null) return Maybe.Error(default(TResult), x.Error, y.Error); + if (x.m_hasValue && y.m_hasValue) + { + try + { + + return computation(x.m_value, y.m_value); + } + catch (Exception e) + { + return Maybe.Failure(e); + } + } + return Maybe.Nothing; + }; + } + + #endregion + + } + + /// Helper class to deal with Maybe<T> monads + public static class Maybe + { + + /// Crée un Maybe<T> représentant une valeur connue + /// Type de la valeur + /// Valeur à convertir + /// Maybe<T> contenant la valeur + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe Return(T value) + { + // ENTER THE MONAD ! + return new Maybe(value); + } + + /// Retourne un Maybe<T> correspondant à cette valeur + /// Type de la valeur + /// Valeur à convertir + /// Maybe<T> contenant cette valeur + /// Note: si T est un ReferenceType et que value est null, le Maybe retourné n'est pas vide (il a une valeur, qui est null). Il faut utiliser .IfNotNull() pour protéger contre les nullref + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe ToMaybe(this T value) + { + // ENTER THE MONAD ! + return new Maybe(value); + } + + /// Retourne un Maybe<T> correspondant à cette valeur + /// Type de la valeur + /// Valeur à convertir (ou null) + /// Maybe<T> contenant cette valeur + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe ToMaybe(this T? value) + where T : struct + { + return value.HasValue ? 
new Maybe(value.Value) : Maybe.Nothing; + } + + /// Convertit les référence null en Maybe.Nothing + /// Reference Type + /// Instance à protéger (peut être null) + /// Maybe.Nothing si l'instance est null, sinon un Maybe encapsulant cette instance + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe IfNotNull(this T value) + where T : class + { + // ENTER THE MONAD + return value == null ? Maybe.Nothing : new Maybe(value); + } + + /// Helper pour créer un Maybe<T>.Nothing + /// Type de la valeur + /// Maybe vide + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe Nothing() + { + // ENTER THE MONAD ! + return default(Maybe); + } + + /// Helper pour créer un Maybe<T>.Nothing en utilisant le compilateur pour inférer le type de la valeur + /// Type de la valeur + /// Paramètre dont la valeur est ignorée, et qui sert juste à aider le compilateur à inférer le type + /// Maybe vide + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe Nothing(T _) + { + // ENTER THE MONAD ! + return default(Maybe); + } + + /// Helper pour créer un Maybe<T> représentant une Exception + /// Type de la valeur + /// Exception à enrober + /// Maybe encapsulant l'erreur + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe Error(Exception error) + { + // ENTER THE MONAD ! + return Maybe.Failure(error); + } + + /// Helper pour créer un Maybe<T> représentant une Exception + /// Type de la valeur + /// Exception à enrober + /// Maybe encapsulant l'erreur + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe Error(ExceptionDispatchInfo error) + { + // ENTER THE MONAD ! 
+ return Maybe.Failure(error); + } + + /// Helper pour créer un Maybe<T> représentant une Exception, en utilisant le compilateur pour inférer le type de la valeur + /// Type de la valeur + /// Paramètre dont la valeur est ignorée, et qui sert juste à aider le compilateur à inférer le type + /// Exception à enrober + /// Maybe encapsulant l'erreur + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe Error(T _, Exception error) + { + // ENTER THE MONAD ! + return Maybe.Failure(error); + } + + /// Helper pour créer un Maybe<T> représentant une Exception, en utilisant le compilateur pour inférer le type de la valeur + /// Type de la valeur + /// Paramètre dont la valeur est ignorée, et qui sert juste à aider le compilateur à inférer le type + /// Exception à enrober + /// Maybe encapsulant l'erreur + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe Error(T _, ExceptionDispatchInfo error) + { + // ENTER THE MONAD ! + return Maybe.Failure(error); + } + + /// Helper pour combiner des erreurs, en utilisant le compilateur pour inférer le type de la valeur + /// + /// Paramètre dont la valeur est ignorée, et qui sert juste à aider le compilateur à inférer le type + /// Première exception (peut être null) + /// Deuxième exception (peut être null) + /// Maybe encapsulant la ou les erreur. Si les deux erreurs sont présentes, elles sont combinées dans une AggregateException + [Pure] + public static Maybe Error(T _, Exception error0, Exception error1) + { + // Il faut au moins une des deux ! + Contract.Assert(error0 != null || error1 != null); + + if (error1 == null) + { + return Maybe.Failure(error0); + } + if (error0 == null) + { + return Maybe.Failure(error1); + } + return Maybe.Failure(new AggregateException(error0, error1)); + } + + /// Convertit un Maybe<T&;t en T? (lorsque T est un ValueType) + /// ValueType + /// Maybe à convertir + /// Version nullable du maybe, qui vaut default(T?) 
si le Maybe est Nothing, ou la valeur elle même s'il contient un résultat. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static T? ToNullable(this Maybe m) + where T : struct + { + // EXIT THE MONAD + //TODO: propager l'exception ? + return m.HasValue ? m.Value : default(T?); + } + + /// Convertit un T? en Maybe<T&;t (lorsque T est un ValueType) + /// ValueType + /// Nullable à convertir + /// Version maybe du nullable, qui vaut Nothing si le nullable est default(T?), ou la valeur elle même s'il contient un résultat. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Maybe FromNullable(T? value) + where T : struct + { + return value.HasValue ? new Maybe(value.Value) : default(Maybe); + } + + /// Retourne le résultat d'un Maybe, ou une valeur par défaut s'il est vide. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static T OrDefault(this Maybe m, T @default = default(T)) + { + // EXIT THE MONAD + return m.HasValue ? m.Value : @default; + } + + /// Immediately apply a function to a value, and capture the result into a + [Pure] + public static Maybe Apply(T value, [NotNull, InstantHandle] Func lambda) + { + Contract.Requires(lambda != null); + try + { + return Return(lambda(value)); + } + catch (Exception e) + { + return Error(ExceptionDispatchInfo.Capture(e)); + } + } + + /// Immediately apply a function to a value, and capture the result into a + [Pure] + public static Maybe Apply(T value, [NotNull, InstantHandle] Func> lambda) + { + Contract.Requires(lambda != null); + try + { + return lambda(value); + } + catch (Exception e) + { + return Error(ExceptionDispatchInfo.Capture(e)); + } + } + + /// Immediately apply a function to a value, and capture the result into a + [Pure] + public static Maybe Apply(Maybe value, [NotNull, InstantHandle] Func lambda) + { + Contract.Requires(lambda != null); + if (!value.HasValue) + { + if (value.Failed) + { + // keep the original error untouched + return new 
Maybe(false, default(TResult), value.ErrorContainer); + } + return Nothing(); + } + try + { + return Return(lambda(value.Value)); + } + catch (Exception e) + { + return Error(e); + } + } + + /// Immediately apply a function to a value, and capture the result into a + [Pure] + public static Maybe Apply(Maybe value, [NotNull, InstantHandle] Func> lambda) + { + Contract.Requires(lambda != null); + if (!value.HasValue) + { + if (value.Failed) + { + // keep the original error untouched + return new Maybe(false, default(TResult), value.ErrorContainer); + } + return Nothing(); + } + try + { + return lambda(value.Value); + } + catch (Exception e) + { + return Error(e); + } + } + + /// Convert a completed into an equivalent + [Pure] + public static Maybe FromTask([NotNull] Task task) + { + //REVIEW: should we return Maybe.Empty if task == null ? + Contract.Requires(task != null); + switch (task.Status) + { + case TaskStatus.RanToCompletion: + { + return Return(task.Result); + } + case TaskStatus.Faulted: + { + //TODO: pass the failed task itself as the error container? (we would keep the original callstack that way...) + var aggEx = task.Exception?.Flatten(); + if (aggEx?.InnerExceptions.Count == 1) + { + return Error(aggEx.InnerException); + } + return Error(aggEx); + } + case TaskStatus.Canceled: + { + return Error(new OperationCanceledException()); + } + default: + { + throw new InvalidOperationException("Task must be in the completed state"); + } + } + } + + /// Convert a completed with being a , into an equivalent + [Pure] + public static Maybe FromTask([NotNull] Task> task) + { + Contract.Requires(task != null); + switch (task.Status) + { + case TaskStatus.RanToCompletion: + { + return task.Result; + } + case TaskStatus.Faulted: + { + //TODO: pass the failed task itself as the error container? (we would keep the original callstack that way...) 
+ var aggEx = task.Exception.Flatten(); + if (aggEx.InnerExceptions.Count == 1) + { + return Error(aggEx.InnerException); + } + return Error(aggEx); + } + case TaskStatus.Canceled: + { + return Error(new OperationCanceledException()); + } + default: + { + throw new InvalidOperationException("Task must be in the completed state"); + } + } + } + + /// Streamline a potentially failed Task<Maybe<T>> into a version that capture the error into the itself + [Pure] + public static Task> Unwrap([NotNull] Task> task) + { + Contract.Requires(task != null); + switch (task.Status) + { + case TaskStatus.RanToCompletion: + { + return task; + } + case TaskStatus.Faulted: + { + //TODO: pass the failed task itself as the error container? (we would keep the original callstack that way...) + var aggEx = task.Exception.Flatten(); + if (aggEx.InnerExceptions.Count == 1) + { + return Task.FromResult(Maybe.Error(aggEx.InnerException)); + } + return Task.FromResult(Maybe.Error(aggEx)); + } + case TaskStatus.Canceled: + { + return Task.FromResult(Error(new OperationCanceledException())); + } + default: + { + throw new InvalidOperationException("Task must be in the completed state"); + } + } + } + + [Pure, NotNull] + private static Func, Maybe> Combine([NotNull] Func, Maybe> f, Func, Maybe> g) + { + return (mt) => g(f(mt)); + } + + [Pure, NotNull] + public static Func, Maybe> Bind([NotNull] Func> f, [NotNull] Func> g) + { + return Combine(Maybe.Bind(f), Maybe.Bind(g)); + } + + [Pure, NotNull] + public static Func, Maybe> Bind([NotNull] Func> f, [NotNull] Func, Maybe> g) + { + return Combine(Maybe.Bind(f), g); + } + + } + +} diff --git a/FoundationDB.Client/Async/TaskHelpers.cs b/FoundationDB.Client/Shared/Async/TaskHelpers.cs similarity index 78% rename from FoundationDB.Client/Async/TaskHelpers.cs rename to FoundationDB.Client/Shared/Async/TaskHelpers.cs index 9334e5e1b..35735120a 100644 --- a/FoundationDB.Client/Async/TaskHelpers.cs +++ b/FoundationDB.Client/Shared/Async/TaskHelpers.cs @@ 
-1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,20 +26,20 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Threading.Tasks { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Helper methods to work on tasks internal static class TaskHelpers { /// Helper type cache class - public static class Cache + public static class CachedTasks { public static readonly Task Default = Task.FromResult(default(T)); @@ -66,22 +66,18 @@ public static Func Identity } - /// Return a task that is already completed - // README: There is a Task.CompletedTask object in the BCL that is internal, and one 'easy' way to get access to it is via Task.Delay(0) that returns it if param is equal to 0... - public static readonly Task CompletedTask = Task.Delay(0); - /// Already completed task that returns false - public static readonly Task FalseTask = Task.FromResult(false); + public static readonly Task False = Task.FromResult(false); /// Already completed task that returns true - public static readonly Task TrueTask = Task.FromResult(true); + public static readonly Task True = Task.FromResult(true); /// Returns an already completed boolean task that is either true of false /// Value of the task /// Already completed task the returns public static Task FromResult(bool value) { - return value ? TrueTask : FalseTask; + return value ? 
TaskHelpers.True : TaskHelpers.False; } /// Returns a cached completed task that returns the default value of type @@ -89,7 +85,47 @@ public static Task FromResult(bool value) /// Task that is already completed, and returns default() public static Task Default() { - return Cache.Default; + return CachedTasks.Default; + } + + /// Fait en sorte que toute exception non gérée soit observée + /// Tâche, qui peut potentiellement déclencher une exception + /// La même task, mais avec une continuation qui viendra observer toute erreur + /// Cette méthode a pour unique but dans la vie de faire taire les warning du compilateur sur les tasks non awaitées (ou variable non utilisées) + public static void Observed(this TTask task) + where TTask : Task + { + if (task == null) return; + + // A la base en .NET 4.0, le destructeur des task rethrow les errors non observées sur le TP ce qui pouvait killer le process + // => il faut que quelqu'un "touche" a la propriété "Exception" de la task, pour empecher cela. + switch (task.Status) + { + case TaskStatus.Faulted: + case TaskStatus.Canceled: + TouchFaultedTask(task); + return; + + case TaskStatus.RanToCompletion: + return; + + default: + task.ContinueWith((t) => TouchFaultedTask(t), TaskContinuationOptions.OnlyOnFaulted); + return; + } + } + + private static void TouchFaultedTask(Task t) + { + // ReSharper disable once UnusedVariable + var error = t.Exception; +#if DEBUG + if (t.IsFaulted) + { + // C'est une mauvaise pratique, donc râle quand même dans les logs en mode debug! 
+ System.Diagnostics.Debug.WriteLine($"### muted unobserved failed Task[{t.Id}]: [{error?.InnerException?.GetType().Name}] {error?.InnerException?.Message}"); + } +#endif } /// Continue processing a task, if it succeeded @@ -118,9 +154,9 @@ public static async Task Then(this Task task, [NotNull] Func i /// If is null public static Task Inline([NotNull] Func lambda, CancellationToken ct = default(CancellationToken)) { - if (lambda == null) throw new ArgumentNullException("lambda"); + if (lambda == null) throw new ArgumentNullException(nameof(lambda)); - if (ct.IsCancellationRequested) return FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); try { var res = lambda(); @@ -142,13 +178,13 @@ public static async Task Then(this Task task, [NotNull] Func i public static Task Inline([NotNull] Action action, T1 arg1, CancellationToken ct = default(CancellationToken)) { // note: if action is null, then there is a bug in the caller, and it should blow up instantly (will help preserving the call stack) - if (action == null) throw new ArgumentNullException("action"); + if (action == null) throw new ArgumentNullException(nameof(action)); // for all other exceptions, they will be wrapped in the returned task - if (ct.IsCancellationRequested) return FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); try { action(arg1); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } catch (Exception e) { @@ -168,13 +204,13 @@ public static async Task Then(this Task task, [NotNull] Func i public static Task Inline([NotNull] Action action, T1 arg1, T2 arg2, CancellationToken ct = default(CancellationToken)) { // note: if action is null, then there is a bug in the caller, and it should blow up instantly (will help preserving the call stack) - if (action == null) throw new ArgumentNullException("action"); + if (action == null) throw new ArgumentNullException(nameof(action)); // for all other exceptions, they will 
be wrapped in the returned task - if (ct.IsCancellationRequested) return FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); try { action(arg1, arg2); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } catch (Exception e) { @@ -196,13 +232,13 @@ public static async Task Then(this Task task, [NotNull] Func i public static Task Inline([NotNull] Action action, T1 arg1, T2 arg2, T3 arg3, CancellationToken ct = default(CancellationToken)) { // note: if action is null, then there is a bug in the caller, and it should blow up instantly (will help preserving the call stack) - if (action == null) throw new ArgumentNullException("action"); + if (action == null) throw new ArgumentNullException(nameof(action)); // for all other exceptions, they will be wrapped in the returned task - if (ct.IsCancellationRequested) return FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); try { action(arg1, arg2, arg3); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } catch (Exception e) { @@ -226,13 +262,13 @@ public static async Task Then(this Task task, [NotNull] Func i public static Task Inline([NotNull] Action action, T1 arg1, T2 arg2, T3 arg3, T4 arg4, CancellationToken ct = default(CancellationToken)) { // note: if action is null, then there is a bug in the caller, and it should blow up instantly (will help preserving the call stack) - if (action == null) throw new ArgumentNullException("action"); + if (action == null) throw new ArgumentNullException(nameof(action)); // for all other exceptions, they will be wrapped in the returned task - if (ct.IsCancellationRequested) return FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); try { action(arg1, arg2, arg3, arg4); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } catch (Exception e) { @@ -258,13 +294,13 @@ public static async Task Then(this Task task, [NotNull] Func i public static 
Task Inline([NotNull] Action action, T1 arg1, T2 arg2, T3 arg3, T4 arg4, T5 arg5, CancellationToken ct = default(CancellationToken)) { // note: if action is null, then there is a bug in the caller, and it should blow up instantly (will help preserving the call stack) - if (action == null) throw new ArgumentNullException("action"); + if (action == null) throw new ArgumentNullException(nameof(action)); // for all other exceptions, they will be wrapped in the returned task - if (ct.IsCancellationRequested) return FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); try { action(arg1, arg2, arg3, arg4, arg5); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } catch (Exception e) { @@ -293,27 +329,11 @@ public static Func> WithCancellation { - if (ct.IsCancellationRequested) return FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); return lambda(value); }; } - /// Returns a cancelled Task that is linked with a specific token - /// Type of the result of the task - /// Cancellation token that should already be cancelled - /// Task in the cancelled state that is linked with this cancellation token - public static Task FromCancellation(CancellationToken cancellationToken) - { - // There is a Task.FromCancellation() method in the BCL, but unfortunately it is internal :( - // The "best" way I've seen to emulate the same behavior, is creating a fake task (with a dummy action) with the same alread-cancelled CancellationToken - // This should throw the correct TaskCanceledException that is linked with this token - - // ensure that it is actually cancelled, so that we don't deadlock - if (!cancellationToken.IsCancellationRequested) throw new InvalidOperationException(); - - return new Task(Cache.Nop, cancellationToken); - } - /// Returns a cancelled Task that is not linked to any particular token /// Type of the result of the task /// Task in the cancelled state @@ -351,14 +371,14 @@ public static 
Task FromException(Exception e) /// Returns a failed Task that wraps an exception /// Type of the result of the task /// Exception that will be wrapped in the task - /// Original cancellation token that may have triggered + /// Original cancellation token that may have triggered /// Task that is already completed, and that will rethrow the exception once observed - public static Task FromFailure(Exception e, CancellationToken cancellationToken) + public static Task FromFailure(Exception e, CancellationToken ct) { if (e is OperationCanceledException) { - if (cancellationToken.IsCancellationRequested) - return FromCancellation(cancellationToken); + if (ct.IsCancellationRequested) + return Task.FromCanceled(ct); else return Canceled(); } @@ -400,6 +420,36 @@ public static void Observe(Task task) } } + private delegate CancellationTokenRegistration RegisterWithoutECDelegate(ref CancellationToken ct, Action callback, object state); + private static readonly RegisterWithoutECDelegate RegisterWithoutECHandler = GetRegisterWithoutECDelegate(); + + [NotNull] + private static RegisterWithoutECDelegate GetRegisterWithoutECDelegate() + { + try + { + // CancellationToken.Register(..., useExecutionContext) is "private", and all the public version of Register pass true, which does costly allocations (capturing context, ...) + // There is however CancellationToken.InternalRegisterWithoutEC which is internal and pass false. + // => we will attempt to create a delegate to call the internal method - if possible - or fallback to the default version of Register, if this is not possible. 
+ var method = typeof(CancellationToken).GetMethod("InternalRegisterWithoutEC", System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.NonPublic, null, new[] { typeof(Action), typeof(object) }, null); + if (method != null) + { + return (RegisterWithoutECDelegate)Delegate.CreateDelegate(typeof(RegisterWithoutECDelegate), null, method); + } + } + catch + { } + + return (ref CancellationToken token, Action callback, object state) => token.Register(callback, state); + } + + /// Version of CancellationToken.Register() that does not propagate the current ExecutionContext to the callback (faster, but unsafe!) + /// This should only be used with callbacks that do not execute user-provided code! + internal static CancellationTokenRegistration RegisterWithoutEC(this CancellationToken ct, [NotNull] Action callback, object state) + { + return RegisterWithoutECHandler(ref ct, callback, state); + } + /// Safely cancel a CancellationTokenSource /// CancellationTokenSource that needs to be cancelled public static void SafeCancel(this CancellationTokenSource source) diff --git a/FoundationDB.Client/Utils/CodeAnnotations.cs b/FoundationDB.Client/Shared/CodeAnnotations.cs similarity index 64% rename from FoundationDB.Client/Utils/CodeAnnotations.cs rename to FoundationDB.Client/Shared/CodeAnnotations.cs index acbefd190..8d9fab142 100644 --- a/FoundationDB.Client/Utils/CodeAnnotations.cs +++ b/FoundationDB.Client/Shared/CodeAnnotations.cs @@ -15,37 +15,42 @@ namespace JetBrains.Annotations /// /// Indicates that the value of the marked element could be null sometimes, - /// so the check for null is necessary before its usage + /// so the check for null is necessary before its usage. 
/// /// - /// [CanBeNull] public object Test() { return null; } - /// public void UseTest() { + /// [CanBeNull] object Test() => null; + /// + /// void UseTest() { /// var p = Test(); /// var s = p.ToString(); // Warning: Possible 'System.NullReferenceException' /// } /// [AttributeUsage( AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property | - AttributeTargets.Delegate | AttributeTargets.Field | AttributeTargets.Event)] + AttributeTargets.Delegate | AttributeTargets.Field | AttributeTargets.Event | + AttributeTargets.Class | AttributeTargets.Interface | AttributeTargets.GenericParameter)] [Conditional("JETBRAINS_ANNOTATIONS")] internal sealed class CanBeNullAttribute : Attribute { } /// - /// Indicates that the value of the marked element could never be null + /// Indicates that the value of the marked element could never be null. /// /// - /// [NotNull] public object Foo() { + /// [NotNull] object Foo() { /// return null; // Warning: Possible 'null' assignment /// } /// [AttributeUsage( AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property | - AttributeTargets.Delegate | AttributeTargets.Field | AttributeTargets.Event)] + AttributeTargets.Delegate | AttributeTargets.Field | AttributeTargets.Event | + AttributeTargets.Class | AttributeTargets.Interface | AttributeTargets.GenericParameter)] [Conditional("JETBRAINS_ANNOTATIONS")] internal sealed class NotNullAttribute : Attribute { } /// - /// Indicates that collection or enumerable value does not contain null elements + /// Can be appplied to symbols of types derived from IEnumerable as well as to symbols of Task + /// and Lazy classes to indicate that the value of a collection item, of the Task.Result property + /// or of the Lazy.Value property can never be null. 
/// [AttributeUsage( AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property | @@ -54,7 +59,9 @@ internal sealed class NotNullAttribute : Attribute { } internal sealed class ItemNotNullAttribute : Attribute { } /// - /// Indicates that collection or enumerable value can contain null elements + /// Can be appplied to symbols of types derived from IEnumerable as well as to symbols of Task + /// and Lazy classes to indicate that the value of a collection item, of the Task.Result property + /// or of the Lazy.Value property can be null. /// [AttributeUsage( AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property | @@ -62,20 +69,30 @@ internal sealed class ItemNotNullAttribute : Attribute { } [Conditional("JETBRAINS_ANNOTATIONS")] internal sealed class ItemCanBeNullAttribute : Attribute { } + /// + /// Implicitly apply [NotNull]/[ItemNotNull] annotation to all the of type members and parameters + /// in particular scope where this annotation is used (type declaration or whole assembly). + /// + [AttributeUsage( + AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Interface | AttributeTargets.Assembly)] + internal sealed class ImplicitNotNullAttribute : Attribute { } + /// /// Indicates that the marked method builds string by format pattern and (optional) arguments. /// Parameter, which contains format string, should be given in constructor. The format string - /// should be in -like form + /// should be in -like form. 
/// /// /// [StringFormatMethod("message")] - /// public void ShowError(string message, params object[] args) { /* do something */ } - /// public void Foo() { + /// void ShowError(string message, params object[] args) { /* do something */ } + /// + /// void Foo() { /// ShowError("Failed: {0}"); // Warning: Non-existing argument in format string /// } /// [AttributeUsage( - AttributeTargets.Constructor | AttributeTargets.Method | AttributeTargets.Delegate)] + AttributeTargets.Constructor | AttributeTargets.Method | + AttributeTargets.Property | AttributeTargets.Delegate)] [Conditional("JETBRAINS_ANNOTATIONS")] internal sealed class StringFormatMethodAttribute : Attribute { @@ -87,16 +104,33 @@ public StringFormatMethodAttribute(string formatParameterName) FormatParameterName = formatParameterName; } - public string FormatParameterName { get; private set; } + public string FormatParameterName { get; } + } + + /// + /// For a parameter that is expected to be one of the limited set of values. + /// Specify fields of which type should be used as values for this parameter. + /// + [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.Field)] + [Conditional("JETBRAINS_ANNOTATIONS")] + internal sealed class ValueProviderAttribute : Attribute + { + public ValueProviderAttribute(string name) + { + Name = name; + } + + [NotNull] + public string Name { get; } } /// /// Indicates that the function argument should be string literal and match one /// of the parameters of the caller function. For example, ReSharper annotates - /// the parameter of + /// the parameter of . 
/// /// - /// public void Foo(string param) { + /// void Foo(string param) { /// if (param == null) /// throw new ArgumentNullException("par"); // Warning: Cannot resolve symbol /// } @@ -106,7 +140,7 @@ public StringFormatMethodAttribute(string formatParameterName) internal sealed class InvokerParameterNameAttribute : Attribute { } /// - /// Describes dependency between method input and output + /// Describes dependency between method input and output. /// /// ///

Function Definition Table syntax:

@@ -153,7 +187,7 @@ internal sealed class InvokerParameterNameAttribute : Attribute { } internal sealed class ContractAnnotationAttribute : Attribute { public ContractAnnotationAttribute([NotNull] string contract) - : this(contract, false) { } + : this(contract, false) { } public ContractAnnotationAttribute([NotNull] string contract, bool forceFullStates) { @@ -161,8 +195,8 @@ public ContractAnnotationAttribute([NotNull] string contract, bool forceFullStat ForceFullStates = forceFullStates; } - public string Contract { get; private set; } - public bool ForceFullStates { get; private set; } + public string Contract { get; } + public bool ForceFullStates { get; } } /// @@ -174,8 +208,9 @@ public ContractAnnotationAttribute([NotNull] string contract, bool forceFullStat /// /// [CannotApplyEqualityOperator] /// class NoEquality { } + /// /// class UsesNoEquality { - /// public void Test() { + /// void Test() { /// var ca1 = new NoEquality(); /// var ca2 = new NoEquality(); /// if (ca1 != null) { // OK @@ -184,8 +219,7 @@ public ContractAnnotationAttribute([NotNull] string contract, bool forceFullStat /// } /// } /// - [AttributeUsage( - AttributeTargets.Interface | AttributeTargets.Class | AttributeTargets.Struct)] + [AttributeUsage(AttributeTargets.Interface | AttributeTargets.Class | AttributeTargets.Struct)] [Conditional("JETBRAINS_ANNOTATIONS")] internal sealed class CannotApplyEqualityOperatorAttribute : Attribute { } @@ -195,11 +229,12 @@ internal sealed class CannotApplyEqualityOperatorAttribute : Attribute { } /// /// /// [BaseTypeRequired(typeof(IComponent)] // Specify requirement - /// public class ComponentAttribute : Attribute { } + /// class ComponentAttribute : Attribute { } + /// /// [Component] // ComponentAttribute requires implementing IComponent interface - /// public class MyComponent : IComponent { } + /// class MyComponent : IComponent { } /// - [AttributeUsage(AttributeTargets.Class, AllowMultiple = true, Inherited = true)] + 
[AttributeUsage(AttributeTargets.Class, AllowMultiple = true)] [BaseTypeRequired(typeof(Attribute))] [Conditional("JETBRAINS_ANNOTATIONS")] internal sealed class BaseTypeRequiredAttribute : Attribute @@ -210,13 +245,12 @@ public BaseTypeRequiredAttribute([NotNull] Type baseType) } [NotNull] - public Type BaseType { get; private set; } + public Type BaseType { get; set; } } /// - /// Indicates that the marked symbol is used implicitly - /// (e.g. via reflection, in external library), so this symbol - /// will not be marked as unused (as well as by other usage inspections) + /// Indicates that the marked symbol is used implicitly (e.g. via reflection, in external library), + /// so this symbol will not be marked as unused (as well as by other usage inspections). /// [AttributeUsage(AttributeTargets.All)] [Conditional("JETBRAINS_ANNOTATIONS")] @@ -231,21 +265,19 @@ public UsedImplicitlyAttribute(ImplicitUseKindFlags useKindFlags) public UsedImplicitlyAttribute(ImplicitUseTargetFlags targetFlags) : this(ImplicitUseKindFlags.Default, targetFlags) { } - public UsedImplicitlyAttribute( - ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags) + public UsedImplicitlyAttribute(ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags) { UseKindFlags = useKindFlags; TargetFlags = targetFlags; } - public ImplicitUseKindFlags UseKindFlags { get; private set; } - public ImplicitUseTargetFlags TargetFlags { get; private set; } + public ImplicitUseKindFlags UseKindFlags { get; } + public ImplicitUseTargetFlags TargetFlags { get; } } /// - /// Should be used on attributes and causes ReSharper - /// to not mark symbols marked with such attributes as unused - /// (as well as by other usage inspections) + /// Should be used on attributes and causes ReSharper to not mark symbols marked with such attributes + /// as unused (as well as by other usage inspections) /// [AttributeUsage(AttributeTargets.Class | AttributeTargets.GenericParameter)] 
[Conditional("JETBRAINS_ANNOTATIONS")] @@ -260,8 +292,7 @@ public MeansImplicitUseAttribute(ImplicitUseKindFlags useKindFlags) public MeansImplicitUseAttribute(ImplicitUseTargetFlags targetFlags) : this(ImplicitUseKindFlags.Default, targetFlags) { } - public MeansImplicitUseAttribute( - ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags) + public MeansImplicitUseAttribute(ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags) { UseKindFlags = useKindFlags; TargetFlags = targetFlags; @@ -277,40 +308,39 @@ public MeansImplicitUseAttribute( internal enum ImplicitUseKindFlags { Default = Access | Assign | InstantiatedWithFixedConstructorSignature, - /// Only entity marked with attribute considered used + /// Only entity marked with attribute considered used. Access = 1, - /// Indicates implicit assignment to a member + /// Indicates implicit assignment to a member. Assign = 2, /// /// Indicates implicit instantiation of a type with fixed constructor signature. /// That means any unused constructor parameters won't be reported as such. /// InstantiatedWithFixedConstructorSignature = 4, - /// Indicates implicit instantiation of a type + /// Indicates implicit instantiation of a type. InstantiatedNoFixedConstructorSignature = 8, } /// - /// Specify what is considered used implicitly - /// when marked with - /// or + /// Specify what is considered used implicitly when marked + /// with or . /// [Flags] internal enum ImplicitUseTargetFlags { Default = Itself, Itself = 1, - /// Members of entity marked with attribute are considered used + /// Members of entity marked with attribute are considered used. Members = 2, - /// Entity marked with attribute and all its members considered used + /// Entity marked with attribute and all its members considered used. 
WithMembers = Itself | Members } /// /// This attribute is intended to mark publicly available API - /// which should not be removed and so is treated as used + /// which should not be removed and so is treated as used. /// - [MeansImplicitUse] + [MeansImplicitUse(ImplicitUseTargetFlags.WithMembers)] [Conditional("JETBRAINS_ANNOTATIONS")] internal sealed class PublicAPIAttribute : Attribute { @@ -320,15 +350,13 @@ public PublicAPIAttribute([NotNull] string comment) Comment = comment; } - public string Comment { get; private set; } + public string Comment { get; } } /// - /// Tells code analysis engine if the parameter is completely handled - /// when the invoked method is on stack. If the parameter is a delegate, - /// indicates that delegate is executed while the method is executed. - /// If the parameter is an enumerable, indicates that it is enumerated - /// while the method is executed + /// Tells code analysis engine if the parameter is completely handled when the invoked method is on stack. + /// If the parameter is a delegate, indicates that delegate is executed while the method is executed. + /// If the parameter is an enumerable, indicates that it is enumerated while the method is executed. /// [AttributeUsage(AttributeTargets.Parameter)] [Conditional("JETBRAINS_ANNOTATIONS")] @@ -336,13 +364,13 @@ internal sealed class InstantHandleAttribute : Attribute { } /// /// Indicates that a method does not make any observable state changes. - /// The same as System.Diagnostics.Contracts.PureAttribute + /// The same as System.Diagnostics.Contracts.PureAttribute. 
 /// /// - /// [Pure] private int Multiply(int x, int y) { return x * y; } - /// public void Foo() { - /// const int a = 2, b = 2; - /// Multiply(a, b); // Waring: Return value of pure method is not used + /// [Pure] int Multiply(int x, int y) => x * y; + /// + /// void M() { + /// Multiply(123, 42); // Warning: Return value of pure method is not used /// } /// [AttributeUsage(AttributeTargets.Method)] @@ -350,10 +378,48 @@ internal sealed class InstantHandleAttribute : Attribute { } internal sealed class PureAttribute : Attribute { } /// - /// Indicates how method invocation affects content of the collection + /// Indicates that the return value of method invocation must be used. /// [AttributeUsage(AttributeTargets.Method)] [Conditional("JETBRAINS_ANNOTATIONS")] + internal sealed class MustUseReturnValueAttribute : Attribute + { + public MustUseReturnValueAttribute() { } + public MustUseReturnValueAttribute([NotNull] string justification) + { + Justification = justification; + } + + public string Justification { get; } + } + + /// + /// Indicates the type member or parameter of some type, that should be used instead of all other ways + /// to get the value that type. This annotation is useful when you have some "context" value evaluated + /// and stored somewhere, meaning that all other ways to get this value must be consolidated with existing one.
+ /// + /// + /// class Foo { + /// [ProvidesContext] IBarService _barService = ...; + /// + /// void ProcessNode(INode node) { + /// DoSomething(node, node.GetGlobalServices().Bar); + /// // ^ Warning: use value of '_barService' field + /// } + /// } + /// + [AttributeUsage( + AttributeTargets.Field | AttributeTargets.Property | AttributeTargets.Parameter | AttributeTargets.Method | + AttributeTargets.Class | AttributeTargets.Interface | AttributeTargets.Struct | AttributeTargets.GenericParameter)] + [Conditional("JETBRAINS_ANNOTATIONS")] + internal sealed class ProvidesContextAttribute : Attribute { } + + /// + /// Indicates how method, constructor invocation or property access + /// over collection type affects content of the collection. + /// + [AttributeUsage(AttributeTargets.Method | AttributeTargets.Constructor | AttributeTargets.Property)] + [Conditional("JETBRAINS_ANNOTATIONS")] internal sealed class CollectionAccessAttribute : Attribute { public CollectionAccessAttribute(CollectionAccessType collectionAccessType) @@ -361,26 +427,26 @@ public CollectionAccessAttribute(CollectionAccessType collectionAccessType) CollectionAccessType = collectionAccessType; } - public CollectionAccessType CollectionAccessType { get; private set; } + public CollectionAccessType CollectionAccessType { get; } } [Flags] internal enum CollectionAccessType { - /// Method does not use or modify content of the collection + /// Method does not use or modify content of the collection. None = 0, - /// Method only reads content of the collection but does not modify it + /// Method only reads content of the collection but does not modify it. Read = 1, - /// Method can change content of the collection but does not add new elements + /// Method can change content of the collection but does not add new elements. ModifyExistingContent = 2, - /// Method can add new elements to the collection + /// Method can add new elements to the collection. 
UpdatedContent = ModifyExistingContent | 4 } /// /// Indicates that the marked method is assertion method, i.e. it halts control flow if /// one of the conditions is satisfied. To set the condition, mark one of the parameters with - /// attribute + /// attribute. /// [AttributeUsage(AttributeTargets.Method)] [Conditional("JETBRAINS_ANNOTATIONS")] @@ -400,22 +466,22 @@ public AssertionConditionAttribute(AssertionConditionType conditionType) ConditionType = conditionType; } - public AssertionConditionType ConditionType { get; private set; } + public AssertionConditionType ConditionType { get; } } /// /// Specifies assertion type. If the assertion method argument satisfies the condition, - /// then the execution continues. Otherwise, execution is assumed to be halted + /// then the execution continues. Otherwise, execution is assumed to be halted. /// internal enum AssertionConditionType { - /// Marked parameter should be evaluated to true + /// Marked parameter should be evaluated to true. IS_TRUE = 0, - /// Marked parameter should be evaluated to false + /// Marked parameter should be evaluated to false. IS_FALSE = 1, - /// Marked parameter should be evaluated to null value + /// Marked parameter should be evaluated to null value. IS_NULL = 2, - /// Marked parameter should be evaluated to not null value + /// Marked parameter should be evaluated to not null value. IS_NOT_NULL = 3, } @@ -452,4 +518,29 @@ internal sealed class RegexPatternAttribute : Attribute { } [Conditional("JETBRAINS_ANNOTATIONS")] internal sealed class NoReorder : Attribute { } -} \ No newline at end of file + // ==================================================== + // === CUSTOM CONTRACT ATTRIBUTES + // ==================================================== + + // NOTE: these attributes are not recognize by Resharper (yet?) but can be used + // by Roslyn Analyzers or other static analysis tools to further verify the code. 
+ + // DO NOT OVERWRITE THESE WHEN UPDATING THE OFFICIAL CONTRACT ATTRIBUTES! + + /// The value cannot be negative + /// REQUIRES: x >= 0 + [AttributeUsage( + AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property | + AttributeTargets.Delegate | AttributeTargets.Field | AttributeTargets.Event)] + [Conditional("JETBRAINS_ANNOTATIONS")] + internal sealed class PositiveAttribute : Attribute { } + + /// The value must be a power of two + /// REQUIRES: x == 1 << Round(Log2(X)) + [AttributeUsage( + AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property | + AttributeTargets.Delegate | AttributeTargets.Field | AttributeTargets.Event)] + [Conditional("JETBRAINS_ANNOTATIONS")] + internal sealed class PowerOfTwoAttribute : Attribute { } + +} diff --git a/FoundationDB.Client/Shared/Contract.cs b/FoundationDB.Client/Shared/Contract.cs new file mode 100644 index 000000000..22b602fb0 --- /dev/null +++ b/FoundationDB.Client/Shared/Contract.cs @@ -0,0 +1,1394 @@ +#region Copyright (c) 2013-2018, Doxense SAS. All rights reserved.
+// See License.MD for license information +#endregion + +namespace Doxense.Diagnostics.Contracts +{ + using JetBrains.Annotations; + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Reflection; + using System.Runtime.CompilerServices; + using System.Runtime.ConstrainedExecution; + using SDC = System.Diagnostics.Contracts; + using SRC = System.Runtime.CompilerServices; + + internal static class ContractMessages + { + public const string PreconditionWasNotMet = "A pre-condition was not met"; + public const string ValueCannotBeNull = "Value cannot be null."; + public const string StringCannotBeEmpty = "String cannot be empty."; + public const string StringCannotBeWhiteSpaces = "String cannot contain only whitespaces."; + public const string CollectionCannotBeEmpty = "Collection cannot be empty."; + public const string BufferCannotBeNull = "Buffer cannot be null."; + public const string BufferCannotBeEmpty = "Buffer cannot be empty."; + public const string PositiveNumberRequired = "Positive number required."; + public const string PowerOfTwoRequired = "Power of two number required."; + public const string AboveZeroNumberRequired = "Non-Zero Positive number required."; + public const string ValueIsTooSmall = "The specified value is too small."; + public const string ValueIsTooBig = "The specified value is too big."; + public const string ValueIsForbidden = "The specified value is not allowed."; + public const string ValueIsExpected = "The specified value is not the expected value."; + public const string ValueMustBeBetween = "The specified value was outside the specified range."; + public const string ValueMustBeMultiple = "The specified value must be a multiple of another value."; + public const string NonNegativeNumberRequired = "Non-negative number required."; + public const string OffsetMustBeWithinBuffer = "Offset and length were out of bounds for the buffer or count is greater than the number of elements from index to 
the end of the buffer."; + + public const string ConditionNotNull = "{0} != null"; + public const string ConditionNotEmptyLength = "{0}.Length > 0"; + public const string ConditionNotWhiteSpace = "{0}.All(c => !char.IsWhiteSpace(c))"; + public const string ConditionNotEmptyCount = "{0}.Count > 0"; + public const string ConditionArgPositive = "{0} >= 0"; + public const string ConditionArgNotEqualTo = "{0} != x"; + public const string ConditionArgEqualTo = "{0} == x"; + public const string ConditionArgGreaterThan = "{0} > x"; + public const string ConditionArgGreaterThanZero = "{0} > 0"; + public const string ConditionArgGreaterOrEqual = "{0} >= x"; + public const string ConditionArgGreaterOrEqualZero = "{0} >= 0"; + public const string ConditionArgMultiple = "{0} % x == 0"; + public const string ConditionArgLessThan = "{0} < x"; + public const string ConditionArgLessThanOrEqual = "{0} <= x"; + public const string ConditionArgBetween = "min <= {0} <= max"; + public const string ConditionArgBufferOverflow = "(buffer.Length - offset) < count"; + } + + /// Classe helper pour la vérification de pré-requis, invariants, assertions, ... 
+ [DebuggerNonUserCode] + public static class Contract + { + + public static bool IsUnitTesting { get; set; } + + private static readonly ConstructorInfo s_constructorNUnitException; + + static Contract() + { + // détermine si on est lancé depuis des tests unitaires (pour désactiver les breakpoints et autres opérations intrusivent qui vont parasiter les tests) + + var nUnitAssert = Type.GetType("NUnit.Framework.AssertionException,nunit.framework"); + if (nUnitAssert != null) + { + // on convertit les échecs "soft" en échec d'assertion NUnit + s_constructorNUnitException = nUnitAssert.GetConstructor(new [] { typeof (string) }); + IsUnitTesting = true; + } + } + + private static Exception MapToNUnitAssertion(string message) + { + return (Exception) s_constructorNUnitException?.Invoke(new object[] { message }); // => new NUnit.Framework.AssertionException(...) + } + + #region DEBUG checks... + + /// [DEBUG ONLY] Dummy method (no-op) + [Conditional("CONTRACTS_FULL")] + public static void EndContractBlock() + { + // cette méthode ne fait rien, et sert juste à émuler la Contract API + } + + /// [DEBUG ONLY] Vérifie qu'une pré-condition est vrai, lors de l'entrée dans une méthode + /// Condition qui ne doit jamais être fausse + /// Ne fait rien si la condition est vrai. 
Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + [DebuggerStepThrough] + public static void Requires([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition) + { +#if DEBUG + if (!condition) throw RaiseContractFailure(SDC.ContractFailureKind.Precondition, null); +#endif + } + + /// [DEBUG ONLY] Vérifie qu'une pré-condition est vrai, lors de l'entrée dans une méthode + /// Condition qui ne doit jamais être fausse + /// Message décrivant l'erreur (optionnel) + /// Ne fait rien si la condition est vrai. Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Requires([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, string userMessage) + { +#if DEBUG + if (!condition) throw RaiseContractFailure(SDC.ContractFailureKind.Precondition, userMessage); +#endif + } + + /// [DEBUG ONLY] Vérifie qu'une condition est toujours vrai, dans le body dans une méthode + /// Condition qui ne doit jamais être fausse + /// Ne fait rien si la condition est vrai. 
Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Assert([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition) + { +#if DEBUG + if (!condition) throw RaiseContractFailure(SDC.ContractFailureKind.Assert, null); +#endif + } + + /// [DEBUG ONLY] Vérifie qu'une condition est toujours vrai, dans le body dans une méthode + /// Condition qui ne doit jamais être fausse + /// Message décrivant l'erreur (optionnel) + /// Ne fait rien si la condition est vrai. Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Assert([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, string userMessage) + { +#if DEBUG + if (!condition) throw RaiseContractFailure(SDC.ContractFailureKind.Assert, userMessage); +#endif + } + +#if DEPRECATED + /// [DEBUG ONLY] Vérifie qu'une condition est toujours vrai, dans le body dans une méthode + /// Valeur observée + /// Valeur attendue + /// Ne fait rien si la condition est vrai. 
Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + [Obsolete("Use Contract.Assert(actual == expected) instead")] + public static void Expect(T actual, T expected) + { + if (!EqualityComparer.Default.Equals(actual, expected)) RaiseContractFailure(SDC.ContractFailureKind.Assert, String.Format(CultureInfo.InvariantCulture, "Expected value {0} but was {1}", expected, actual)); + } +#endif + + /// [DEBUG ONLY] Vérifie qu'une condition est toujours vrai, lors de la sortie d'une méthode + /// Condition qui ne doit jamais être fausse + /// Ne fait rien si la condition est vrai. Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Ensures([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition) + { +#if DEBUG + if (!condition) throw RaiseContractFailure(SDC.ContractFailureKind.Postcondition, null); +#endif + } + + /// [DEBUG ONLY] Vérifie qu'une condition est toujours vrai, lors de la sortie d'une méthode + /// Condition qui ne doit jamais être fausse + /// Message décrivant l'erreur (optionnel) + /// Ne fait rien si la condition est vrai. 
Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Ensures([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, string userMessage) + { +#if DEBUG + if (!condition) throw RaiseContractFailure(SDC.ContractFailureKind.Postcondition, userMessage); +#endif + } + + /// [DEBUG ONLY] Vérifie qu'une condition est toujours vrai pendant toute la vie d'une instance + /// Condition qui ne doit jamais être fausse + /// Message décrivant l'erreur (optionnel) + /// Ne fait rien si la condition est vrai. Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Invariant([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, string userMessage = null) + { +#if DEBUG + if (!condition) throw RaiseContractFailure(SDC.ContractFailureKind.Invariant, userMessage); +#endif + } + + #endregion + + #region RUNTIME checks... 
+ + #region Contract.NotNull + + /// [RUNTIME] The specified instance must not be null (assert: value != null) + /// if is null + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL), NoEnumeration] TValue value, + [InvokerParameterName] string paramName) + where TValue : class + { + if (value == null) throw FailArgumentNull(paramName, null); + } + + /// [RUNTIME] The specified instance must not be null (assert: value != null) + /// if is null + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL), NoEnumeration] string value, + [InvokerParameterName] string paramName) + { + if (value == null) throw FailArgumentNull(paramName, null); + } + + /// [RUNTIME] The specified instance must not be null (assert: value != null) + /// if is null + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL), NoEnumeration] TValue value, + [InvokerParameterName] string paramName, + string message) + where TValue : class + { + if (value == null) throw FailArgumentNull(paramName, message); + } + + /// [RUNTIME] The specified instance must not be null (assert: value != null) + /// This methods allow structs (that can never be null) + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullAllowStructs( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL), NoEnumeration] TValue value, + [InvokerParameterName] string paramName) + { + if (value == null) throw FailArgumentNull(paramName, null); + } + + /// [RUNTIME] The specified instance must not be null (assert: value != null) + /// This methods allow structs (that can never be null) + /// if is null + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void 
NotNullAllowStructs( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL), NoEnumeration] TValue value, + [InvokerParameterName] string paramName, + string message) + { + if (value == null) throw FailArgumentNull(paramName, message); + } + + /// [RUNTIME] The specified pointer must not be null (assert: pointer != null) + /// if is null + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static unsafe void PointerNotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] void* pointer, + [InvokerParameterName] string paramName) + { + if (pointer == null) throw FailArgumentNull(paramName, null); + } + + /// [RUNTIME] The specified pointer must not be null (assert: pointer != null) + /// if is null + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static unsafe void PointerNotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] void* pointer, + [InvokerParameterName] string paramName, + string message) + { + if (pointer == null) throw FailArgumentNull(paramName, message); + } + + /// [RUNTIME] The specified value cannot be null (assert: value != null) + /// Passed value, or throws an exception if it was null + /// if is null + /// This method is intended for use in single-line property setters + /// + /// public string FooThatIsNeverNull + /// { + /// get => return m_foo; + /// set => m_foo = Contract.ValueNotNull(value); + /// } + /// + [Pure, NotNull, AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static T ValueNotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL), NoEnumeration] T value + ) + where T : class + { + return value ?? 
throw FailArgumentNull(nameof(value), null); + } + + /// [RUNTIME] The specified value cannot be null (assert: value != null) + /// Passed value, or throws an exception if it was null + /// if is null + /// This method is intended for use in single-line property setters + /// + /// private string m_fooThatIsNeverNull; + /// public string Foo + /// { + /// get => return m_fooThatIsNeverNull; + /// set => m_fooThatIsNeverNull = Contract.ValueNotNull(value, "Foo cannot be set to null"); + /// } + /// + [Pure, NotNull, AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static T ValueNotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL), NoEnumeration] T value, + string message + ) + where T : class + { + return value ?? throw FailArgumentNull(nameof(value), message); + } + + #endregion + + #region Contract.NotNullOrEmpty + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailStringNullOrEmpty(string value, string paramName, string message = null) + { + if (value == null) + return ReportFailure(typeof(ArgumentNullException), ContractMessages.ValueCannotBeNull, message, paramName, ContractMessages.ConditionNotNull); + else + return ReportFailure(typeof(ArgumentException), ContractMessages.StringCannotBeEmpty, message, paramName, ContractMessages.ConditionNotEmptyLength); + } + + /// [RUNTIME] The specified string must not be null or empty (assert: value != null && value.Length != 0) + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullOrEmpty( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] string value, + [InvokerParameterName] string paramName + ) + { + if (string.IsNullOrEmpty(value)) throw FailStringNullOrEmpty(value, paramName, null); + } + + /// [RUNTIME] The specified string must not be null or empty (assert: value != null && value.Length != 0) + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void 
NotNullOrEmpty( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] string value, + [InvokerParameterName] string paramName, + string message) + { + if (string.IsNullOrEmpty(value)) throw FailStringNullOrEmpty(value, paramName, message); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailStringNullOrWhiteSpace(string value, string paramName, string message = null) + { + if (value == null) + return ReportFailure(typeof(ArgumentNullException), ContractMessages.ValueCannotBeNull, message, paramName, ContractMessages.ConditionNotNull); + else if (value.Length == 0) + return ReportFailure(typeof(ArgumentException), ContractMessages.StringCannotBeEmpty, message, paramName, ContractMessages.ConditionNotEmptyLength); + else + return ReportFailure(typeof(ArgumentException), ContractMessages.StringCannotBeWhiteSpaces, message, paramName, ContractMessages.ConditionNotWhiteSpace); + } + + /// [RUNTIME] The specified string must not be null, empty or contain only whitespaces (assert: value != null && value.Length != 0) + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullOrWhiteSpace( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] string value, + [InvokerParameterName] string paramName) + { + if (string.IsNullOrWhiteSpace(value)) throw FailStringNullOrWhiteSpace(value, paramName, null); + } + + /// [RUNTIME] The specified string must not be null, empty or contain only whitespaces (assert: value != null && value.Length != 0) + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullOrWhiteSpace( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] string value, + [InvokerParameterName] string paramName, + string message) + { + if (string.IsNullOrWhiteSpace(value)) throw FailStringNullOrWhiteSpace(value, paramName, message); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception 
FailArrayNullOrEmpty(object collection, string paramName, string message = null) + { + if (collection == null) + return ReportFailure(typeof(ArgumentNullException), ContractMessages.ValueCannotBeNull, message, paramName, ContractMessages.ConditionNotNull); + else + return ReportFailure(typeof(ArgumentException), ContractMessages.CollectionCannotBeEmpty, message, paramName, ContractMessages.ConditionNotEmptyCount); + } + + /// [RUNTIME] The specified array must not be null or empty (assert: value != null && value.Count != 0) + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullOrEmpty( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] T[] value, + [InvokerParameterName] string paramName) + { + if (value == null || value.Length == 0) throw FailArrayNullOrEmpty(value, paramName, null); + } + + /// [RUNTIME] The specified array must not be null or empty (assert: value != null && value.Count != 0) + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullOrEmpty( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] T[] value, + [InvokerParameterName] string paramName, + string message) + { + if (value == null || value.Length == 0) throw FailArrayNullOrEmpty(value, paramName, message); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailCollectionNullOrEmpty(object collection, string paramName, string message = null) + { + if (collection == null) + return ReportFailure(typeof(ArgumentNullException), ContractMessages.ValueCannotBeNull, message, paramName, ContractMessages.ConditionNotNull); + else + return ReportFailure(typeof(ArgumentException), ContractMessages.CollectionCannotBeEmpty, message, paramName, ContractMessages.ConditionNotEmptyCount); + } + + /// [RUNTIME] The specified collection must not be null or empty (assert: value != null && value.Count != 0) + [AssertionMethod,
MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullOrEmpty( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] ICollection value, + [InvokerParameterName] string paramName) + { + if (value == null || value.Count == 0) throw FailCollectionNullOrEmpty(value, paramName, null); + } + + /// [RUNTIME] The specified collection must not be null or empty (assert: value != null && value.Count != 0) + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullOrEmpty( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] ICollection value, + [InvokerParameterName] string paramName, + string message) + { + if (value == null || value.Count == 0) throw FailCollectionNullOrEmpty(value, paramName, message); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailBufferNull(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentNullException), ContractMessages.BufferCannotBeNull, message, paramName, ContractMessages.ConditionNotNull); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailBufferNullOrEmpty(object array, string paramName, string message = null) + { + if (array == null) + return ReportFailure(typeof(ArgumentNullException), ContractMessages.BufferCannotBeNull, message, paramName, ContractMessages.ConditionNotNull); + else + return ReportFailure(typeof(ArgumentException), ContractMessages.BufferCannotBeEmpty, message, paramName, ContractMessages.ConditionNotEmptyCount); + } + + /// [RUNTIME] The specified buffer must not be null or empty (assert: buffer.Array != null && buffer.Count != 0) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullOrEmpty( + ArraySegment buffer, + [InvokerParameterName] string paramName) + { + if (buffer.Array == null | buffer.Count == 0) throw FailBufferNullOrEmpty(buffer.Array, paramName, null); + } + + /// [RUNTIME] The specified
buffer must not be null or empty (assert: buffer.Array != null && buffer.Count != 0) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotNullOrEmpty( + ArraySegment buffer, + [InvokerParameterName] string paramName, + string message) + { + if (buffer.Array == null | buffer.Count == 0) throw FailBufferNullOrEmpty(buffer.Array, paramName, message); + } + + #endregion + + #region Contract.Positive, LessThan[OrEqual], GreaterThen[OrEqual], EqualTo, Between, ... + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentNotPositive(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentException), ContractMessages.PositiveNumberRequired, message, paramName, ContractMessages.ConditionArgPositive); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentNotNonNegative(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentException), ContractMessages.NonNegativeNumberRequired, message, paramName, ContractMessages.ConditionArgPositive); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentNotPowerOfTwo(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentException), ContractMessages.PowerOfTwoRequired, message, paramName, ContractMessages.ConditionArgPositive); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentForbidden(string paramName, T forbidden, string message = null) + { + //TODO: need support for two format arguments for conditionTxt ! 
+ return ReportFailure(typeof(ArgumentException), ContractMessages.ValueIsForbidden, message, paramName, ContractMessages.ConditionArgNotEqualTo); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentExpected(string paramName, T expected, string message = null) + { + //TODO: need support for two format arguments for conditionTxt ! + return ReportFailure(typeof(ArgumentException), ContractMessages.ValueIsExpected, message, paramName, ContractMessages.ConditionArgEqualTo); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentNotGreaterThan(string paramName, bool zero, string message = null) + { + return ReportFailure(typeof(ArgumentException), zero ? ContractMessages.AboveZeroNumberRequired : ContractMessages.ValueIsTooSmall, message, paramName, zero ? ContractMessages.ConditionArgGreaterThanZero : ContractMessages.ConditionArgGreaterThan); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentNotGreaterOrEqual(string paramName, bool zero, string message = null) + { + return ReportFailure(typeof(ArgumentException), zero ? ContractMessages.PositiveNumberRequired : ContractMessages.ValueIsTooSmall, message, paramName, zero ? 
ContractMessages.ConditionArgGreaterOrEqualZero : ContractMessages.ConditionArgGreaterOrEqual); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentNotLessThan(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentException), ContractMessages.ValueIsTooBig, message, paramName, ContractMessages.ConditionArgLessThan); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentNotLessOrEqual(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentException), ContractMessages.ValueIsTooBig, message, paramName, ContractMessages.ConditionArgLessThanOrEqual); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentOutOfBounds(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentException), ContractMessages.ValueMustBeBetween, message, paramName, ContractMessages.ConditionArgBetween); + } + + /// [RUNTIME] The specified value must not be a negative number (assert: value >= 0) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Positive(int value, [InvokerParameterName] string paramName) + { + if (value < 0) + { + throw FailArgumentNotPositive(paramName, null); + } + } + + /// [RUNTIME] The specified value must not be a negative number (assert: value >= 0) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Positive(int value, [InvokerParameterName] string paramName, string message) + { + if (value < 0) + { + throw FailArgumentNotPositive(paramName, message); + } + } + + /// [RUNTIME] The specified value must not be a negative number (assert: value >= 0) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Positive(long value, [InvokerParameterName] string paramName) + { + if (value < 0) + { + throw FailArgumentNotPositive(paramName, null); + } + } + + /// [RUNTIME] The specified 
value must not be a negative number (assert: value >= 0) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Positive(long value, [InvokerParameterName] string paramName, string message) + { + if (value < 0) + { + throw FailArgumentNotPositive(paramName, message); + } + } + + /// [RUNTIME] The specified value must be a power of two (assert: NextPowerOfTwo(value) == value) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void PowerOfTwo(int value, [InvokerParameterName] string paramName, string message = null) + { + if (value < 0 || unchecked((value & (value - 1)) != 0)) + { + throw FailArgumentNotPowerOfTwo(paramName, message); + } + } + + /// [RUNTIME] The specified value must be a power of two (assert: NextPowerOfTwo(value) == value) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void PowerOfTwo(uint value, [InvokerParameterName] string paramName, string message = null) + { + if (unchecked((value & (value - 1)) != 0)) + { + throw FailArgumentNotPowerOfTwo(paramName, message); + } + } + + /// [RUNTIME] The specified value must be a power of two (assert: NextPowerOfTwo(value) == value) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void PowerOfTwo(long value, [InvokerParameterName] string paramName, string message = null) + { + if (value < 0 || unchecked((value & (value - 1)) != 0)) + { + throw FailArgumentNotPowerOfTwo(paramName, message); + } + } + + /// [RUNTIME] The specified value must be a power of two (assert: NextPowerOfTwo(value) == value) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void PowerOfTwo(ulong value, [InvokerParameterName] string paramName, string message = null) + { + if (unchecked((value & (value - 1)) != 0)) + { + throw FailArgumentNotPowerOfTwo(paramName, message); + } + } + + /// [RUNTIME] The specified value must not less than or equal to the specified lower bound (assert: value > threshold) + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterThan(int value, int threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value <= threshold) + { + throw FailArgumentNotGreaterThan(paramName, threshold == 0, message); + } + } + + /// [RUNTIME] The specified value must not equal to the specified constant (assert: value != forbidden) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void EqualTo(long value, long expected, [InvokerParameterName] string paramName, string message = null) + { + if (value != expected) + { + throw FailArgumentExpected(paramName, expected, message); + } + } + + /// [RUNTIME] The specified value must not equal to the specified constant (assert: value != forbidden) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void EqualTo(ulong value, ulong expected, [InvokerParameterName] string paramName, string message = null) + { + if (value != expected) + { + throw FailArgumentExpected(paramName, expected, message); + } + } + + /// [RUNTIME] The specified value must not equal to the specified constant (assert: value != forbidden) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void EqualTo(string value, string expected, [InvokerParameterName] string paramName, string message = null) + { + if (value != expected) + { + throw FailArgumentExpected(paramName, expected, message); + } + } + + /// [RUNTIME] The specified value must not equal to the specified constant (assert: value != forbidden) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void EqualTo(T value, T expected, [InvokerParameterName] string paramName, string message = null) + where T : struct, IEquatable + { + if (!value.Equals(expected)) + { + throw FailArgumentExpected(paramName, expected, message); + } + } + + /// [RUNTIME] The specified value must not equal to the specified constant (assert: value != forbidden) + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotEqualTo(long value, long forbidden, [InvokerParameterName] string paramName, string message = null) + { + if (value == forbidden) + { + throw FailArgumentForbidden(paramName, forbidden, message); + } + } + + /// [RUNTIME] The specified value must not equal to the specified constant (assert: value != forbidden) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotEqualTo(ulong value, ulong forbidden, [InvokerParameterName] string paramName, string message = null) + { + if (value == forbidden) + { + throw FailArgumentForbidden(paramName, forbidden, message); + } + } + + /// [RUNTIME] The specified value must not equal to the specified constant (assert: value != forbidden) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotEqualTo(string value, string forbidden, [InvokerParameterName] string paramName, string message = null) + { + if (value == forbidden) + { + throw FailArgumentForbidden(paramName, forbidden, message); + } + } + + /// [RUNTIME] The specified value must not equal to the specified constant (assert: value != forbidden) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void NotEqualTo(T value, T forbidden, [InvokerParameterName] string paramName, string message = null) + where T : struct, IEquatable + { + if (value.Equals(forbidden)) + { + throw FailArgumentForbidden(paramName, forbidden, message); + } + } + + /// [RUNTIME] The specified value must not less than or equal to the specified lower bound (assert: value > threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterThan(uint value, uint threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value <= threshold) + { + throw FailArgumentNotGreaterThan(paramName, threshold == 0, message); + } + } + + /// [RUNTIME] The specified value must not less than or equal to the specified lower bound 
(assert: value > threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterThan(long value, long threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value <= threshold) + { + throw FailArgumentNotGreaterThan(paramName, threshold == 0, message); + } + } + + /// [RUNTIME] The specified value must not less than or equal to the specified lower bound (assert: value > threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterThan(ulong value, ulong threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value <= threshold) + { + throw FailArgumentNotGreaterThan(paramName, threshold == 0, message); + } + } + + /// [RUNTIME] The specified value must not less than or equal to the specified lower bound (assert: value > threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterThan(float value, float threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value <= threshold) + { + // ReSharper disable once CompareOfFloatsByEqualityOperator + throw FailArgumentNotGreaterThan(paramName, threshold == 0.0f, message); + } + } + + /// [RUNTIME] The specified value must not less than or equal to the specified lower bound (assert: value > threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterThan(double value, double threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value <= threshold) + { + // ReSharper disable once CompareOfFloatsByEqualityOperator + throw FailArgumentNotGreaterThan(paramName, threshold == 0.0d, message); + } + } + + /// [RUNTIME] The specified value must not less than the specified lower bound (assert: value >= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterOrEqual(int value, int threshold, [InvokerParameterName] string paramName, string message = null) + { + 
if (value < threshold) + { + throw FailArgumentNotGreaterOrEqual(paramName, threshold == 0, message); + } + } + + /// [RUNTIME] The specified value must not less than the specified lower bound (assert: value >= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterOrEqual(uint value, uint threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value < threshold) + { + throw FailArgumentNotGreaterOrEqual(paramName, threshold == 0, message); + } + } + + /// [RUNTIME] The specified value must not less than the specified lower bound (assert: value >= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterOrEqual(long value, long threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value < threshold) + { + throw FailArgumentNotGreaterOrEqual(paramName, threshold == 0, message); + } + } + + /// [RUNTIME] The specified value must not less than the specified lower bound (assert: value >= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterOrEqual(ulong value, ulong threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value < threshold) + { + throw FailArgumentNotGreaterOrEqual(paramName, threshold == 0, message); + } + } + + /// [RUNTIME] The specified value must not less than the specified lower bound (assert: value >= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void GreaterOrEqual(float value, float threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value < threshold) + { + // ReSharper disable once CompareOfFloatsByEqualityOperator + throw FailArgumentNotGreaterOrEqual(paramName, threshold == 0.0f, message); + } + } + + /// [RUNTIME] The specified value must not less than the specified lower bound (assert: value >= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static 
void GreaterOrEqual(double value, double threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value < threshold) + { + // ReSharper disable once CompareOfFloatsByEqualityOperator + throw FailArgumentNotGreaterOrEqual(paramName, threshold == 0.0d, message); + } + } + + /// [RUNTIME] The specified value must not greater than or equal to the specified upper bound + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessThan(int value, int threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value >= threshold) + { + throw FailArgumentNotLessThan(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than or equal to the specified upper bound + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessThan(uint value, uint threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value >= threshold) + { + throw FailArgumentNotLessThan(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than or equal to the specified uppper bound (assert: value < threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessThan(long value, long threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value >= threshold) + { + throw FailArgumentNotLessThan(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than or equal to the specified uppper bound (assert: value < threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessThan(ulong value, ulong threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value >= threshold) + { + throw FailArgumentNotLessThan(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than or equal to the specified uppper bound (assert: value < threshold) + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessThan(float value, float threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value >= threshold) + { + throw FailArgumentNotLessThan(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than or equal to the specified uppper bound (assert: value < threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessThan(double value, double threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value >= threshold) + { + throw FailArgumentNotLessThan(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than the specified upper bound (assert: value <= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessOrEqual(int value, int threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value > threshold) + { + throw FailArgumentNotLessOrEqual(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than the specified upper bound (assert: value <= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessOrEqual(uint value, uint threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value > threshold) + { + throw FailArgumentNotLessOrEqual(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than the specified upper bound (assert: value <= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessOrEqual(long value, long threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value > threshold) + { + throw FailArgumentNotLessOrEqual(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than the specified upper bound (assert: value <= threshold) + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessOrEqual(ulong value, ulong threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value > threshold) + { + throw FailArgumentNotLessOrEqual(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than the specified upper bound (assert: value <= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessOrEqual(float value, float threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value > threshold) + { + throw FailArgumentNotLessOrEqual(paramName, message); + } + } + + /// [RUNTIME] The specified value must not greater than the specified upper bound (assert: value <= threshold) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void LessOrEqual(double value, double threshold, [InvokerParameterName] string paramName, string message = null) + { + if (value > threshold) + { + throw FailArgumentNotLessOrEqual(paramName, message); + } + } + + /// [RUNTIME] The specified value must not be outside of the specified bounds (assert: min <= value <= max) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Between(int value, int minimumInclusive, int maximumInclusive, [InvokerParameterName] string paramName, string message = null) + { + if (value < minimumInclusive || value > maximumInclusive) + { + throw FailArgumentOutOfBounds(paramName, message); + } + } + + /// [RUNTIME] The specified value must not be outside of the specified bounds (assert: min <= value <= max) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Between(uint value, uint minimumInclusive, uint maximumInclusive, [InvokerParameterName] string paramName, string message = null) + { + if (value < minimumInclusive || value > maximumInclusive) + { + throw FailArgumentOutOfBounds(paramName, message); + } + } + + /// [RUNTIME] The specified value must not be 
outside of the specified bounds (assert: min <= value <= max) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Between(long value, long minimumInclusive, long maximumInclusive, [InvokerParameterName] string paramName, string message = null) + { + if (value < minimumInclusive || value > maximumInclusive) + { + throw FailArgumentOutOfBounds(paramName, message); + } + } + + /// [RUNTIME] The specified value must not be outside of the specified bounds (assert: min <= value <= max) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Between(ulong value, ulong minimumInclusive, ulong maximumInclusive, [InvokerParameterName] string paramName, string message = null) + { + if (value < minimumInclusive || value > maximumInclusive) + { + throw FailArgumentOutOfBounds(paramName, message); + } + } + + /// [RUNTIME] The specified value must not be outside of the specified bounds (assert: min <= value <= max) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Between(float value, float minimumInclusive, float maximumInclusive, [InvokerParameterName] string paramName, string message = null) + { + if (value < minimumInclusive || value > maximumInclusive) + { + throw FailArgumentOutOfBounds(paramName, message); + } + } + + /// [RUNTIME] The specified value must not be outside of the specified bounds (assert: min <= value <= max) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Between(double value, double minimumInclusive, double maximumInclusive, [InvokerParameterName] string paramName, string message = null) + { + if (value < minimumInclusive || value > maximumInclusive) + { + throw FailArgumentOutOfBounds(paramName, message); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Multiple(int value, int multiple, [InvokerParameterName] string paramName, string message = null) + { + if (value % multiple != 0) + { + throw FailArgumentNotMultiple(paramName, 
message); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Multiple(uint value, uint multiple, [InvokerParameterName] string paramName, string message = null) + { + if (value % multiple != 0) + { + throw FailArgumentNotMultiple(paramName, message); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Multiple(long value, long multiple, [InvokerParameterName] string paramName, string message = null) + { + if (value % multiple != 0) + { + throw FailArgumentNotMultiple(paramName, message); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void Multiple(ulong value, ulong multiple, [InvokerParameterName] string paramName, string message = null) + { + if (value % multiple != 0) + { + throw FailArgumentNotMultiple(paramName, message); + } + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentNotMultiple(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentException), ContractMessages.ValueMustBeMultiple, message, paramName, ContractMessages.ConditionArgMultiple); + } + + #endregion + + #region Contract.DoesNotOverflow + + /// Vérifie qu'une couple index/count ne débord pas d'un buffer, et qu'il n'est pas null + /// Buffer (qui ne doit pas être null) + /// Index (qui ne doit pas être négatif) + /// Taille (qui ne doit pas être négative) + /// + [AssertionMethod] + public static void DoesNotOverflow([AssertionCondition(AssertionConditionType.IS_NOT_NULL)] string buffer, int index, int count, string message = null) + { + if (buffer == null) throw FailArgumentNull("buffer", message); + if (index < 0 || count < 0) throw FailArgumentNotNonNegative(index < 0 ? 
"index" : "count", message); + if ((buffer.Length - index) < count) throw FailBufferTooSmall("count", message); + } + + /// Vérifie qu'une couple index/count ne débord pas d'un buffer, et qu'il n'est pas null + /// Taille du buffer + /// Index (qui ne doit pas être négatif) + /// Taille (qui ne doit pas être négative) + [AssertionMethod] + public static void DoesNotOverflow(int bufferLength, int offset, int count) + { + if (offset < 0 || count < 0) throw FailArgumentNotNonNegative(offset < 0 ? "offset" : "count", null); + if ((bufferLength - offset) < count) throw FailBufferTooSmall("count", null); + } + + /// Vérifie qu'une couple index/count ne débord pas d'un buffer, et qu'il n'est pas null + /// Taille du buffer + /// Index (qui ne doit pas être négatif) + /// Taille (qui ne doit pas être négative) + [AssertionMethod] + public static void DoesNotOverflow(long bufferLength, long offset, long count) + { + if (offset < 0 || count < 0) throw FailArgumentNotNonNegative(offset < 0 ? "offset" : "count", null); + if ((bufferLength - offset) < count) throw FailBufferTooSmall("count", null); + } + + /// Vérifie qu'une couple index/count ne débord pas d'un buffer, et qu'il n'est pas null + /// Buffer (qui ne doit pas être null) + /// Index (qui ne doit pas être négatif) + /// Taille (qui ne doit pas être négative) + /// + [AssertionMethod] + public static void DoesNotOverflow([AssertionCondition(AssertionConditionType.IS_NOT_NULL)] TElement[] buffer, int offset, int count, string message = null) + { + if (buffer == null) throw FailArgumentNull("buffer", message); + if (offset < 0 || count < 0) throw FailArgumentNotNonNegative(offset < 0 ? 
"offset" : "count", message); + if ((buffer.Length - offset) < count) throw FailBufferTooSmall("count", message); + } + + /// Vérifie qu'une couple index/count ne débord pas d'un buffer, et qu'il n'est pas null + /// Buffer (qui ne doit pas être null) + /// + public static void DoesNotOverflow(ArraySegment buffer, string message = null) + { + if (buffer.Offset < 0 || buffer.Count < 0) throw FailArgumentNotNonNegative(buffer.Offset < 0 ? "offset" : "count", message); + if (buffer.Count > 0) + { + if (buffer.Array == null) throw FailBufferNull("buffer", message); + if ((buffer.Array.Length - buffer.Offset) < buffer.Count) throw FailBufferTooSmall("count", message); + } + else + { + if (buffer.Array != null && buffer.Array.Length < buffer.Offset) throw FailBufferTooSmall("count", message); + } + } + + /// Vérifie qu'une couple index/count ne débord pas d'un buffer, et qu'il n'est pas null + /// Buffer (qui ne doit pas être null) + /// Index (qui ne doit pas être négatif) + /// Taille (qui ne doit pas être négative) + /// + [AssertionMethod] + public static void DoesNotOverflow([AssertionCondition(AssertionConditionType.IS_NOT_NULL)] ICollection buffer, int offset, int count, string message = null) + { + if (buffer == null) throw FailArgumentNull("buffer", message); + if (offset < 0 || count < 0) throw FailArgumentNotNonNegative(offset < 0 ? "offset" : "count", message); + if ((buffer.Count - offset) < count) throw FailBufferTooSmall("count", message); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailBufferTooSmall(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentException), ContractMessages.OffsetMustBeWithinBuffer, message, paramName, ContractMessages.ConditionArgBufferOverflow); + } + + #endregion + + #endregion + + #region Internal Helpers... 
+ + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception FailArgumentNull(string paramName, string message = null) + { + return ReportFailure(typeof(ArgumentNullException), ContractMessages.ValueCannotBeNull, message, paramName, ContractMessages.ConditionNotNull); + } + + /// Déclenche une exception suite à l'échec d'une condition + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + internal static Exception ReportFailure(Type exceptionType, string msg, string userMessage, string paramName, string conditionTxt) + { + if (conditionTxt != null && conditionTxt.IndexOf('{') >= 0) + { // il y a peut etre un "{0}" dans la condition qu'il faut remplacer le nom du paramètre + conditionTxt = string.Format(conditionTxt, paramName ?? "value"); + } + + string str = SRC.ContractHelper.RaiseContractFailedEvent(SDC.ContractFailureKind.Precondition, userMessage ?? msg, conditionTxt, null); + // si l'appelant retourne null, c'est qu'il a lui même traité l'incident ... + // mais ca n'empeche pas qu'on doit quand même stopper l'execution ! +#if DEBUG + if (str != null) + { + // note: on ne spam les logs si on est en train de unit tester ! (vu qu'on va provoquer intentionellement plein d'erreurs!) + if (!IsUnitTesting) + { + System.Diagnostics.Debug.Fail(str); + } + } +#endif + string description = userMessage ?? str ?? msg; + + var exception = ThrowHelper.TryMapToKnownException(exceptionType, description, paramName); + + if (exception == null) + { // c'est un type compliqué ?? + exception = ThrowHelper.TryMapToComplexException(exceptionType, description, paramName); + } + + if (exception == null) + { // uh? on va quand même envoyer une exception proxy ! 
+ exception = FallbackForUnknownException(description, paramName); + } + + return exception; + } + + [NotNull] + private static Exception FallbackForUnknownException(string description, string paramName) + { +#if DEBUG + if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break(); // README: Si vous tombez ici, c'est que l'appelant a spécifié un type d'Exception qu'on n'arrive pas a construire! il faudrait peut être changer le type... +#endif + if (paramName != null) + return new ArgumentException(description, paramName); + else + return new InvalidOperationException(description); + } + + /// Signale l'échec d'une condition en déclenchant une ContractException + /// Si un debugger est attaché, un breakpoint est déclenché. Sinon, une ContractException est générée + [Pure, NotNull] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + [MethodImpl(MethodImplOptions.NoInlining)] + [DebuggerNonUserCode] + internal static Exception RaiseContractFailure(SDC.ContractFailureKind kind, string msg) + { + string str = SRC.ContractHelper.RaiseContractFailedEvent(kind, msg, null, null); + if (str != null) + { + if (IsUnitTesting) + { + // throws une AssertionException si on a réussi a se connecter avec NUnit + var ex = MapToNUnitAssertion(str); +#if DEBUG + // README: Si vous breakpointez ici, il faut remonter plus haut dans la callstack, et trouver la fonction invoque Contract.xxx(...) + if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break(); + // note: à partir de VS 2015 Up2, [DebuggerNonUserCode] n'est plus respecté si la regkey AlwaysEnableExceptionCallbacksOutsideMyCode n'est pas égale à 1, pour améliorer les perfs. 
+ // cf "How to Suppress Ignorable Exceptions with DebuggerNonUserCode" dans https://blogs.msdn.microsoft.com/visualstudioalm/2016/02/12/using-the-debuggernonusercode-attribute-in-visual-studio-2015/ +#endif + if (ex != null) return ex; + // sinon, on continue + } +#if DEBUG + else if (kind == SDC.ContractFailureKind.Assert && Debugger.IsAttached) + { + // uniquement si on F5 depuis VS, car sinon cela cause problèmes avec le TestRunner de R# (qui popup les assertion fail!) + System.Diagnostics.Debug.Fail(str); + } +#endif + + return new ContractException(kind, str, msg, null, null); + } + //note: on doit quand même retourner quelque chose! + return new ContractException(kind, "Contract Failed", msg, null, null); + } + + #endregion + + /// Contracts that are only evaluted in Debug builds + public static class Debug + { + // ReSharper disable MemberHidesStaticFromOuterClass + + // contains most of the same contracts as the main class, but only for Debug builds. + // ie: Contract.NotNull(...) will run in both Debug and Release builds, while Contract.Debug.NotNull(...) 
will NOT be evaluated in Release builds + + [AssertionMethod, Conditional("DEBUG")] + public static void NotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] string value, + [InvokerParameterName] string paramName + ) + { +#if DEBUG + if (value == null) + { + throw FailArgumentNull(paramName, null); + } +#endif + } + + [AssertionMethod, Conditional("DEBUG")] + public static void NotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] string value, + [InvokerParameterName] string paramName, + string message) + { +#if DEBUG + if (value == null) + { + throw FailArgumentNull(paramName, message); + } +#endif + } + + [AssertionMethod, Conditional("DEBUG")] + public static void NotNullOrEmpty( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] string value, + [InvokerParameterName] string paramName + ) + { +#if DEBUG + if (string.IsNullOrEmpty(value)) + { + throw FailArgumentNull(paramName, null); + } +#endif + } + + [AssertionMethod, Conditional("DEBUG")] + public static void NotNullOrEmpty( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL)] string value, + [InvokerParameterName] string paramName, + string message + ) + { +#if DEBUG + if (string.IsNullOrEmpty(value)) + { + throw FailArgumentNull(paramName, message); + } +#endif + } + + [AssertionMethod, Conditional("DEBUG")] + public static void NotNull( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL), NoEnumeration] T value, + [InvokerParameterName] string paramName + ) + where T : class + { +#if DEBUG + if (value == null) + { + throw FailArgumentNull(paramName, null); + } +#endif + } + + [AssertionMethod, Conditional("DEBUG")] + public static void NotNullAllowStructs( + [AssertionCondition(AssertionConditionType.IS_NOT_NULL), NoEnumeration] T value, + [InvokerParameterName] string paramName + ) + { +#if DEBUG + if (value == null) + { + throw FailArgumentNull(paramName, null); + } +#endif + } + + /// [DEBUG ONLY] Vérifie qu'une condition est toujours vrai, dans le 
body dans une méthode + /// Condition qui ne doit jamais être fausse + /// Ne fait rien si la condition est vrai. Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Assert([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition) + { +#if DEBUG + if (!condition) throw RaiseContractFailure(SDC.ContractFailureKind.Assert, null); +#endif + } + + /// [DEBUG ONLY] Vérifie qu'une condition est toujours vrai, dans le body dans une méthode + /// Condition qui ne doit jamais être fausse + /// Message décrivant l'erreur (optionnel) + /// Ne fait rien si la condition est vrai. Sinon déclenche une ContractException, après avoir essayé de breakpointer le debugger + [Conditional("DEBUG")] + [AssertionMethod, MethodImpl(MethodImplOptions.AggressiveInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Assert([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, string userMessage) + { +#if DEBUG + if (!condition) throw RaiseContractFailure(SDC.ContractFailureKind.Assert, userMessage); +#endif + } + + /// [DEBUG ONLY] Déclenche incontionellement une assertion + [Conditional("DEBUG")] + [AssertionMethod, ContractAnnotation("=>halt"), MethodImpl(MethodImplOptions.NoInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Fail() + { +#if DEBUG + throw RaiseContractFailure(SDC.ContractFailureKind.Assert, null); +#endif + } + + /// [DEBUG ONLY] Déclenche incontionellement une assertion + [Conditional("DEBUG")] + [AssertionMethod, ContractAnnotation("=>halt"), MethodImpl(MethodImplOptions.NoInlining)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)] + public static void Fail(string userMessage) + { +#if DEBUG + throw 
RaiseContractFailure(SDC.ContractFailureKind.Assert, userMessage); +#endif + } + + // ReSharper restore MemberHidesStaticFromOuterClass + + } + + } + +} diff --git a/FoundationDB.Client/Shared/ContractException.cs b/FoundationDB.Client/Shared/ContractException.cs new file mode 100644 index 000000000..3240f72ba --- /dev/null +++ b/FoundationDB.Client/Shared/ContractException.cs @@ -0,0 +1,66 @@ +#region Copyright (c) 2013-2016, Doxense SAS. All rights reserved. +// See License.MD for license information +#endregion + +namespace Doxense.Diagnostics.Contracts +{ + using System; + using System.Runtime.Serialization; + using System.Security; + using SDC = System.Diagnostics.Contracts; + + [Serializable] + public sealed class ContractException : Exception + { + // copie de l'implémentation "internal" de System.Data.Contracts.ContractException + + #region Constructors... + + private ContractException() + { + base.HResult = -2146233022; + } + + public ContractException(SDC.ContractFailureKind kind, string failure, string userMessage, string condition, Exception innerException) + : base(failure, innerException) + { + base.HResult = -2146233022; + this.Kind = kind; + this.UserMessage = userMessage; + this.Condition = condition; + } + + private ContractException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + this.Kind = (SDC.ContractFailureKind)info.GetInt32("Kind"); + this.UserMessage = info.GetString("UserMessage"); + this.Condition = info.GetString("Condition"); + } + + #endregion + + #region Public Properties... 
+ + public string Condition { get; } + + public SDC.ContractFailureKind Kind { get; } + + public string UserMessage { get; } + + public string Failure { get { return this.Message; } } + + #endregion + + [SecurityCritical] + public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + info.AddValue("Kind", (int) this.Kind); + info.AddValue("UserMessage", this.UserMessage); + info.AddValue("Condition", this.Condition); + } + + } + +} diff --git a/FoundationDB.Client/Converters/ConversionHelper.cs b/FoundationDB.Client/Shared/Converters/ComparisonHelper.cs similarity index 70% rename from FoundationDB.Client/Converters/ConversionHelper.cs rename to FoundationDB.Client/Shared/Converters/ComparisonHelper.cs index 6a5ad55e0..62da240e1 100644 --- a/FoundationDB.Client/Converters/ConversionHelper.cs +++ b/FoundationDB.Client/Shared/Converters/ComparisonHelper.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,21 +26,22 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client.Converters +namespace Doxense.Runtime.Converters { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Globalization; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Helper classe used to compare object of "compatible" types internal static class ComparisonHelper { /// Pair of types that can be used as a key in a dictionary - internal struct TypePair + internal readonly struct TypePair : IEquatable { public readonly Type Left; public readonly Type Right; @@ -53,18 +54,18 @@ public TypePair(Type left, Type right) public override bool Equals(object obj) { - if (obj == null) return false; - TypePair other = (TypePair)obj; - return this.Left == other.Left && this.Right == other.Right; + return obj is TypePair tp && Equals(tp); + } + + public bool Equals(TypePair other) + { + return this.Left == other.Left + && this.Right == other.Right; } public override int GetHashCode() { - // note: we cannot just xor both hash codes, because if left and right are the same, we will return 0 - int h = this.Left.GetHashCode(); - h = (h >> 13) | (h << 19); - h ^= this.Right.GetHashCode(); - return h; + return HashCodes.Combine(this.Left.GetHashCode(), this.Right.GetHashCode()); } } @@ -89,77 +90,80 @@ public int GetHashCode(TypePair obj) /// Cache of all the comparison lambda for a pair of types /// Contains lambda that can compare two objects (of different types) for "similarity" - private static readonly ConcurrentDictionary> EqualityComparers = new ConcurrentDictionary>(ComparisonHelper.TypePairComparer.Default); + private static readonly ConcurrentDictionary> EqualityComparers = new ConcurrentDictionary>(TypePairComparer.Default); /// Tries to convert an object into an equivalent string representation (for equality comparison) /// 
Object to adapt /// String equivalent of the object - internal static string TryAdaptToString(object value) + public static string TryAdaptToString(object value) { - if (value == null) return null; - - var s = value as string; - if (s != null) return s; - - if (value is char) return new string((char)value, 1); - - if (value is Slice) return ((Slice) value).ToAscii(); //REVIEW: or ToUnicode() ? - - var bstr = value as byte[]; - if (bstr != null) return Slice.Create(bstr).ToAscii(); //REVIEW: or ToUnicode() ? - - var fmt = value as IFormattable; - if (fmt != null) return fmt.ToString(null, CultureInfo.InvariantCulture); - + switch (value) + { + case null: + return null; + case string s: + return s; + case char c: + return new string(c, 1); + case Slice sl: + return sl.ToStringUtf8(); //BUGBUG: ASCII? Ansi? UTF8? + case byte[] bstr: + return bstr.AsSlice().ToStringUtf8(); //BUGBUG: ASCII? Ansi? UTF8? + case IFormattable fmt: + return fmt.ToString(null, CultureInfo.InvariantCulture); + } return null; } /// Tries to convert an object into an equivalent double representation (for equality comparison) /// Object to adapt /// Type of the object to adapt - /// Double equivalent of the object - internal static double? TryAdaptToDecimal(object value, [NotNull] Type type) + /// Double equivalent of the object + /// True if is compatible with a decimal. 
False if the type is not compatible + public static bool TryAdaptToDecimal(object value, [NotNull] Type type, out double result) { if (value != null) { switch (Type.GetTypeCode(type)) { - case TypeCode.Int16: return (short)value; - case TypeCode.UInt16: return (ushort)value; - case TypeCode.Int32: return (int)value; - case TypeCode.UInt32: return (uint)value; - case TypeCode.Int64: return (long)value; - case TypeCode.UInt64: return (ulong)value; - case TypeCode.Single: return (float)value; - case TypeCode.Double: return (double)value; + case TypeCode.Int16: { result = (short)value; return true; } + case TypeCode.UInt16: { result = (ushort)value; return true; } + case TypeCode.Int32: { result = (int)value; return true; } + case TypeCode.UInt32: { result = (uint)value; return true; } + case TypeCode.Int64: { result = (long)value; return true; } + case TypeCode.UInt64: { result = (ulong)value; return true; } + case TypeCode.Single: { result = (float)value; return true; } + case TypeCode.Double: { result = (double)value; return true; } //TODO: string? } } - return null; + result = 0; + return false; } /// Tries to convert an object into an equivalent Int64 representation (for equality comparison) /// Object to adapt /// Type of the object to adapt - /// Int64 equivalent of the object - internal static long? TryAdaptToInteger(object value, [NotNull] Type type) + /// Int64 equivalent of the object + /// True if is compatible with a decimal. 
False if the type is not compatible + public static bool TryAdaptToInteger(object value, [NotNull] Type type, out long result) { if (value != null) { switch (Type.GetTypeCode(type)) { - case TypeCode.Int16: return (short)value; - case TypeCode.UInt16: return (ushort)value; - case TypeCode.Int32: return (int)value; - case TypeCode.UInt32: return (uint)value; - case TypeCode.Int64: return (long)value; - case TypeCode.UInt64: return (long?)(ulong)value; - case TypeCode.Single: return (long?)(float)value; - case TypeCode.Double: return (long?)(double)value; - //TODO: string? + case TypeCode.Int16: { result = (short)value; return true; } + case TypeCode.UInt16: { result = (ushort)value; return true; } + case TypeCode.Int32: { result = (int)value; return true; } + case TypeCode.UInt32: { result = (uint)value; return true; } + case TypeCode.Int64: { result = (long)value; return true; } + case TypeCode.UInt64: { result = (long)(ulong)value; return true; } + case TypeCode.Single: { result = (long)(float)value; return true; } + case TypeCode.Double: { result = (long)(double)value; return true; } } } - return null; + result = 0; + return false; } [NotNull] @@ -198,31 +202,41 @@ private static Func CreateTypeComparator([NotNull] Type t1 if (IsStringType(t1) || IsStringType(t2)) { - return (x, y) => - { - if (x == null) return y == null; - if (y == null) return false; - return object.ReferenceEquals(x, y) || (TryAdaptToString(x) == TryAdaptToString(y)); - }; + return (x, y) => x == null ? y == null : y != null && (object.ReferenceEquals(x, y) || (TryAdaptToString(x) == TryAdaptToString(y))); } if (IsNumericType(t1) || IsNumericType(t2)) { if (IsDecimalType(t1) || IsDecimalType(t2)) { - return (x, y) => x == null ? y == null : y != null && TryAdaptToDecimal(x, t1) == TryAdaptToDecimal(y, t2); + return (x, y) => + { + double d1, d2; + // ReSharper disable once CompareOfFloatsByEqualityOperator + return x == null ? 
y == null : y != null && TryAdaptToDecimal(x, t1, out d1) && TryAdaptToDecimal(y, t2, out d2) && d1 == d2; + }; } + else + { + //TODO: handle UInt64 with values > long.MaxValue that will overflow to negative values when casted down to Int64 + return (x, y) => + { + long l1, l2; + return x == null ? y == null : y != null && TryAdaptToInteger(x, t1, out l1) && TryAdaptToInteger(y, t2, out l2) && l1 == l2; + }; + } + } - //TODO: handle UInt64 with values > long.MaxValue that will overflow to negative values when casted down to Int64 - - return (x, y) => x == null ? y == null : y != null && TryAdaptToInteger(x, t1) == TryAdaptToInteger(y, t2); + if (typeof(ITuple).IsAssignableFrom(t1) && typeof(ITuple).IsAssignableFrom(t2)) + { + return (x, y) => x == null ? y == null : y != null && ((ITuple) x).Equals((ITuple) y); } //TODO: some other way to compare ? return (x, y) => false; } - internal static Func GetTypeComparator(Type t1, Type t2) + public static Func GetTypeComparator(Type t1, Type t2) { var pair = new TypePair(t1, t2); Func comparator; @@ -245,7 +259,7 @@ internal static Func GetTypeComparator(Type t1, Type t2) /// AreSimilar(false, 0) => true /// AreSimilar(true, 1) => true ///
- internal static bool AreSimilar(object x, object y) + public static bool AreSimilar(object x, object y) { if (object.ReferenceEquals(x, y)) return true; if (x == null || y == null) return false; @@ -255,7 +269,7 @@ internal static bool AreSimilar(object x, object y) return comparator(x, y); } - internal static bool AreSimilar(T1 x, T2 y) + public static bool AreSimilar(T1 x, T2 y) { var comparator = GetTypeComparator(typeof(T1), typeof(T2)); Contract.Requires(comparator != null); @@ -317,6 +331,7 @@ private static bool IsNumericType([NotNull] Type t) private static bool IsDecimalType(Type t) { return t == typeof(double) || t == typeof(float); + //TODO: System.Decimal? } } diff --git a/FoundationDB.Client/Converters/IFdbConverter.cs b/FoundationDB.Client/Shared/Converters/ITypeConverter.cs similarity index 84% rename from FoundationDB.Client/Converters/IFdbConverter.cs rename to FoundationDB.Client/Shared/Converters/ITypeConverter.cs index 65a9fa941..fceb06dff 100644 --- a/FoundationDB.Client/Converters/IFdbConverter.cs +++ b/FoundationDB.Client/Shared/Converters/ITypeConverter.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,27 +26,35 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client.Converters +namespace Doxense.Runtime.Converters { using System; + using JetBrains.Annotations; /// Base class of all value converters - public interface IFdbConverter + public interface ITypeConverter { + /// Type of the instance to be converted + [NotNull] Type Source { get; } + + /// Type of the result of the conversion + [NotNull] Type Destination { get; } + [Pure] object ConvertBoxed(object value); } /// Class that can convert values of type into values of type /// Source type /// Destination type - public interface IFdbConverter : IFdbConverter + public interface ITypeConverter : ITypeConverter { /// Converts a into a /// Value to convert /// Converted value + [Pure] TDestination Convert(TSource value); } diff --git a/FoundationDB.Client/Converters/SimilarValueComparer.cs b/FoundationDB.Client/Shared/Converters/SimilarValueComparer.cs similarity index 90% rename from FoundationDB.Client/Converters/SimilarValueComparer.cs rename to FoundationDB.Client/Shared/Converters/SimilarValueComparer.cs index 8d52b855d..af2ccd86d 100644 --- a/FoundationDB.Client/Converters/SimilarValueComparer.cs +++ b/FoundationDB.Client/Shared/Converters/SimilarValueComparer.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,16 +26,18 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client.Converters +namespace Doxense.Runtime.Converters { using System; using System.Collections; using System.Collections.Generic; + using JetBrains.Annotations; /// Object comparer that returns true if both values are "similar" /// This comparer SHOULD NOT be used in a Dictioanry, because it violates on of the conditions: Two objects could be considered equal, but have different hashcode! 
- internal class SimilarValueComparer : IEqualityComparer, IEqualityComparer + internal sealed class SimilarValueComparer : IEqualityComparer, IEqualityComparer { + [NotNull] public static readonly IEqualityComparer Default = new SimilarValueComparer(); private SimilarValueComparer() @@ -48,7 +50,7 @@ bool IEqualityComparer.Equals(object x, object y) int IEqualityComparer.GetHashCode(object obj) { - return obj == null ? -1 : obj.GetHashCode(); + return obj?.GetHashCode() ?? -1; } bool IEqualityComparer.Equals(object x, object y) diff --git a/FoundationDB.Client/Converters/FdbConverters.cs b/FoundationDB.Client/Shared/Converters/TypeConverters.cs similarity index 58% rename from FoundationDB.Client/Converters/FdbConverters.cs rename to FoundationDB.Client/Shared/Converters/TypeConverters.cs index c5dae30e9..0f960ffdc 100644 --- a/FoundationDB.Client/Converters/FdbConverters.cs +++ b/FoundationDB.Client/Shared/Converters/TypeConverters.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,35 +26,45 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client.Converters +namespace Doxense.Runtime.Converters { - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Globalization; using System.Linq.Expressions; + using System.Reflection; + using System.Runtime.CompilerServices; using System.Threading; + using Doxense.Diagnostics.Contracts; + using Doxense.Serialization; + using JetBrains.Annotations; /// Helper class to convert object from one type to another - public static class FdbConverters + public static class TypeConverters { + + /// Cache used to make the JIT inline all converters from ValueType to ValueType + private static class Cache + { + [NotNull] + public static readonly ITypeConverter Converter = GetConverter(); + } + #region Identity /// Simple converter where the source and destination types are the same /// Source and Destination type - private class Identity : IFdbConverter + private sealed class Identity : ITypeConverter { private static readonly bool IsReferenceType = typeof(T).IsClass; //TODO: nullables ? 
- public static readonly IFdbConverter Default = new Identity(); + public static readonly ITypeConverter Default = new Identity(); - public static readonly Func FromObject = (Func)FdbConverters.CreateCaster(typeof(T)); + public static readonly Func FromObject = (Func)TypeConverters.CreateCaster(typeof(T)); - public Type Source { get { return typeof(T); } } + public Type Source => typeof(T); - public Type Destination { get { return typeof(T); } } + public Type Destination => typeof(T); public T Convert(T value) { @@ -78,55 +88,56 @@ public static T Cast(object value) #region Anonymous /// Simple converter that wraps a lambda function - /// Source type - /// Destination type - private class Anonymous : IFdbConverter + /// Source type + /// Destination type + private sealed class Anonymous : ITypeConverter { - private Func Converter { get; set; } + [NotNull] + private Func Converter { get; } - public Anonymous([NotNull] Func converter) + public Anonymous([NotNull] Func converter) { - if (converter == null) throw new ArgumentNullException("converter"); + Contract.NotNull(converter, nameof(converter)); this.Converter = converter; } - public Type Source { get { return typeof(T); } } + public Type Source => typeof(TInput); - public Type Destination { get { return typeof(R); } } + public Type Destination => typeof(TOutput); - public R Convert(T value) + public TOutput Convert(TInput value) { return this.Converter(value); } public object ConvertBoxed(object value) { - return (object) this.Converter(Identity.FromObject(value)); + return this.Converter(Identity.FromObject(value)); } } - private class SubClass : IFdbConverter + private sealed class SubClass : ITypeConverter { - public static readonly IFdbConverter Default = new SubClass(); + public static readonly ITypeConverter Default = new SubClass(); private SubClass() { - if (!typeof(R).IsAssignableFrom(typeof(T))) throw new InvalidOperationException(String.Format("Type {0} is not a subclass of {1}", typeof(T).Name, 
typeof(R).Name)); + if (!typeof(TOutput).IsAssignableFrom(typeof(TInput))) throw new InvalidOperationException($"Type {typeof(TInput).Name} is not a subclass of {typeof(TOutput).Name}"); } - public R Convert(T value) + public TOutput Convert(TInput value) { - return (R)(object)value; + return (TOutput)(object)value; } public Type Source { - get { return typeof(T); } + get { return typeof(TInput); } } public Type Destination { - get { return typeof(R); } + get { return typeof(TOutput); } } public object ConvertBoxed(object value) @@ -138,22 +149,22 @@ public object ConvertBoxed(object value) #endregion /// Static ctor that initialize the default converters - static FdbConverters() + static TypeConverters() { RegisterDefaultConverters(); } /// Map of all known converters from T to R /// No locking required, because all changes will replace this instance with a new Dictionary - private static Dictionary Converters = new Dictionary(ComparisonHelper.TypePairComparer.Default); + private static Dictionary Converters = new Dictionary(ComparisonHelper.TypePairComparer.Default); /// Register all the default converters private static void RegisterDefaultConverters() { //TODO: there is too much generic type combinations! need to refactor this ... - RegisterUnsafe((value) => Slice.FromByte(value ? (byte)1 : default(byte))); - RegisterUnsafe((value) => Slice.FromByte(value ? (byte)1 : default(byte)).GetBytes()); + RegisterUnsafe((value) => Slice.FromByte(value ? (byte) 1 : default(byte))); + RegisterUnsafe((value) => Slice.FromByte(value ? (byte) 1 : default(byte)).GetBytes()); RegisterUnsafe((value) => value ? "true" : "false"); RegisterUnsafe((value) => value ? (sbyte)1 : default(sbyte)); RegisterUnsafe((value) => value ? (byte)1 : default(byte)); @@ -163,12 +174,13 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => value ? 1U : default(uint)); RegisterUnsafe((value) => value ? 1L : default(long)); RegisterUnsafe((value) => value ? 
1UL : default(ulong)); - RegisterUnsafe((value) => value ? 0.0d : 1.0d); - RegisterUnsafe((value) => value ? 0.0f : 1.0f); + RegisterUnsafe((value) => value ? 1.0d : default(double)); + RegisterUnsafe((value) => value ? 1.0f : default(float)); + RegisterUnsafe((value) => value ? 1m : default(decimal)); - RegisterUnsafe((value) => Slice.FromInt32(value)); + RegisterUnsafe(Slice.FromInt32); RegisterUnsafe((value) => Slice.FromInt32(value).GetBytes()); - RegisterUnsafe((value) => value.ToString(CultureInfo.InvariantCulture)); //TODO: string table! + RegisterUnsafe(StringConverters.ToString); RegisterUnsafe((value) => value != 0); RegisterUnsafe((value) => checked((sbyte)value)); RegisterUnsafe((value) => checked((byte)value)); @@ -178,12 +190,12 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => value); RegisterUnsafe((value) => (ulong)value); RegisterUnsafe((value) => value); - RegisterUnsafe((value) => checked((float)value)); - RegisterUnsafe((value) => (FdbTupleAlias)value); + RegisterUnsafe((value) => value); // possible loss of precision + RegisterUnsafe((value) => value); - RegisterUnsafe((value) => Slice.FromUInt64(value)); - RegisterUnsafe((value) => Slice.FromUInt64(value).GetBytes()); - RegisterUnsafe((value) => value.ToString(CultureInfo.InvariantCulture)); //TODO: string table! 
+ RegisterUnsafe(Slice.FromUInt32); + RegisterUnsafe((value) => Slice.FromUInt32(value).GetBytes()); + RegisterUnsafe(StringConverters.ToString); RegisterUnsafe((value) => value != 0); RegisterUnsafe((value) => checked((sbyte)value)); RegisterUnsafe((value) => checked((byte)value)); @@ -193,11 +205,12 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => value); RegisterUnsafe((value) => value); RegisterUnsafe((value) => value); - RegisterUnsafe((value) => checked((float)value)); + RegisterUnsafe((value) => value); // possible loss of precision + RegisterUnsafe((value) => value); - RegisterUnsafe((value) => Slice.FromInt64(value)); + RegisterUnsafe(Slice.FromInt64); RegisterUnsafe((value) => Slice.FromInt64(value).GetBytes()); - RegisterUnsafe((value) => value.ToString(CultureInfo.InvariantCulture)); //TODO: string table! + RegisterUnsafe(StringConverters.ToString); RegisterUnsafe((value) => value != 0); RegisterUnsafe((value) => checked((sbyte)value)); RegisterUnsafe((value) => checked((byte)value)); @@ -206,15 +219,16 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => checked((int)value)); RegisterUnsafe((value) => (uint)value); RegisterUnsafe((value) => (ulong)value); - RegisterUnsafe((value) => checked((double)value)); - RegisterUnsafe((value) => checked((float)value)); - RegisterUnsafe((value) => TimeSpan.FromTicks(value)); + RegisterUnsafe((value) => value); // possible loss of precision + RegisterUnsafe((value) => value); // possible loss of precision + RegisterUnsafe(TimeSpan.FromTicks); RegisterUnsafe((value) => new Uuid64(value)); RegisterUnsafe((value) => new System.Net.IPAddress(value)); + RegisterUnsafe((value) => value); - RegisterUnsafe((value) => Slice.FromUInt64(value)); + RegisterUnsafe(Slice.FromUInt64); RegisterUnsafe((value) => Slice.FromUInt64(value).GetBytes()); - RegisterUnsafe((value) => value.ToString(CultureInfo.InvariantCulture)); //TODO: string table! 
+ RegisterUnsafe(StringConverters.ToString); RegisterUnsafe((value) => value != 0); RegisterUnsafe((value) => checked((sbyte)value)); RegisterUnsafe((value) => checked((byte)value)); @@ -223,14 +237,15 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => checked((int)value)); RegisterUnsafe((value) => checked((uint)value)); RegisterUnsafe((value) => checked((long)value)); - RegisterUnsafe((value) => checked((double)value)); - RegisterUnsafe((value) => checked((float)value)); + RegisterUnsafe((value) => value); // possible loss of precision + RegisterUnsafe((value) => value); // possible loss of precision RegisterUnsafe((value) => new Uuid64(value)); - RegisterUnsafe((value) => TimeSpan.FromTicks(checked((long)value))); + RegisterUnsafe((value) => TimeSpan.FromTicks(checked((long) value))); + RegisterUnsafe((value) => value); - RegisterUnsafe((value) => Slice.FromInt32(value)); - RegisterUnsafe((value) => Slice.FromInt32(value).GetBytes()); - RegisterUnsafe((value) => value.ToString(CultureInfo.InvariantCulture)); //TODO: string table! + RegisterUnsafe(Slice.FromInt16); + RegisterUnsafe((value) => Slice.FromInt16(value).GetBytes()); + RegisterUnsafe((value) => StringConverters.ToString(value)); RegisterUnsafe((value) => value != 0); RegisterUnsafe((value) => checked((sbyte)value)); RegisterUnsafe((value) => checked((byte)value)); @@ -241,11 +256,11 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => checked ((ulong)value)); RegisterUnsafe((value) => value); RegisterUnsafe((value) => value); - RegisterUnsafe((value) => (FdbTupleAlias)value); + RegisterUnsafe((value) => value); - RegisterUnsafe((value) => Slice.FromUInt64(value)); - RegisterUnsafe((value) => Slice.FromUInt64(value).GetBytes()); - RegisterUnsafe((value) => value.ToString(CultureInfo.InvariantCulture)); //TODO: string table! 
+ RegisterUnsafe(Slice.FromUInt16); + RegisterUnsafe((value) => Slice.FromUInt16(value).GetBytes()); + RegisterUnsafe((value) => StringConverters.ToString(value)); RegisterUnsafe((value) => value != 0); RegisterUnsafe((value) => checked((byte)value)); RegisterUnsafe((value) => checked((sbyte)value)); @@ -256,10 +271,11 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => value); RegisterUnsafe((value) => value); RegisterUnsafe((value) => value); + RegisterUnsafe((value) => value); - RegisterUnsafe((value) => Slice.FromInt32(value)); - RegisterUnsafe((value) => Slice.FromInt32(value).GetBytes()); - RegisterUnsafe((value) => value.ToString(CultureInfo.InvariantCulture)); //TODO: string table! + RegisterUnsafe(Slice.FromByte); + RegisterUnsafe((value) => Slice.FromByte(value).GetBytes()); + RegisterUnsafe((value) => StringConverters.ToString(value)); RegisterUnsafe((value) => value != 0); RegisterUnsafe((value) => checked((sbyte)value)); RegisterUnsafe((value) => value); @@ -270,7 +286,7 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => value); RegisterUnsafe((value) => value); RegisterUnsafe((value) => value); - RegisterUnsafe((value) => (FdbTupleAlias)value); + RegisterUnsafe((value) => value); RegisterUnsafe((value) => Slice.FromInt64(value)); RegisterUnsafe((value) => Slice.FromInt64(value).GetBytes()); @@ -285,8 +301,9 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => checked((ulong)value)); RegisterUnsafe((value) => value); RegisterUnsafe((value) => value); + RegisterUnsafe((value) => value); - RegisterUnsafe((value) => Slice.FromSingle(value)); + RegisterUnsafe(Slice.FromSingle); RegisterUnsafe((value) => Slice.FromSingle(value).GetBytes()); RegisterUnsafe((value) => value.ToString("R", CultureInfo.InvariantCulture)); RegisterUnsafe((value) => !(value == 0f || float.IsNaN(value))); @@ -299,6 +316,7 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => 
checked((long)value)); RegisterUnsafe((value) => (ulong)value); RegisterUnsafe((value) => value); + RegisterUnsafe((value) => (decimal) value); // possible loss of precision RegisterUnsafe((value) => Slice.FromDouble(value)); RegisterUnsafe((value) => Slice.FromDouble(value).GetBytes()); @@ -312,41 +330,43 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => (uint)value); RegisterUnsafe((value) => checked((long)value)); RegisterUnsafe((value) => (ulong)value); - RegisterUnsafe((value) => checked((float)value)); + RegisterUnsafe((value) => (float)value); // possible loss of precision + RegisterUnsafe((value) => (decimal) value); // possible loss of precision RegisterUnsafe((value) => Slice.FromString(value)); - RegisterUnsafe((value) => Slice.FromString(value).GetBytes()); + RegisterUnsafe((value) => Slice.FromString(value).GetBytes()); //REVIEW: string=>byte[] use UTF-8, but byte[]=>string uses Base64 ? RegisterUnsafe((value) => !string.IsNullOrEmpty(value)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(sbyte) : SByte.Parse(value, CultureInfo.InvariantCulture)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(byte) : Byte.Parse(value, CultureInfo.InvariantCulture)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(short) : Int16.Parse(value, CultureInfo.InvariantCulture)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(ushort) : UInt16.Parse(value, CultureInfo.InvariantCulture)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(int) : Int32.Parse(value, CultureInfo.InvariantCulture)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(uint) : UInt32.Parse(value, CultureInfo.InvariantCulture)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(long) : Int64.Parse(value, CultureInfo.InvariantCulture)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? 
default(ulong) : UInt64.Parse(value, CultureInfo.InvariantCulture)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(float) : Single.Parse(value, NumberStyles.Float, CultureInfo.InvariantCulture)); - RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(double) : Double.Parse(value, NumberStyles.Float, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(sbyte) : sbyte.Parse(value, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(byte) : byte.Parse(value, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(short) : short.Parse(value, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(ushort) : ushort.Parse(value, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(int) : int.Parse(value, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(uint) : uint.Parse(value, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(long) : long.Parse(value, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(ulong) : ulong.Parse(value, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(float) : float.Parse(value, NumberStyles.Float, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(double) : double.Parse(value, NumberStyles.Float, CultureInfo.InvariantCulture)); + RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(decimal) : decimal.Parse(value, NumberStyles.Float, CultureInfo.InvariantCulture)); RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(Guid) : Guid.Parse(value)); RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? 
default(Uuid128) : Uuid128.Parse(value)); RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(Uuid64) : Uuid64.Parse(value)); RegisterUnsafe((value) => string.IsNullOrEmpty(value) ? default(System.Net.IPAddress) : System.Net.IPAddress.Parse(value)); - RegisterUnsafe((value) => Slice.Create(value)); - RegisterUnsafe((value) => value == null ? default(string) : value.Length == 0 ? String.Empty : System.Convert.ToBase64String(value)); + RegisterUnsafe((value) => value.AsSlice()); + RegisterUnsafe((value) => value == null ? default(string) : value.Length == 0 ? string.Empty : System.Convert.ToBase64String(value)); //REVIEW: string=>byte[] use UTF-8, but byte[]=>string uses Base64 ? RegisterUnsafe((value) => value != null && value.Length > 0); - RegisterUnsafe((value) => value == null ? default(sbyte) : Slice.Create(value).ToSByte()); - RegisterUnsafe((value) => value == null ? default(byte) : Slice.Create(value).ToByte()); - RegisterUnsafe((value) => value == null ? default(short) : Slice.Create(value).ToInt16()); - RegisterUnsafe((value) => value == null ? default(ushort) : Slice.Create(value).ToUInt16()); - RegisterUnsafe((value) => value == null ? 0 : Slice.Create(value).ToInt32()); - RegisterUnsafe((value) => value == null ? 0U : Slice.Create(value).ToUInt32()); - RegisterUnsafe((value) => value == null ? 0L : Slice.Create(value).ToInt64()); - RegisterUnsafe((value) => value == null ? 0UL : Slice.Create(value).ToUInt64()); + RegisterUnsafe((value) => value == null ? default(sbyte) : value.AsSlice().ToSByte()); + RegisterUnsafe((value) => value == null ? default(byte) : value.AsSlice().ToByte()); + RegisterUnsafe((value) => value == null ? default(short) : value.AsSlice().ToInt16()); + RegisterUnsafe((value) => value == null ? default(ushort) : value.AsSlice().ToUInt16()); + RegisterUnsafe((value) => value == null ? 0 : value.AsSlice().ToInt32()); + RegisterUnsafe((value) => value == null ? 
0U : value.AsSlice().ToUInt32()); + RegisterUnsafe((value) => value == null ? 0L : value.AsSlice().ToInt64()); + RegisterUnsafe((value) => value == null ? 0UL : value.AsSlice().ToUInt64()); RegisterUnsafe((value) => value == null || value.Length == 0 ? default(Guid) : new Uuid128(value).ToGuid()); RegisterUnsafe((value) => value == null || value.Length == 0 ? default(Uuid128) : new Uuid128(value)); - RegisterUnsafe((value) => value == null || value.Length == 0 ? default(Uuid64) : new Uuid64(value)); - RegisterUnsafe((value) => value == null ? TimeSpan.Zero : TimeSpan.FromTicks(Slice.Create(value).ToInt64())); + RegisterUnsafe((value) => value != null ? Uuid64.Read(value) : default(Uuid64)); + RegisterUnsafe((value) => value == null ? TimeSpan.Zero : TimeSpan.FromTicks(value.AsSlice().ToInt64())); RegisterUnsafe((value) => value == null || value.Length == 0 ? default(System.Net.IPAddress) : new System.Net.IPAddress(value)); RegisterUnsafe((value) => Slice.FromGuid(value)); @@ -354,14 +374,14 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => value.ToString("D", null)); RegisterUnsafe((value) => new Uuid128(value)); RegisterUnsafe((value) => value != Guid.Empty); - RegisterUnsafe((value) => new System.Net.IPAddress(new Uuid128(value).ToByteArray())); + RegisterUnsafe((value) => new System.Net.IPAddress(new Uuid128(value).ToByteArray())); //REVIEW: custom converter for Guid=>IPv6? RegisterUnsafe((value) => value.ToSlice()); RegisterUnsafe((value) => value.ToByteArray()); RegisterUnsafe((value) => value.ToString("D", null)); RegisterUnsafe((value) => value.ToGuid()); RegisterUnsafe((value) => value != Uuid128.Empty); - RegisterUnsafe((value) => new System.Net.IPAddress(value.ToByteArray())); + RegisterUnsafe((value) => new System.Net.IPAddress(value.ToByteArray())); //REVIEW: custom converter for Guid=>IPv6? 
RegisterUnsafe((value) => value.ToSlice()); RegisterUnsafe((value) => value.ToByteArray()); @@ -377,15 +397,13 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => value.TotalSeconds); RegisterUnsafe((value) => value == TimeSpan.Zero); - RegisterUnsafe((value) => value != null ? Slice.Create(value.GetAddressBytes()) : Slice.Nil); - RegisterUnsafe((value) => value != null ? value.GetAddressBytes() : null); - RegisterUnsafe((value) => value != null ? value.ToString() : null); + RegisterUnsafe((value) => (value?.GetAddressBytes()).AsSlice()); + RegisterUnsafe((value) => value?.GetAddressBytes()); + RegisterUnsafe((value) => value?.ToString()); +#pragma warning disable 618 + RegisterUnsafe((value) => (int) (value?.Address ?? 0)); +#pragma warning restore 618 - RegisterUnsafe((value) => (byte)value); - RegisterUnsafe((value) => (int)value); - RegisterUnsafe((value) => Slice.FromByte((byte)value)); - - //REVIEW: this should go in the Tuples layer ! RegisterUnsafe((value) => value.GetBytes()); RegisterUnsafe((value) => value.ToUnicode()); RegisterUnsafe((value) => value.ToBool()); @@ -397,33 +415,36 @@ private static void RegisterDefaultConverters() RegisterUnsafe((value) => value.ToUInt32()); RegisterUnsafe((value) => value.ToInt64()); RegisterUnsafe((value) => value.ToUInt64()); + RegisterUnsafe((value) => value.ToSingle()); + RegisterUnsafe((value) => value.ToDouble()); + RegisterUnsafe((value) => value.ToDecimal()); RegisterUnsafe((value) => value.ToGuid()); RegisterUnsafe((value) => value.ToUuid128()); RegisterUnsafe((value) => value.ToUuid64()); RegisterUnsafe((value) => TimeSpan.FromTicks(value.ToInt64())); - RegisterUnsafe((value) => (FdbTupleAlias)value.ToByte()); - RegisterUnsafe((value) => !value.IsNullOrEmpty ? new System.Net.IPAddress(value.GetBytes()) : null); + RegisterUnsafe((value) => !value.IsNullOrEmpty ? 
new System.Net.IPAddress(value.GetBytesOrEmpty()) : null); } /// Helper method to throw an exception when we don't know how to convert from to /// Type of the source object /// Target type of the conversion - [ContractAnnotation("=> halt")] - private static void FailCannotConvert(Type source, Type destination) + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static Exception FailCannotConvert(Type source, Type destination) { // prettyprint nullable type names to have something more usefull than "Nullable`1" //TODO: extend this to all generic types ? var nt = Nullable.GetUnderlyingType(source); - string sourceName = nt == null ? source.Name : String.Format("Nullable<{0}>", nt.Name); + string sourceName = nt == null ? source.Name : ("Nullable<" + nt.Name + ">"); nt = Nullable.GetUnderlyingType(destination); - string destinationName = nt == null ? destination.Name : String.Format("Nullable<{0}>", nt.Name); + string destinationName = nt == null ? destination.Name : ("Nullable<" + nt.Name + ">"); - throw new InvalidOperationException(String.Format("Cannot convert values of type {0} into {1}", sourceName, destinationName)); + return new InvalidOperationException($"Cannot convert values of type {sourceName} into {destinationName}"); } /// Create a new delegate that cast a boxed valued of type T (object) into a T /// Delegate that is of type Func<object, > - private static Delegate CreateCaster(Type type) + [NotNull] + private static Delegate CreateCaster([NotNull] Type type) { var prm = Expression.Parameter(typeof(object), "value"); //TODO: valuetype vs ref type ? 
@@ -433,48 +454,51 @@ private static Delegate CreateCaster(Type type) } /// Helper method that wraps a lambda function into a converter - /// Source type - /// Destination type - /// Lambda that converts a value of type into a value of type + /// Source type + /// Destination type + /// Lambda that converts a value of type into a value of type /// Converters that wraps the lambda - public static IFdbConverter Create([NotNull] Func converter) + [NotNull] + public static ITypeConverter Create([NotNull] Func converter) { - if (converter == null) throw new ArgumentNullException("converter"); - return new Anonymous(converter); + Contract.NotNull(converter, nameof(converter)); + return new Anonymous(converter); } /// Add a new known converter (without locking) - /// Source type - /// Destination type - /// Lambda that converts a value of type into a value of type - internal static void RegisterUnsafe([NotNull] Func converter) + /// Source type + /// Destination type + /// Lambda that converts a value of type into a value of type + internal static void RegisterUnsafe([NotNull] Func converter) { Contract.Requires(converter != null); - Converters[new ComparisonHelper.TypePair(typeof(T), typeof(R))] = new Anonymous(converter); + Converters[new ComparisonHelper.TypePair(typeof(TInput), typeof(TOutput))] = new Anonymous(converter); } /// Registers a new type converter - /// Source type - /// Destination type - /// Lambda that converts a value of type into a value of type - public static void Register([NotNull] Func converter) + /// Source type + /// Destination type + /// Lambda that converts a value of type into a value of type + public static void Register([NotNull] Func converter) { Contract.Requires(converter != null); - Register(new Anonymous(converter)); + Register(new Anonymous(converter)); } /// Registers a new type converter - /// Source type - /// Destination type - /// Instance that can convert values of type into a values of type - public static void 
Register([NotNull] IFdbConverter converter) + /// Source type + /// Destination type + /// Instance that can convert values of type into a values of type + public static void Register([NotNull] ITypeConverter converter) { - if (converter == null) throw new ArgumentNullException("converter"); + Contract.NotNull(converter, nameof(converter)); while (true) { var previous = Converters; - var dic = new Dictionary(previous, previous.Comparer); - dic[new ComparisonHelper.TypePair(typeof(T), typeof(R))] = converter; + var dic = new Dictionary(previous, previous.Comparer) + { + [new ComparisonHelper.TypePair(typeof(TInput), typeof(TOutput))] = converter + }; if (Interlocked.CompareExchange(ref Converters, dic, previous) == previous) { break; @@ -482,102 +506,130 @@ public static void Register([NotNull] IFdbConverter converter) } } - /// Returns a converter that converts s into s - /// Source type - /// Destination type + /// Returns a converter that converts s into s + /// Source type + /// Destination type /// Valid convertir for this types, or an exception if there are no known convertions /// No valid converter for these types was found [NotNull] - public static IFdbConverter GetConverter() + public static ITypeConverter GetConverter() { - - if (typeof(T) == typeof(R)) + if (typeof(TInput) == typeof(TOutput)) { // R == T : identity function - return (IFdbConverter)Identity.Default; + return (ITypeConverter) Identity.Default; } // Try to get from the known converters - IFdbConverter converter; - if (!Converters.TryGetValue(new ComparisonHelper.TypePair(typeof(T), typeof(R)), out converter)) + if (!Converters.TryGetValue(new ComparisonHelper.TypePair(typeof(TInput), typeof(TOutput)), out ITypeConverter converter)) { - if (typeof(R).IsAssignableFrom(typeof(T))) + if (typeof(TOutput).IsAssignableFrom(typeof(TInput))) { // T is a subclass of R, so it should work fine - return SubClass.Default; + return SubClass.Default; } //TODO: ..? 
- FailCannotConvert(typeof(T), typeof(R)); + throw FailCannotConvert(typeof(TInput), typeof(TOutput)); } - return (IFdbConverter)converter; + return (ITypeConverter) converter; + } + + /// Wrap a Tye Converter into a corresponding Func<....> + /// Source type + /// Destination type + /// Instance that can convert from to + /// Lambda function that, when called, invokes + [Pure, NotNull] + public static Func AsFunc([NotNull] this ITypeConverter converter) + { + return converter.Convert; } - /// Convert a value of type into type - /// Source type - /// Destination type + /// Convert a value of type into type + /// Source type + /// Destination type /// Value to convert /// Converted value - public static R Convert(T value) + [Pure, ContractAnnotation("null=>null")] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static TOutput Convert(TInput value) { //note: most of the types, T will be equal to R. We should get an optimized converter that will not box the values - return GetConverter().Convert(value); + return Cache.Converter.Convert(value); } /// Cast a boxed value (known to be of type ) into an unboxed value /// Runtime type of the value /// Value that is known to be of type , but is boxed into an object /// Original value casted into its runtime type + [Pure, ContractAnnotation("null=>null")] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static T Unbox(object value) { return Identity.FromObject(value); } - /// Convert a boxed value into type - /// Destination type + /// Convert a boxed value into type + /// Destination type /// Boxed value - /// Converted value, or an exception if there are no known convertions. The value null is converted into default() by convention + /// Converted value, or an exception if there are no known convertions. 
The value null is converted into default() by convention /// No valid converter for these types was found - [CanBeNull] - public static R ConvertBoxed(object value) + [Pure, CanBeNull] + public static T ConvertBoxed(object value) { - if (value == null) return default(R); + if (value == null) return default(T); var type = value.GetType(); - var targetType = typeof(R); + var targetType = typeof(T); // cast ! - if (targetType.IsAssignableFrom(type)) return (R)value; + if (targetType.IsAssignableFrom(type)) return (T) value; - IFdbConverter converter; - if (!Converters.TryGetValue(new ComparisonHelper.TypePair(type, targetType), out converter)) + if (!Converters.TryGetValue(new ComparisonHelper.TypePair(type, targetType), out ITypeConverter converter)) { // maybe it is a nullable type ? var nullableType = Nullable.GetUnderlyingType(targetType); - if (nullableType != null) - { // we already nullchecked value above, so we just have to convert it to the underlying type... + if (nullableType == null) throw FailCannotConvert(type, targetType); - // shortcut for converting a R into a Nullable ... - if (type == nullableType) return (R)value; + // we already nullchecked value above, so we just have to convert it to the underlying type... - // maybe we have a converter for the underlying type ? - if (Converters.TryGetValue(new ComparisonHelper.TypePair(type, nullableType), out converter)) - { - return (R)converter.ConvertBoxed(value); - } + // shortcut for converting a T into a Nullable ... + if (type == nullableType) return (T) value; + + // maybe we have a converter for the underlying type ? 
+ if (Converters.TryGetValue(new ComparisonHelper.TypePair(type, nullableType), out converter)) + { + return (T) converter.ConvertBoxed(value); } - FailCannotConvert(type, targetType); } - return (R)converter.ConvertBoxed(value); + return (T) converter.ConvertBoxed(value); + } + + [NotNull] + private static MethodInfo GetConverterMethod(Type input, Type output) + { + var m = typeof(TypeConverters).GetMethod(nameof(GetConverter), BindingFlags.Static | BindingFlags.Public).MakeGenericMethod(input, output); + Contract.Assert(m != null); + return m; + } + + /// Create a boxed converter from to + [Pure, NotNull] + public static Func CreateBoxedConverter(Type outputType) + { + var converter = (ITypeConverter) GetConverterMethod(typeof(TInput), outputType).Invoke(null, Array.Empty()); + return (x) => converter.ConvertBoxed(x); } /// Converts all the elements of a sequence /// New sequence with all the converted elements - public static IEnumerable ConvertAll([NotNull] this IFdbConverter converter, [NotNull] IEnumerable items) + [Pure, NotNull] + public static IEnumerable ConvertAll([NotNull] this ITypeConverter converter, [NotNull] IEnumerable items) { - if (converter == null) throw new ArgumentNullException("converter"); - if (items == null) throw new ArgumentNullException("items"); + Contract.NotNull(converter, nameof(converter)); + Contract.NotNull(items, nameof(items)); foreach (var item in items) { @@ -588,32 +640,23 @@ public static IEnumerable ConvertAll([NotNull] this IFdbConverter /// Converts all the elements of a list /// New list with all the converted elements [NotNull] - public static List ConvertAll([NotNull] this IFdbConverter converter, [NotNull] List items) + public static List ConvertAll([NotNull] this ITypeConverter converter, [NotNull] List items) { - if (converter == null) throw new ArgumentNullException("converter"); - if (items == null) throw new ArgumentNullException("items"); + Contract.NotNull(converter, nameof(converter)); + 
Contract.NotNull(items, nameof(items)); -#if CORE_CLR - var list = new List(items.Count); - foreach (var item in items) - { - list.Add(converter.Convert(item)); - } - return list; -#else - return items.ConvertAll(converter.Convert); -#endif + return items.ConvertAll(converter.Convert); } /// Converts all the elements of an array /// New array with all the converted elements [NotNull] - public static R[] ConvertAll([NotNull] this IFdbConverter converter, [NotNull] T[] items) + public static TOutput[] ConvertAll([NotNull] this ITypeConverter converter, [NotNull] TInput[] items) { - if (converter == null) throw new ArgumentNullException("converter"); - if (items == null) throw new ArgumentNullException("items"); + Contract.NotNull(converter, nameof(converter)); + Contract.NotNull(items, nameof(items)); - var results = new R[items.Length]; + var results = new TOutput[items.Length]; for (int i = 0; i < items.Length; i++) { results[i] = converter.Convert(items[i]); @@ -621,6 +664,21 @@ public static R[] ConvertAll([NotNull] this IFdbConverter converter, return results; } + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string ToString(TInput value) + { + //note: raccourci pour Convert(..) dont le but est d'être inliné par le JIT en release + return Cache.Converter.Convert(value); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static TOutput FromString(string text) + { + //note: raccourci pour Convert(..) dont le but est d'être inliné par le JIT en release + return Cache.Converter.Convert(text); + } + } } diff --git a/FoundationDB.Client/Shared/ExceptionExtensions.cs b/FoundationDB.Client/Shared/ExceptionExtensions.cs new file mode 100644 index 000000000..a77734c54 --- /dev/null +++ b/FoundationDB.Client/Shared/ExceptionExtensions.cs @@ -0,0 +1,141 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace Doxense +{ + using Doxense.Diagnostics.Contracts; + using System; + using System.Collections.Generic; + using System.Reflection; + using JetBrains.Annotations; + + internal static class ExceptionExtensions + { + private static readonly MethodInfo s_preserveStackTrace; + private static readonly MethodInfo s_prepForRemoting; + + static ExceptionExtensions() + { + try + { + s_preserveStackTrace = typeof(Exception).GetMethod("InternalPreserveStackTrace", BindingFlags.Instance | BindingFlags.NonPublic); + s_prepForRemoting = typeof(Exception).GetMethod("PrepForRemoting", BindingFlags.Instance | BindingFlags.NonPublic); + } + catch { } + Contract.Ensures(s_preserveStackTrace != null, "Exception.InternalPreserveStackTrace not found?"); + Contract.Ensures(s_prepForRemoting != null, "Exception.PrepForRemoting not found?"); + } + + /// Dtermine s'il s'agit d'une erreur fatale (qu'il faudrait bouncer) + /// Exception tester + /// True s'il s'agit d'une ThreadAbortException, OutOfMemoryException ou StackOverflowException, ou une AggregateException qui contient une de ces erreurs + [Pure] + public static bool IsFatalError([CanBeNull] this Exception self) + { + return self is System.Threading.ThreadAbortException || self is OutOfMemoryException || self is StackOverflowException || (self is AggregateException && IsFatalError(self.InnerException)); + } + + /// Prserve la stacktrace lorsqu'on cre une exception, qui sera re-throw plus haut + /// Exception qui a t catche + /// La mme exception, mais avec la StackTrace prserve + [NotNull] + public static Exception PreserveStackTrace([NotNull] this Exception self) + { + self = UnwrapIfAggregate(self); + if (s_preserveStackTrace != null) s_preserveStackTrace.Invoke(self, null); + return self; + } + + /// Prserve la stacktrace lorsqu'on veut re-thrower une exception catche + /// Exception qui a t catche + /// La mme exception, mais avec la StackTrace prserve + /// Similaire l'extension mthode 
PrepareForRethrow prsente dans System.CoreEx.dll du Reactive Framework + [NotNull] + public static Exception PrepForRemoting([NotNull] this Exception self) + { + //TODO: cette extensions mthode est galement prsente dans System.CoreEx.dll du Reactive Framework! + // il faudra peut etre a terme rerouter vers cette version (si un jour Sioux refrence Rx directement...) + self = UnwrapIfAggregate(self); + if (s_prepForRemoting != null) s_prepForRemoting.Invoke(self, null); + return self; + } + + /// Retourne la premire exeception non-aggregate trouve dans l'arbre des InnerExceptions + /// AggregateException racine + /// Premire exception dans l'arbre des InnerExceptions qui ne soit pas de type AggregateException + [NotNull] + public static Exception GetFirstConcreteException([NotNull] this AggregateException self) + { + // dans la majorit des cas, on a une branche avec potentiellement plusieurs couches de AggEx mais une seule InnerException + var e = self.GetBaseException(); + if (!(e is AggregateException)) return e; + + // Sinon c'est qu'on a un arbre a plusieures branches, qu'on va devoir parcourir... + var list = new Queue(); + list.Enqueue(self); + while (list.Count > 0) + { + foreach (var e2 in list.Dequeue().InnerExceptions) + { + if (e2 == null) continue; + if (!(e2 is AggregateException x)) return e2; // on a trouv une exception concrte ! + list.Enqueue(x); + } + } + // uhoh ? + return self; + } + + /// Retourne la premire exception non-aggregate si c'est une AggregateException, ou l'exception elle mme dans les autres cas + /// + /// + [NotNull] + public static Exception UnwrapIfAggregate([NotNull] this Exception self) + { + return self is AggregateException aggEx ? 
GetFirstConcreteException(aggEx) : self; + } + + /// Rethrow la premire exception non-aggregate trouve, en jettant les autres s'il y en a + /// AggregateException racine + [ContractAnnotation("self:null => null")] + public static Exception Unwrap(this AggregateException self) + { + return self != null ? GetFirstConcreteException(self).PrepForRemoting() : null; + } + + /// Unwrap generic exceptions like or to return the inner exceptions + [NotNull] + public static Exception Unwrap([NotNull] this Exception self) + { + if (self is AggregateException aggEx) return GetFirstConcreteException(aggEx); + if (self is TargetInvocationException tiEx) return tiEx.InnerException ?? self; + //add other type of "container" exceptions as required + return self; + } + } +} diff --git a/FoundationDB.Client/Shared/HashCodes.cs b/FoundationDB.Client/Shared/HashCodes.cs new file mode 100644 index 000000000..11609e6b6 --- /dev/null +++ b/FoundationDB.Client/Shared/HashCodes.cs @@ -0,0 +1,216 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense +{ + using System; + using System.Collections; + using System.Runtime.CompilerServices; + using JetBrains.Annotations; + + /// Helper methods to work with hashcodes + internal static class HashCodes + { + //REVIEW: dplacer dans le namespace "Doxense" tout court? => c'est utilis dans des tonnes de classes Model POCO + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(long value) + { + return unchecked((int) value) ^ (int) (value >> 32); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(ulong value) + { + return unchecked((int)value) ^ (int)(value >> 32); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(bool value) + { + return value ? 1 : 0; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(Guid value) + { + return value.GetHashCode(); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(int? value) + { + return value ?? -1; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(long? value) + { + return value.HasValue ? Compute(value.Value) : -1; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(ulong? value) + { + return value.HasValue ? Compute(value.Value) : -1; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(bool? 
value) + { + return value.HasValue ? Compute(value.Value) : -1; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(Guid? value) + { + return value?.GetHashCode() ?? -1; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(string value) + { + return value?.GetHashCode() ?? 0; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(T value) + where T : class + { + return value?.GetHashCode() ?? 0; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(T? value) + where T : struct + { + return value.GetValueOrDefault().GetHashCode(); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Compute(T value, System.Collections.IEqualityComparer comparer) + where T : IStructuralEquatable + { + return value?.GetHashCode(comparer) ?? 0; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Combine(int h1, int h2) + { + return ((h1 << 5) + h1) ^ h2; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Combine(int h1, int h2, int h3) + { + int h = ((h1 << 5) + h1) ^ h2; + return ((h << 5) + h) ^ h3; + } + + [Pure] + public static int Combine(int h1, int h2, int h3, int h4) + { + return Combine(Combine(h1, h2), Combine(h3, h4)); + } + + [Pure] + public static int Combine(int h1, int h2, int h3, int h4, int h5) + { + return Combine(Combine(h1, h2, h3), Combine(h4, h5)); + } + + [Pure] + public static int Combine(int h1, int h2, int h3, int h4, int h5, int h6) + { + return Combine(Combine(h1, h2, h3), Combine(h4, h5, h6)); + } + + /// Test that both hash codes, if present, have the same value + /// False IIF h1 != nul && h2 != null && h1 != h2; otherisse, True + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool SameOrMissing(int? h1, int? 
h2) + { + return !h1.HasValue || !h2.HasValue || h1.Value == h2.Value; + } + + #region Flags... + + // Combines one or more booleans into a single value (one bit per boolean) + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Flags(bool a) + { + return (a ? 1 : 0); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Flags(bool a, bool b) + { + return (a ? 1 : 0) | (b ? 2 : 0); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Flags(bool a, bool b, bool c) + { + return (a ? 1 : 0) | (b ? 2 : 0) | (c ? 4 : 0); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Flags(bool a, bool b, bool c, bool d) + { + return (a ? 1 : 0) | (b ? 2 : 0) | (c ? 4 : 0) | (d ? 8 : 0); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Flags(bool a, bool b, bool c, bool d, bool e) + { + return (a ? 1 : 0) | (b ? 2 : 0) | (c ? 4 : 0) | (d ? 8 : 0) | (e ? 16 : 0); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Flags(bool a, bool b, bool c, bool d, bool e, bool f) + { + return (a ? 1 : 0) | (b ? 2 : 0) | (c ? 4 : 0) | (d ? 8 : 0) | (e ? 16 : 0) | (f ? 32 : 0); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Flags(bool a, bool b, bool c, bool d, bool e, bool f, bool g) + { + return (a ? 1 : 0) | (b ? 2 : 0) | (c ? 4 : 0) | (d ? 8 : 0) | (e ? 16 : 0) | (f ? 32 : 0) | (g ? 64 : 0); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Flags(bool a, bool b, bool c, bool d, bool e, bool f, bool g, bool h) + { + return (a ? 1 : 0) | (b ? 2 : 0) | (c ? 4 : 0) | (d ? 8 : 0) | (e ? 16 : 0) | (f ? 32 : 0) | (g ? 64 : 0) | (h ? 
128 : 0); + } + + #endregion + + } +} diff --git a/FoundationDB.Client/Linq/FdbAsyncSequence.cs b/FoundationDB.Client/Shared/Linq/Async/AsyncSequence.cs similarity index 73% rename from FoundationDB.Client/Linq/FdbAsyncSequence.cs rename to FoundationDB.Client/Shared/Linq/Async/AsyncSequence.cs index 0836d6013..0bf9480a4 100644 --- a/FoundationDB.Client/Linq/FdbAsyncSequence.cs +++ b/FoundationDB.Client/Shared/Linq/Async/AsyncSequence.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,39 +26,35 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq + namespace Doxense.Linq.Async { - using FoundationDB.Async; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; + using System.Threading; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Wraps an async sequence of items into another async sequence of items /// Type of elements of the inner async sequence /// Type of elements of the outer async sequence - internal sealed class FdbAsyncSequence : IFdbAsyncEnumerable + internal sealed class AsyncSequence : IAsyncEnumerable { - public readonly IFdbAsyncEnumerable Source; - public readonly Func, IFdbAsyncEnumerator> Factory; + public readonly IAsyncEnumerable Source; + public readonly Func, IAsyncEnumerator> Factory; - public FdbAsyncSequence([NotNull] IFdbAsyncEnumerable source, [NotNull] Func, IFdbAsyncEnumerator> factory) + public AsyncSequence([NotNull] IAsyncEnumerable source, [NotNull] Func, IAsyncEnumerator> factory) { Contract.Requires(source != null && factory != null); this.Source = source; this.Factory = factory; } - public IAsyncEnumerator GetEnumerator() - { - return this.GetEnumerator(FdbAsyncMode.Default); - } - - public IFdbAsyncEnumerator GetEnumerator(FdbAsyncMode mode) + public IAsyncEnumerator 
GetEnumerator(CancellationToken ct, AsyncIterationHint mode) { - IFdbAsyncEnumerator inner = null; + ct.ThrowIfCancellationRequested(); + IAsyncEnumerator inner = null; try { - inner = this.Source.GetEnumerator(mode); + inner = this.Source.GetEnumerator(ct, mode); Contract.Requires(inner != null, "The underlying async sequence returned an empty enumerator"); var outer = this.Factory(inner); @@ -69,7 +65,7 @@ public IFdbAsyncEnumerator GetEnumerator(FdbAsyncMode mode) catch (Exception) { //make sure that the inner iterator gets disposed if something went wrong - if (inner != null) inner.Dispose(); + inner?.Dispose(); throw; } } diff --git a/FoundationDB.Client/Linq/Expressions/AsyncFilterExpression.cs b/FoundationDB.Client/Shared/Linq/Async/Expressions/AsyncFilterExpression.cs similarity index 89% rename from FoundationDB.Client/Linq/Expressions/AsyncFilterExpression.cs rename to FoundationDB.Client/Shared/Linq/Async/Expressions/AsyncFilterExpression.cs index 18caf33d4..cf064d60a 100644 --- a/FoundationDB.Client/Linq/Expressions/AsyncFilterExpression.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Expressions/AsyncFilterExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,30 +26,32 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq + +namespace Doxense.Linq.Async.Expressions { - using FoundationDB.Async; using JetBrains.Annotations; using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; /// Expression that evalute a condition on each item /// Type of the filtered elements - internal sealed class AsyncFilterExpression + public sealed class AsyncFilterExpression { private readonly Func m_filter; private readonly Func> m_asyncFilter; public AsyncFilterExpression(Func filter) { - if (filter == null) throw new ArgumentNullException("filter"); + Contract.NotNull(filter, nameof(filter)); m_filter = filter; } public AsyncFilterExpression(Func> asyncFilter) { - if (asyncFilter == null) throw new ArgumentNullException("asyncFilter"); + Contract.NotNull(asyncFilter, nameof(asyncFilter)); m_asyncFilter = asyncFilter; } @@ -94,8 +96,8 @@ public AsyncFilterExpression OrElse([NotNull] AsyncFilterExpression AndAlso([NotNull] AsyncFilterExpression left, [NotNull] AsyncFilterExpression right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + Contract.NotNull(left, nameof(left)); + Contract.NotNull(right, nameof(right)); // combine two expressions into a logical AND expression. // Note: if the first expression returns false, the second one will NOT be evaluated @@ -111,7 +113,7 @@ public static AsyncFilterExpression AndAlso([NotNull] AsyncFilterExpres else { // next one is async var g = right.m_asyncFilter; - return new AsyncFilterExpression((x, ct) => f(x) ? g(x, ct) : TaskHelpers.FalseTask); + return new AsyncFilterExpression((x, ct) => f(x) ? 
g(x, ct) : TaskHelpers.False); } } else @@ -133,8 +135,8 @@ public static AsyncFilterExpression AndAlso([NotNull] AsyncFilterExpres [NotNull] public static AsyncFilterExpression OrElse([NotNull] AsyncFilterExpression left, [NotNull] AsyncFilterExpression right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + Contract.NotNull(left, nameof(left)); + Contract.NotNull(right, nameof(right)); // combine two expressions into a logical OR expression. // Note: if the first expression returns true, the second one will NOT be evaluated @@ -150,7 +152,7 @@ public static AsyncFilterExpression OrElse([NotNull] AsyncFilterExpress else { // next one is async var g = right.m_asyncFilter; - return new AsyncFilterExpression((x, ct) => f(x) ? TaskHelpers.TrueTask : g(x, ct)); + return new AsyncFilterExpression((x, ct) => f(x) ? TaskHelpers.True : g(x, ct)); } } else @@ -159,11 +161,13 @@ public static AsyncFilterExpression OrElse([NotNull] AsyncFilterExpress if (right.m_asyncFilter != null) { // so is the next one var g = left.m_asyncFilter; + Contract.Assert(g != null); return new AsyncFilterExpression(async (x, ct) => (await f(x, ct).ConfigureAwait(false)) || (await g(x, ct).ConfigureAwait(false))); } else { var g = left.m_filter; + Contract.Assert(g != null); return new AsyncFilterExpression(async (x, ct) => (await f(x, ct).ConfigureAwait(false)) || g(x)); } } diff --git a/FoundationDB.Client/Linq/Expressions/AsyncObserverExpression.cs b/FoundationDB.Client/Shared/Linq/Async/Expressions/AsyncObserverExpression.cs similarity index 90% rename from FoundationDB.Client/Linq/Expressions/AsyncObserverExpression.cs rename to FoundationDB.Client/Shared/Linq/Async/Expressions/AsyncObserverExpression.cs index f292a930f..4168fc1b6 100644 --- a/FoundationDB.Client/Linq/Expressions/AsyncObserverExpression.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Expressions/AsyncObserverExpression.cs @@ -1,5 +1,5 @@ #region 
BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,29 +26,30 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Expressions { using JetBrains.Annotations; using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; /// Expression that execute an action on each item, but does not change the source expression in anyway /// Type of observed items - internal sealed class AsyncObserverExpression + public sealed class AsyncObserverExpression { private readonly Action m_handler; private readonly Func m_asyncHandler; public AsyncObserverExpression(Action handler) { - if (handler == null) throw new ArgumentNullException("handler"); + Contract.NotNull(handler, nameof(handler)); m_handler = handler; } public AsyncObserverExpression(Func asyncHandler) { - if (asyncHandler == null) throw new ArgumentNullException("asyncHandler"); + Contract.NotNull(asyncHandler, nameof(asyncHandler)); m_asyncHandler = asyncHandler; } @@ -93,8 +94,8 @@ public AsyncObserverExpression Then([NotNull] AsyncObserverExpression Then([NotNull] AsyncObserverExpression left, [NotNull] AsyncObserverExpression right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + Contract.NotNull(left, nameof(left)); + Contract.NotNull(right, nameof(right)); if (left.m_handler != null) { diff --git a/FoundationDB.Client/Linq/Expressions/AsyncTransformExpression.cs b/FoundationDB.Client/Shared/Linq/Async/Expressions/AsyncTransformExpression.cs similarity index 90% rename from FoundationDB.Client/Linq/Expressions/AsyncTransformExpression.cs rename to FoundationDB.Client/Shared/Linq/Async/Expressions/AsyncTransformExpression.cs index e35ac589c..c138b254f 100644 --- 
a/FoundationDB.Client/Linq/Expressions/AsyncTransformExpression.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Expressions/AsyncTransformExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,43 +26,41 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Expressions { - using FoundationDB.Async; - using JetBrains.Annotations; using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; + using JetBrains.Annotations; /// Expression that applies a transformation on each item /// Type of the source items /// Type of the transformed items - internal sealed class AsyncTransformExpression + public sealed class AsyncTransformExpression { private readonly Func m_transform; private readonly Func> m_asyncTransform; public AsyncTransformExpression(Func transform) { - if (transform == null) throw new ArgumentNullException("transform"); + Contract.NotNull(transform, nameof(transform)); m_transform = transform; } public AsyncTransformExpression(Func> asyncTransform) { - if (asyncTransform == null) throw new ArgumentNullException("asyncTransform"); + Contract.NotNull(asyncTransform, nameof(asyncTransform)); m_asyncTransform = asyncTransform; } - public bool Async - { - get { return m_asyncTransform != null; } - } + public bool Async => m_asyncTransform != null; public bool IsIdentity() { //note: Identity Function is not async, and is only possible if TSource == TResult, so we can skip checking the types ourselves... 
- return m_transform != null && object.ReferenceEquals(m_transform, TaskHelpers.Cache.Identity); + return m_transform != null && object.ReferenceEquals(m_transform, TaskHelpers.CachedTasks.Identity); } public TResult Invoke(TSource item) @@ -121,8 +119,8 @@ public AsyncTransformExpression Then([NotNull] AsyncTra [NotNull] public static AsyncTransformExpression Then([NotNull] AsyncTransformExpression left, [NotNull] AsyncTransformExpression right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + Contract.NotNull(left, nameof(left)); + Contract.NotNull(right, nameof(right)); if (left.IsIdentity()) { // we can optimize the left expression away, since we know that TSource == TResult ! diff --git a/FoundationDB.Client/Linq/Iterators/FdbAnonymousAsyncGenerator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/AnonymousAsyncGenerator.cs similarity index 66% rename from FoundationDB.Client/Linq/Iterators/FdbAnonymousAsyncGenerator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/AnonymousAsyncGenerator.cs index ac5cc6964..966bf96bd 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbAnonymousAsyncGenerator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/AnonymousAsyncGenerator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,17 +26,17 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Async; - using FoundationDB.Client.Utils; using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; /// Generate items asynchronously, using a user-provided lambda /// Type of the items produced by this generator - internal class FdbAnonymousAsyncGenerator : FdbAsyncIterator + public class AnonymousAsyncGenerator : AsyncIterator { // use a custom lambda that returns Maybe results, asynchronously // => as long as the result has a value, continue iterating @@ -45,39 +45,58 @@ internal class FdbAnonymousAsyncGenerator : FdbAsyncIterator // ITERABLE - private readonly Func>> m_generator; + private readonly Delegate m_generator; + // can be either one of: + // - Func>> + // - Func>> // ITERATOR private long m_index; - public FdbAnonymousAsyncGenerator(Func>> generator) + public AnonymousAsyncGenerator(Func>> generator) + : this((Delegate) generator) + { } + + private AnonymousAsyncGenerator(Delegate generator) { Contract.Requires(generator != null); m_generator = generator; m_index = -1; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbAnonymousAsyncGenerator(m_generator); + return new AnonymousAsyncGenerator(m_generator); } - protected override Task OnFirstAsync(CancellationToken ct) + protected override Task OnFirstAsync() { m_index = 0; - return TaskHelpers.TrueTask; + return TaskHelpers.True; } - protected override async Task OnNextAsync(CancellationToken ct) + protected override async Task OnNextAsync() { - ct.ThrowIfCancellationRequested(); + m_ct.ThrowIfCancellationRequested(); if (m_index < 0) return false; long index = m_index; - var res = await m_generator(index, ct); + Maybe res; + if (m_generator is Func>> genT) + { + res = await genT(index, m_ct); + } + else if (m_generator is 
Func>> genV) + { + res = await genV(index, m_ct); + } + else + { + throw new InvalidOperationException(); + } - if (res.HasFailed) res.ThrowForNonSuccess(); + if (res.Failed) res.ThrowForNonSuccess(); if (res.IsEmpty) return Completed(); m_index = checked(index + 1); return Publish(res.Value); diff --git a/FoundationDB.Client/Linq/Iterators/FdbAsyncFilterIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/AsyncFilterIterator.cs similarity index 75% rename from FoundationDB.Client/Linq/Iterators/FdbAsyncFilterIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/AsyncFilterIterator.cs index 6c1eff46e..b1b7919fd 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbAsyncFilterIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/AsyncFilterIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,39 +26,40 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Async; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; + using JetBrains.Annotations; - internal abstract class FdbAsyncFilterIterator : FdbAsyncIterator + public abstract class AsyncFilterIterator : AsyncIterator { /// Source sequence (when in iterable mode) - protected IFdbAsyncEnumerable m_source; + protected IAsyncEnumerable m_source; /// Active iterator on the source (when in iterator mode) - protected IFdbAsyncEnumerator m_iterator; + protected IAsyncEnumerator m_iterator; protected bool m_innerHasCompleted; - protected FdbAsyncFilterIterator([NotNull] IFdbAsyncEnumerable source) + protected AsyncFilterIterator([NotNull] IAsyncEnumerable source) { Contract.Requires(source != null); m_source = source; } /// Start the inner iterator - protected virtual IFdbAsyncEnumerator StartInner() + protected virtual IAsyncEnumerator StartInner(CancellationToken ct) { + ct.ThrowIfCancellationRequested(); // filtering changes the number of items, so that means that, even if the underlying caller wants one item, we may need to read more. 
// => change all "Head" requests into "Iterator" to prevent any wrong optimizations by the underlying source (ex: using a too small batch size) var mode = m_mode; - if (mode == FdbAsyncMode.Head) mode = FdbAsyncMode.Iterator; + if (mode == AsyncIterationHint.Head) mode = AsyncIterationHint.Iterator; - return m_source.GetEnumerator(mode); + return m_source.GetEnumerator(m_ct, mode); } protected void MarkInnerAsCompleted() @@ -66,24 +67,20 @@ protected void MarkInnerAsCompleted() m_innerHasCompleted = true; // we don't need the inerator, so we can dispose of it immediately - var iterator = Interlocked.Exchange(ref m_iterator, null); - if (iterator != null) - { - iterator.Dispose(); - } + Interlocked.Exchange(ref m_iterator, null)?.Dispose(); } - protected override Task OnFirstAsync(CancellationToken ct) + protected override Task OnFirstAsync() { // on the first call to MoveNext, we have to hook up with the source iterator - IFdbAsyncEnumerator iterator = null; + IAsyncEnumerator iterator = null; try { - iterator = StartInner(); - if (iterator == null) return TaskHelpers.FalseTask; + iterator = StartInner(m_ct); + if (iterator == null) return TaskHelpers.False; OnStarted(iterator); - return TaskHelpers.TrueTask; + return TaskHelpers.True; } catch (Exception) { @@ -101,7 +98,7 @@ protected override Task OnFirstAsync(CancellationToken ct) } } - protected virtual void OnStarted(IFdbAsyncEnumerator iterator) + protected virtual void OnStarted(IAsyncEnumerator iterator) { //override this to add custom starting logic once we know that the inner iterator is ready } diff --git a/FoundationDB.Client/Linq/Iterators/FdbAsyncIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/AsyncIterator.cs similarity index 52% rename from FoundationDB.Client/Linq/Iterators/FdbAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/AsyncIterator.cs index 016d67af5..0cf8545fe 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbAsyncIterator.cs +++ 
b/FoundationDB.Client/Shared/Linq/Async/Iterators/AsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,21 +26,22 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Async; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq.Async.Expressions; + using JetBrains.Annotations; /// Base class for all async iterators /// Type of elements of the outer async sequence - internal abstract class FdbAsyncIterator : IFdbAsyncEnumerable, IFdbAsyncEnumerator + public abstract class AsyncIterator : IAsyncEnumerable, IAsyncEnumerator { - //REVIEW: we could need an IFdbAsyncIterator interface that holds all the Select(),Where(),Take(),... so that it can be used by FdbAsyncEnumerable to either call them directly (if the query supports it) or use a generic implementation - // => this would be implemented by FdbAsyncIterator as well as FdbRangeQuery (and ony other 'self optimizing' class) + //REVIEW: we could need an IAsyncIterator interface that holds all the Select(),Where(),Take(),... so that it can be used by AsyncEnumerable to either call them directly (if the query supports it) or use a generic implementation + // => this would be implemented by AsyncIterator as well as FdbRangeQuery (and ony other 'self optimizing' class) private const int STATE_SEQ = 0; private const int STATE_INIT = 1; @@ -50,35 +51,35 @@ internal abstract class FdbAsyncIterator : IFdbAsyncEnumerable protected TResult m_current; protected int m_state; - protected FdbAsyncMode m_mode; + protected AsyncIterationHint m_mode; + protected CancellationToken m_ct; - #region IFdbAsyncEnumerable... 
+ #region IAsyncEnumerable... - public IAsyncEnumerator GetEnumerator() + public IAsyncEnumerator GetEnumerator(CancellationToken ct, AsyncIterationHint mode) { - return this.GetEnumerator(FdbAsyncMode.Default); - } + ct.ThrowIfCancellationRequested(); - public IFdbAsyncEnumerator GetEnumerator(FdbAsyncMode mode) - { // reuse the same instance the first time if (Interlocked.CompareExchange(ref m_state, STATE_INIT, STATE_SEQ) == STATE_SEQ) { m_mode = mode; + m_ct = ct; return this; } // create a new one var iter = Clone(); iter.m_mode = mode; + iter.m_ct = ct; Volatile.Write(ref iter.m_state, STATE_INIT); return iter; } - protected abstract FdbAsyncIterator Clone(); + protected abstract AsyncIterator Clone(); #endregion - #region IFdbAsyncEnumerator... + #region IAsyncEnumerator... public TResult Current { @@ -89,7 +90,7 @@ public TResult Current } } - public async Task MoveNext(CancellationToken ct) + public async Task MoveNextAsync() { var state = Volatile.Read(ref m_state); @@ -103,16 +104,16 @@ public async Task MoveNext(CancellationToken ct) return false; } - if (ct.IsCancellationRequested) + if (m_ct.IsCancellationRequested) { - return Canceled(ct); + return Canceled(); } try { if (state == STATE_INIT) { - if (!await OnFirstAsync(ct).ConfigureAwait(false)) + if (!await OnFirstAsync().ConfigureAwait(false)) { // did not start at all ? return Completed(); } @@ -123,7 +124,7 @@ public async Task MoveNext(CancellationToken ct) } } - return await OnNextAsync(ct).ConfigureAwait(false); + return await OnNextAsync().ConfigureAwait(false); } catch (Exception) { @@ -137,109 +138,109 @@ public async Task MoveNext(CancellationToken ct) #region LINQ... 
[NotNull] - public virtual FdbAsyncIterator Where([NotNull] Func predicate) + public virtual AsyncIterator Where([NotNull] Func predicate) { - if (predicate == null) throw new ArgumentNullException("predicate"); + Contract.NotNull(predicate, nameof(predicate)); - return FdbAsyncEnumerable.Filter(this, new AsyncFilterExpression(predicate)); + return AsyncEnumerable.Filter(this, new AsyncFilterExpression(predicate)); } [NotNull] - public virtual FdbAsyncIterator Where([NotNull] Func> asyncPredicate) + public virtual AsyncIterator Where([NotNull] Func> asyncPredicate) { - if (asyncPredicate == null) throw new ArgumentNullException("asyncPredicate"); + Contract.NotNull(asyncPredicate, nameof(asyncPredicate)); - return FdbAsyncEnumerable.Filter(this, new AsyncFilterExpression(asyncPredicate)); + return AsyncEnumerable.Filter(this, new AsyncFilterExpression(asyncPredicate)); } [NotNull] - public virtual FdbAsyncIterator Select([NotNull] Func selector) + public virtual AsyncIterator Select([NotNull] Func selector) { - if (selector == null) throw new ArgumentNullException("selector"); + Contract.NotNull(selector, nameof(selector)); - return FdbAsyncEnumerable.Map(this, new AsyncTransformExpression(selector)); + return AsyncEnumerable.Map(this, new AsyncTransformExpression(selector)); } [NotNull] - public virtual FdbAsyncIterator Select([NotNull] Func> asyncSelector) + public virtual AsyncIterator Select([NotNull] Func> asyncSelector) { - if (asyncSelector == null) throw new ArgumentNullException("asyncSelector"); + Contract.NotNull(asyncSelector, nameof(asyncSelector)); - return FdbAsyncEnumerable.Map(this, new AsyncTransformExpression(asyncSelector)); + return AsyncEnumerable.Map(this, new AsyncTransformExpression(asyncSelector)); } [NotNull] - public virtual FdbAsyncIterator SelectMany([NotNull] Func> selector) + public virtual AsyncIterator SelectMany([NotNull] Func> selector) { - if (selector == null) throw new ArgumentNullException("selector"); + 
Contract.NotNull(selector, nameof(selector)); - return FdbAsyncEnumerable.Flatten(this, new AsyncTransformExpression>(selector)); + return AsyncEnumerable.Flatten(this, new AsyncTransformExpression>(selector)); } [NotNull] - public virtual FdbAsyncIterator SelectMany([NotNull] Func>> asyncSelector) + public virtual AsyncIterator SelectMany([NotNull] Func>> asyncSelector) { - if (asyncSelector == null) throw new ArgumentNullException("asyncSelector"); + Contract.NotNull(asyncSelector, nameof(asyncSelector)); - return FdbAsyncEnumerable.Flatten(this, new AsyncTransformExpression>(asyncSelector)); + return AsyncEnumerable.Flatten(this, new AsyncTransformExpression>(asyncSelector)); } [NotNull] - public virtual FdbAsyncIterator SelectMany([NotNull] Func> collectionSelector, [NotNull] Func resultSelector) + public virtual AsyncIterator SelectMany([NotNull] Func> collectionSelector, [NotNull] Func resultSelector) { - if (collectionSelector == null) throw new ArgumentNullException("collectionSelector"); - if (resultSelector == null) throw new ArgumentNullException("resultSelector"); + Contract.NotNull(collectionSelector, nameof(collectionSelector)); + Contract.NotNull(resultSelector, nameof(resultSelector)); - return FdbAsyncEnumerable.Flatten(this, new AsyncTransformExpression>(collectionSelector), resultSelector); + return AsyncEnumerable.Flatten(this, new AsyncTransformExpression>(collectionSelector), resultSelector); } [NotNull] - public virtual FdbAsyncIterator SelectMany([NotNull] Func>> asyncCollectionSelector, [NotNull] Func resultSelector) + public virtual AsyncIterator SelectMany([NotNull] Func>> asyncCollectionSelector, [NotNull] Func resultSelector) { - if (asyncCollectionSelector == null) throw new ArgumentNullException("asyncCollectionSelector"); - if (resultSelector == null) throw new ArgumentNullException("resultSelector"); + Contract.NotNull(asyncCollectionSelector, nameof(asyncCollectionSelector)); + Contract.NotNull(resultSelector, 
nameof(resultSelector)); - return FdbAsyncEnumerable.Flatten(this, new AsyncTransformExpression>(asyncCollectionSelector), resultSelector); + return AsyncEnumerable.Flatten(this, new AsyncTransformExpression>(asyncCollectionSelector), resultSelector); } [NotNull] - public virtual FdbAsyncIterator Take(int count) + public virtual AsyncIterator Take(int count) { - return FdbAsyncEnumerable.Limit(this, count); + return AsyncEnumerable.Limit(this, count); } [NotNull] - public virtual FdbAsyncIterator TakeWhile([NotNull] Func condition) + public virtual AsyncIterator TakeWhile([NotNull] Func condition) { - return FdbAsyncEnumerable.Limit(this, condition); + return AsyncEnumerable.Limit(this, condition); } [NotNull] - public virtual FdbAsyncIterator Skip(int count) + public virtual AsyncIterator Skip(int count) { - return FdbAsyncEnumerable.Offset(this, count); + return AsyncEnumerable.Offset(this, count); } /// Execute an action on the result of this async sequence [NotNull] public virtual Task ExecuteAsync([NotNull] Action action, CancellationToken ct) { - return FdbAsyncEnumerable.Run(this, FdbAsyncMode.All, action, ct); + return AsyncEnumerable.Run(this, AsyncIterationHint.All, action, ct); } [NotNull] public virtual Task ExecuteAsync([NotNull] Func asyncAction, CancellationToken ct) { - return FdbAsyncEnumerable.Run(this, FdbAsyncMode.All, asyncAction, ct); + return AsyncEnumerable.Run(this, AsyncIterationHint.All, asyncAction, ct); } #endregion #region Iterator Impl... 
- protected abstract Task OnFirstAsync(CancellationToken ct); + protected abstract Task OnFirstAsync(); - protected abstract Task OnNextAsync(CancellationToken ct); + protected abstract Task OnNextAsync(); protected bool Publish(TResult current) { @@ -259,7 +260,7 @@ protected bool Completed() } else if (Interlocked.CompareExchange(ref m_state, STATE_COMPLETED, STATE_ITERATING) == STATE_ITERATING) { // the iterator has done at least something, so we can clean it up - this.Cleanup(); + Cleanup(); } return false; } @@ -268,15 +269,15 @@ protected bool Completed() protected void MarkAsFailed() { //TODO: store the state "failed" somewhere? - this.Dispose(); + Dispose(); } - protected bool Canceled(CancellationToken cancellationToken) + protected bool Canceled() { //TODO: store the state "canceled" somewhere? - this.Dispose(); - cancellationToken.ThrowIfCancellationRequested(); - return false; + Dispose(); + m_ct.ThrowIfCancellationRequested(); // should throw here! + return false; //note: should not be reached } protected void ThrowInvalidState() diff --git a/FoundationDB.Client/Linq/Iterators/FdbAsyncIteratorPump.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/AsyncIteratorPump.cs similarity index 91% rename from FoundationDB.Client/Linq/Iterators/FdbAsyncIteratorPump.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/AsyncIteratorPump.cs index 36dd46750..4620d6bf0 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbAsyncIteratorPump.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/AsyncIteratorPump.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,22 +26,22 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -#undef FULL_DEBUG +//#define FULL_DEBUG -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Async; - using FoundationDB.Client.Utils; using System; using System.Diagnostics; using System.Runtime.ExceptionServices; using System.Threading; using System.Threading.Tasks; + using Doxense.Async; + using Doxense.Diagnostics.Contracts; /// Pump that repeatedly calls MoveNext on an iterator and tries to publish the values in a Producer/Consumer queue /// [DebuggerDisplay("State={m_state}")] - internal sealed class FdbAsyncIteratorPump + public sealed class AsyncIteratorPump { private const int STATE_IDLE = 0; private const int STATE_WAITING_FOR_NEXT = 1; @@ -50,11 +50,11 @@ internal sealed class FdbAsyncIteratorPump private const int STATE_DONE = 4; private volatile int m_state; - private readonly IFdbAsyncEnumerator m_iterator; + private readonly IAsyncEnumerator m_iterator; private readonly IAsyncTarget m_target; - public FdbAsyncIteratorPump( - IFdbAsyncEnumerator iterator, + public AsyncIteratorPump( + IAsyncEnumerator iterator, IAsyncTarget target ) { @@ -79,7 +79,9 @@ internal int State [Conditional("FULL_DEBUG")] private static void LogDebug(string msg) { +#if FULL_DEBUG Console.WriteLine("[pump] " + msg); +#endif } /// Run the pump until the inner iterator is done, an error occurs, or the cancellation token is fired @@ -94,12 +96,12 @@ public async Task PumpAsync(CancellationToken ct) } try - { + { while (!ct.IsCancellationRequested) { LogDebug("waiting for next"); m_state = STATE_WAITING_FOR_NEXT; - if (!(await m_iterator.MoveNext(ct).ConfigureAwait(false))) + if (!(await m_iterator.MoveNextAsync().ConfigureAwait(false))) { LogDebug("completed"); m_state = STATE_DONE; diff --git a/FoundationDB.Client/Linq/Iterators/FdbBatchingAsyncIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/BatchingAsyncIterator.cs similarity index 79% rename from 
FoundationDB.Client/Linq/Iterators/FdbBatchingAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/BatchingAsyncIterator.cs index 4fa05b116..37fe20308 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbBatchingAsyncIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/BatchingAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,17 +26,16 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Client.Utils; using System; using System.Collections.Generic; - using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; /// Packs items from an inner sequence, into a sequence of fixed-size arrays. /// Type the the items from the source sequence - internal class FdbBatchingAsyncIterator : FdbAsyncFilterIterator + public class BatchingAsyncIterator : AsyncFilterIterator { // Typical use cas: to merge incoming streams of items into a sequence of arrays. This is basically the inverse of the SelectMany() operator. // This iterator should mostly be used on sequence that have either no latency (reading from an in-memory buffer) or where the latency is the same for each items. 
@@ -49,12 +48,12 @@ internal class FdbBatchingAsyncIterator : FdbAsyncFilterIterator m_buffer; + private List m_buffer; /// Create a new batching iterator /// Source sequence of items that must be batched by waves /// Maximum size of a batch to return down the line - public FdbBatchingAsyncIterator(IFdbAsyncEnumerable source, int batchSize) + public BatchingAsyncIterator(IAsyncEnumerable source, int batchSize) : base(source) { Contract.Requires(batchSize > 0); @@ -63,18 +62,18 @@ public FdbBatchingAsyncIterator(IFdbAsyncEnumerable source, int batchSiz m_batchSize = batchSize; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbBatchingAsyncIterator(m_source, m_batchSize); + return new BatchingAsyncIterator(m_source, m_batchSize); } - protected override void OnStarted(IFdbAsyncEnumerator iterator) + protected override void OnStarted(IAsyncEnumerator iterator) { // pre-allocate the inner buffer, if it is not too big m_buffer = new List(Math.Min(m_batchSize, 1024)); } - protected override async Task OnNextAsync(CancellationToken ct) + protected override async Task OnNextAsync() { // read items from the source until the buffer is full, or the source has completed @@ -86,16 +85,16 @@ protected override async Task OnNextAsync(CancellationToken ct) var iterator = m_iterator; var buffer = m_buffer; - bool hasMore = await iterator.MoveNext(ct).ConfigureAwait(false); + bool hasMore = await iterator.MoveNextAsync().ConfigureAwait(false); - while(hasMore && !ct.IsCancellationRequested) + while(hasMore && !m_ct.IsCancellationRequested) { buffer.Add(iterator.Current); if (buffer.Count >= m_batchSize) break; - hasMore = await iterator.MoveNext(ct).ConfigureAwait(false); + hasMore = await iterator.MoveNextAsync().ConfigureAwait(false); } - ct.ThrowIfCancellationRequested(); + m_ct.ThrowIfCancellationRequested(); if (!hasMore) { diff --git a/FoundationDB.Client/Linq/Iterators/FdbDistinctAsyncIterator.cs 
b/FoundationDB.Client/Shared/Linq/Async/Iterators/DistinctAsyncIterator.cs similarity index 70% rename from FoundationDB.Client/Linq/Iterators/FdbDistinctAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/DistinctAsyncIterator.cs index 57678dba7..4d932cfe5 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbDistinctAsyncIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/DistinctAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,24 +26,24 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Filters duplicate items from an async sequence /// Type of elements of the async sequence - internal sealed class FdbDistinctAsyncIterator : FdbAsyncFilterIterator + public sealed class DistinctAsyncIterator : AsyncFilterIterator { private readonly IEqualityComparer m_comparer; private HashSet m_set; - public FdbDistinctAsyncIterator([NotNull] IFdbAsyncEnumerable source, IEqualityComparer comparer) + public DistinctAsyncIterator([NotNull] IAsyncEnumerable source, IEqualityComparer comparer) : base(source) { Contract.Requires(comparer != null); @@ -51,30 +51,30 @@ public FdbDistinctAsyncIterator([NotNull] IFdbAsyncEnumerable source, I m_comparer = comparer; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbDistinctAsyncIterator(m_source, m_comparer); + return new DistinctAsyncIterator(m_source, m_comparer); } - protected override Task OnFirstAsync(CancellationToken ct) + protected override 
Task OnFirstAsync() { // we start with an empty set... m_set = new HashSet(m_comparer); - return base.OnFirstAsync(ct); + return base.OnFirstAsync(); } - protected override async Task OnNextAsync(CancellationToken cancellationToken) + protected override async Task OnNextAsync() { - while (!cancellationToken.IsCancellationRequested) + while (!m_ct.IsCancellationRequested) { - if (!await m_iterator.MoveNext(cancellationToken).ConfigureAwait(false)) + if (!await m_iterator.MoveNextAsync().ConfigureAwait(false)) { // completed m_set = null; return Completed(); } - if (cancellationToken.IsCancellationRequested) break; + if (m_ct.IsCancellationRequested) break; TSource current = m_iterator.Current; if (!m_set.Add(current)) @@ -86,23 +86,23 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo } m_set = null; - return Canceled(cancellationToken); + return Canceled(); } public override async Task ExecuteAsync(Action handler, CancellationToken ct) { - if (handler == null) throw new ArgumentNullException("handler"); + Contract.NotNull(handler, nameof(handler)); if (ct.IsCancellationRequested) ct.ThrowIfCancellationRequested(); var mode = m_mode; - if (mode == FdbAsyncMode.Head) mode = FdbAsyncMode.Iterator; + if (mode == AsyncIterationHint.Head) mode = AsyncIterationHint.Iterator; - using (var iter = m_source.GetEnumerator(mode)) + using (var iter = m_source.GetEnumerator(ct, mode)) { var set = new HashSet(m_comparer); - while (!ct.IsCancellationRequested && (await iter.MoveNext(ct).ConfigureAwait(false))) + while (!ct.IsCancellationRequested && (await iter.MoveNextAsync().ConfigureAwait(false))) { var current = iter.Current; if (set.Add(current)) @@ -118,18 +118,18 @@ public override async Task ExecuteAsync(Action handler, CancellationTok public override async Task ExecuteAsync(Func asyncHandler, CancellationToken ct) { - if (asyncHandler == null) throw new ArgumentNullException("asyncHandler"); + Contract.NotNull(asyncHandler, nameof(asyncHandler)); 
if (ct.IsCancellationRequested) ct.ThrowIfCancellationRequested(); var mode = m_mode; - if (mode == FdbAsyncMode.Head) mode = FdbAsyncMode.Iterator; + if (mode == AsyncIterationHint.Head) mode = AsyncIterationHint.Iterator; - using (var iter = m_source.GetEnumerator(mode)) + using (var iter = m_source.GetEnumerator(ct, mode)) { var set = new HashSet(m_comparer); - while (!ct.IsCancellationRequested && (await iter.MoveNext(ct).ConfigureAwait(false))) + while (!ct.IsCancellationRequested && (await iter.MoveNextAsync().ConfigureAwait(false))) { var current = iter.Current; if (set.Add(current)) diff --git a/FoundationDB.Client/FdbExceptIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/ExceptAsyncIterator.cs similarity index 76% rename from FoundationDB.Client/FdbExceptIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/ExceptAsyncIterator.cs index 77e280671..d14c5269b 100644 --- a/FoundationDB.Client/FdbExceptIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/ExceptAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,29 +26,27 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Linq; using System; using System.Collections.Generic; - using System.Threading; /// Returns only the values for the keys that are in the first sub query, but not in the others /// Type of the elements from the source async sequences /// Type of the keys extracted from the source elements /// Type of the elements of resulting async sequence - internal sealed class FdbExceptIterator : FdbQueryMergeIterator + public sealed class ExceptAsyncIterator : MergeAsyncIterator { - public FdbExceptIterator(IEnumerable> sources, int? 
limit, Func keySelector, Func resultSelector, IComparer comparer) + public ExceptAsyncIterator(IEnumerable> sources, int? limit, Func keySelector, Func resultSelector, IComparer comparer) : base(sources, limit, keySelector, resultSelector, comparer) { } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbExceptIterator(m_sources, m_limit, m_keySelector, m_resultSelector, m_keyComparer); + return new ExceptAsyncIterator(m_sources, m_limit, m_keySelector, m_resultSelector, m_keyComparer); } - protected override bool FindNext(CancellationToken cancellationToken, out int index, out TSource current) + protected override bool FindNext(out int index, out TSource current) { index = -1; current = default(TSource); @@ -83,7 +81,7 @@ protected override bool FindNext(CancellationToken cancellationToken, out int in { output = false; if (cmp == 0) discard = true; - AdvanceIterator(i, cancellationToken); + AdvanceIterator(i); } } @@ -95,16 +93,16 @@ protected override bool FindNext(CancellationToken cancellationToken, out int in if (output || discard) { - AdvanceIterator(0, cancellationToken); + AdvanceIterator(0); } return true; } /// Apply a transformation on the results of the intersection - public override FdbAsyncIterator Select(Func selector) + public override AsyncIterator Select(Func selector) { - return new FdbExceptIterator( + return new ExceptAsyncIterator( m_sources, m_limit, m_keySelector, @@ -116,13 +114,13 @@ public override FdbAsyncIterator Select(Func selector /// Limit the number of elements returned by the intersection /// Maximum number of results to return /// New Intersect that will only return the specified number of results - public override FdbAsyncIterator Take(int limit) + public override AsyncIterator Take(int limit) { - if (limit < 0) throw new ArgumentOutOfRangeException("limit", "Value cannot be less than zero"); + if (limit < 0) throw new ArgumentOutOfRangeException(nameof(limit), "Value cannot 
be less than zero"); if (m_limit != null && m_limit < limit) return this; - return new FdbExceptIterator( + return new ExceptAsyncIterator( m_sources, limit, m_keySelector, diff --git a/FoundationDB.Client/FdbIntersectIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/IntersectAsyncIterator.cs similarity index 76% rename from FoundationDB.Client/FdbIntersectIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/IntersectAsyncIterator.cs index f7f3445ba..117d8da61 100644 --- a/FoundationDB.Client/FdbIntersectIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/IntersectAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,29 +26,27 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Linq; using System; using System.Collections.Generic; - using System.Threading; /// Returns only the values for the keys that are in all the sub queries /// Type of the elements from the source async sequences /// Type of the keys extracted from the source elements /// Type of the elements of resulting async sequence - internal sealed class FdbIntersectIterator : FdbQueryMergeIterator + public sealed class IntersectAsyncIterator : MergeAsyncIterator { - public FdbIntersectIterator(IEnumerable> sources, int? limit, Func keySelector, Func resultSelector, IComparer comparer) + public IntersectAsyncIterator(IEnumerable> sources, int? 
limit, Func keySelector, Func resultSelector, IComparer comparer) : base(sources, limit, keySelector, resultSelector, comparer) { } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbIntersectIterator(m_sources, m_limit, m_keySelector, m_resultSelector, m_keyComparer); + return new IntersectAsyncIterator(m_sources, m_limit, m_keySelector, m_resultSelector, m_keyComparer); } - protected override bool FindNext(CancellationToken cancellationToken, out int index, out TSource current) + protected override bool FindNext(out int index, out TSource current) { index = -1; current = default(TSource); @@ -92,7 +90,7 @@ protected override bool FindNext(CancellationToken cancellationToken, out int in // advance everyone ! for (int i = 0; i < m_iterators.Length;i++) { - if (m_iterators[i].Active) AdvanceIterator(i, cancellationToken); + if (m_iterators[i].Active) AdvanceIterator(i); } return true; } @@ -102,7 +100,7 @@ protected override bool FindNext(CancellationToken cancellationToken, out int in { if (m_iterators[i].Active && m_keyComparer.Compare(m_iterators[i].Current, max) < 0) { - AdvanceIterator(i, cancellationToken); + AdvanceIterator(i); } } @@ -112,9 +110,9 @@ protected override bool FindNext(CancellationToken cancellationToken, out int in } /// Apply a transformation on the results of the intersection - public override FdbAsyncIterator Select(Func selector) + public override AsyncIterator Select(Func selector) { - return new FdbIntersectIterator( + return new IntersectAsyncIterator( m_sources, m_limit, m_keySelector, @@ -126,13 +124,13 @@ public override FdbAsyncIterator Select(Func selector /// Limit the number of elements returned by the intersection /// Maximum number of results to return /// New Intersect that will only return the specified number of results - public override FdbAsyncIterator Take(int limit) + public override AsyncIterator Take(int limit) { - if (limit < 0) throw new 
ArgumentOutOfRangeException("limit", "Value cannot be less than zero"); + if (limit < 0) throw new ArgumentOutOfRangeException(nameof(limit), "Value cannot be less than zero"); if (m_limit != null && m_limit < limit) return this; - return new FdbIntersectIterator( + return new IntersectAsyncIterator( m_sources, limit, m_keySelector, diff --git a/FoundationDB.Client/FdbQueryMergeIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/MergeAsyncIterator.cs similarity index 79% rename from FoundationDB.Client/FdbQueryMergeIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/MergeAsyncIterator.cs index 1205aed78..f58c96ee4 100644 --- a/FoundationDB.Client/FdbQueryMergeIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/MergeAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,31 +26,29 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Async; - using FoundationDB.Client.Utils; - using FoundationDB.Linq; using System; using System.Collections.Generic; using System.Linq; - using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; /// Performs a Merge Sort on several concurrent range queries /// Type of the elements in the source queries /// Type of values extracted from the keys, that will be used for sorting /// Type of results returned - internal abstract class FdbQueryMergeIterator : FdbAsyncIterator + public abstract class MergeAsyncIterator : AsyncIterator { // Takes several range queries that return **SORTED** lists of items // - Make all querie's iterators run concurrently // - At each step, finds the "smallest" value from all remaining iterators, transform it into a TResult and expose it as the current element - // - Extract a TKey value from the keys and compare them with the provided comparer + // - Extract a TKey value from the keys and compare them with the provided comparer // The order of the extracted keys MUST be the same as the order of the binary keys ! This algorithm will NOT work if extracted keys are not in the same order as there binary representation ! - protected IEnumerable> m_sources; + protected IEnumerable> m_sources; protected Func m_keySelector; protected IComparer m_keyComparer; protected Func m_resultSelector; @@ -62,13 +60,13 @@ internal abstract class FdbQueryMergeIterator : FdbAsync protected struct IteratorState { public bool Active; - public IFdbAsyncEnumerator Iterator; + public IAsyncEnumerator Iterator; public Task Next; public bool HasCurrent; public TKey Current; } - protected FdbQueryMergeIterator(IEnumerable> sources, int? limit, Func keySelector, Func resultSelector, IComparer comparer) + protected MergeAsyncIterator(IEnumerable> sources, int? 
limit, Func keySelector, Func resultSelector, IComparer comparer) { Contract.Requires(sources != null && (limit == null || limit >= 0) && keySelector != null && resultSelector != null); m_sources = sources; @@ -78,16 +76,16 @@ protected FdbQueryMergeIterator(IEnumerable> source m_resultSelector = resultSelector; } - protected override Task OnFirstAsync(CancellationToken cancellationToken) + protected override Task OnFirstAsync() { if (m_remaining != null && m_remaining.Value < 0) - { // empty list ?? + { // empty list ?? return TaskHelpers.FromResult(Completed()); } // even if the caller only wants the first, we will probably need to read more than that... var mode = m_mode; - if (mode == FdbAsyncMode.Head) mode = FdbAsyncMode.Iterator; + if (mode == AsyncIterationHint.Head) mode = AsyncIterationHint.Iterator; var sources = m_sources.ToArray(); var iterators = new IteratorState[sources.Length]; @@ -96,10 +94,12 @@ protected override Task OnFirstAsync(CancellationToken cancellationToken) // start all the iterators for (int i = 0; i < sources.Length;i++) { - var state = new IteratorState(); - state.Active = true; - state.Iterator = sources[i].GetEnumerator(mode); - state.Next = state.Iterator.MoveNext(cancellationToken); + var state = new IteratorState + { + Active = true, + Iterator = sources[i].GetEnumerator(m_ct, mode) + }; + state.Next = state.Iterator.MoveNextAsync(); iterators[i] = state; } @@ -112,8 +112,8 @@ protected override Task OnFirstAsync(CancellationToken cancellationToken) // dispose already opened iterators var tmp = iterators; iterators = null; - try { Cleanup(tmp); } catch (Exception) { } - return TaskHelpers.FromException(e); + try { Cleanup(tmp); } catch { } + return Task.FromException(e); } finally { @@ -122,7 +122,7 @@ protected override Task OnFirstAsync(CancellationToken cancellationToken) } /// Finds the next smallest item from all the active iterators - protected override async Task OnNextAsync(CancellationToken cancellationToken) + 
protected override async Task OnNextAsync() { if (m_remaining != null && m_remaining.Value <= 0) { @@ -155,7 +155,7 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo } // find the next value to advance - if (!FindNext(cancellationToken, out index, out current)) + if (!FindNext(out index, out current)) { // nothing left anymore ? return Completed(); } @@ -171,21 +171,18 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo } // advance the current iterator - if (m_remaining != null) - { - m_remaining = m_remaining.Value - 1; - } + m_remaining = m_remaining - 1; return true; } - protected abstract bool FindNext(CancellationToken cancellationToken, out int index, out TSource current); + protected abstract bool FindNext(out int index, out TSource current); - protected void AdvanceIterator(int index, CancellationToken cancellationToken) + protected void AdvanceIterator(int index) { m_iterators[index].HasCurrent = false; m_iterators[index].Current = default(TKey); - m_iterators[index].Next = m_iterators[index].Iterator.MoveNext(cancellationToken); + m_iterators[index].Next = m_iterators[index].Iterator.MoveNextAsync(); } private static void Cleanup(IteratorState[] iterators) diff --git a/FoundationDB.Client/FdbMergeSortIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/MergeSortAsyncIterator.cs similarity index 73% rename from FoundationDB.Client/FdbMergeSortIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/MergeSortAsyncIterator.cs index 87b3555a4..dc8b2c031 100644 --- a/FoundationDB.Client/FdbMergeSortIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/MergeSortAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,30 +26,28 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Linq; using System; using System.Collections.Generic; - using System.Threading; /// Merge all the elements of several ordered queries into one single async sequence /// Type of the elements from the source async sequences /// Type of the keys extracted from the source elements /// Type of the elements of resulting async sequence - internal sealed class FdbMergeSortIterator : FdbQueryMergeIterator + public sealed class MergeSortAsyncIterator : MergeAsyncIterator { - public FdbMergeSortIterator(IEnumerable> sources, int? limit, Func keySelector, Func resultSelector, IComparer comparer) + public MergeSortAsyncIterator(IEnumerable> sources, int? limit, Func keySelector, Func resultSelector, IComparer comparer) : base(sources, limit, keySelector, resultSelector, comparer) { } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbMergeSortIterator(m_sources, m_limit, m_keySelector, m_resultSelector, m_keyComparer); + return new MergeSortAsyncIterator(m_sources, m_limit, m_keySelector, m_resultSelector, m_keyComparer); } - protected override bool FindNext(CancellationToken cancellationToken, out int index, out TSource current) + protected override bool FindNext(out int index, out TSource current) { index = -1; current = default(TSource); @@ -71,7 +69,7 @@ protected override bool FindNext(CancellationToken cancellationToken, out int in current = m_iterators[index].Iterator.Current; if (m_remaining == null || m_remaining.Value > 1) { // start getting the next value on this iterator - AdvanceIterator(index, cancellationToken); + AdvanceIterator(index); } } @@ -80,9 +78,9 @@ protected override bool FindNext(CancellationToken cancellationToken, out int in /// Apply a transformation on the results of the merge sort - public override FdbAsyncIterator Select(Func selector) + public 
override AsyncIterator Select(Func selector) { - return new FdbMergeSortIterator( + return new MergeSortAsyncIterator( m_sources, m_limit, m_keySelector, @@ -94,13 +92,13 @@ public override FdbAsyncIterator Select(Func selector /// Limit the number of elements returned by the MergeSort /// Maximum number of results to return /// New MergeSort that will only return the specified number of results - public override FdbAsyncIterator Take(int limit) + public override AsyncIterator Take(int limit) { - if (limit < 0) throw new ArgumentOutOfRangeException("limit", "Value cannot be less than zero"); + if (limit < 0) throw new ArgumentOutOfRangeException(nameof(limit), "Value cannot be less than zero"); if (m_limit != null && m_limit < limit) return this; - return new FdbMergeSortIterator( + return new MergeSortAsyncIterator( m_sources, limit, m_keySelector, diff --git a/FoundationDB.Client/Linq/Iterators/FdbObserverIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/ObserverAsyncIterator.cs similarity index 70% rename from FoundationDB.Client/Linq/Iterators/FdbObserverIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/ObserverAsyncIterator.cs index a19101a92..1a06eb70f 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbObserverIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/ObserverAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,42 +26,42 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Client.Utils; using System; - using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq.Async.Expressions; /// Observe the items of an async sequence /// Type of the observed elements - internal sealed class FdbObserverIterator : FdbAsyncFilterIterator + public sealed class ObserverAsyncIterator : AsyncFilterIterator { private readonly AsyncObserverExpression m_observer; - public FdbObserverIterator(IFdbAsyncEnumerable source, AsyncObserverExpression observer) + public ObserverAsyncIterator(IAsyncEnumerable source, AsyncObserverExpression observer) : base(source) { Contract.Requires(observer != null); m_observer = observer; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbObserverIterator(m_source, m_observer); + return new ObserverAsyncIterator(m_source, m_observer); } - protected override async Task OnNextAsync(CancellationToken cancellationToken) + protected override async Task OnNextAsync() { - while (!cancellationToken.IsCancellationRequested) + while (!m_ct.IsCancellationRequested) { - if (!await m_iterator.MoveNext(cancellationToken).ConfigureAwait(false)) + if (!await m_iterator.MoveNextAsync().ConfigureAwait(false)) { // completed return Completed(); } - if (cancellationToken.IsCancellationRequested) break; + if (m_ct.IsCancellationRequested) break; TSource current = m_iterator.Current; if (!m_observer.Async) @@ -70,13 +70,13 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo } else { - await m_observer.InvokeAsync(current, cancellationToken).ConfigureAwait(false); + await m_observer.InvokeAsync(current, m_ct).ConfigureAwait(false); } return Publish(current); } - return Canceled(cancellationToken); + return Canceled(); } } diff --git 
a/FoundationDB.Client/Linq/Iterators/FdbParallelSelectAsyncIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/ParallelSelectAsyncIterator.cs similarity index 82% rename from FoundationDB.Client/Linq/Iterators/FdbParallelSelectAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/ParallelSelectAsyncIterator.cs index 04748f9bd..7e82e3966 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbParallelSelectAsyncIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/ParallelSelectAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,22 +26,23 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -#undef FULL_DEBUG +//#define FULL_DEBUG -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Async; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Diagnostics; using System.Threading; using System.Threading.Tasks; + using Doxense.Async; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; + using JetBrains.Annotations; /// [EXPERIMENTAL] Iterates over an async sequence of items, kick off an async task in parallel, and returning the results in order /// Type of elements of the inner async sequence /// Type of elements of the outer async sequence - internal sealed class FdbParallelSelectAsyncIterator : FdbAsyncFilterIterator + public sealed class ParallelSelectAsyncIterator : AsyncFilterIterator { /// Default max concurrency when doing batch queries /// TODO: this is a placeholder value ! 
@@ -52,23 +53,23 @@ internal sealed class FdbParallelSelectAsyncIterator : FdbAsyn // Since we can't spin out too many tasks, we also want to be able to put a cap no the max number of pending tasks private readonly Func> m_taskSelector; - private readonly FdbParallelQueryOptions m_options; + private readonly ParallelAsyncQueryOptions m_options; private CancellationTokenSource m_cts; private CancellationToken m_token; private volatile bool m_done; /// Pump that reads values from the inner iterator - private FdbAsyncIteratorPump m_pump; + private AsyncIteratorPump m_pump; /// Inner pump task private Task m_pumpTask; /// Queue that holds items that are being processed private AsyncTransformQueue m_processingQueue; - public FdbParallelSelectAsyncIterator( - [NotNull] IFdbAsyncEnumerable source, + public ParallelSelectAsyncIterator( + [NotNull] IAsyncEnumerable source, [NotNull] Func> taskSelector, - [NotNull] FdbParallelQueryOptions options + [NotNull] ParallelAsyncQueryOptions options ) : base(source) { @@ -78,13 +79,13 @@ [NotNull] FdbParallelQueryOptions options m_options = options; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbParallelSelectAsyncIterator(m_source, m_taskSelector, m_options); + return new ParallelSelectAsyncIterator(m_source, m_taskSelector, m_options); } - protected override async Task OnFirstAsync(CancellationToken ct) + protected override async Task OnFirstAsync() { - if (!await base.OnFirstAsync(ct)) + if (!await base.OnFirstAsync()) { return false; } @@ -99,12 +100,13 @@ protected override async Task OnFirstAsync(CancellationToken ct) m_processingQueue = new AsyncTransformQueue(m_taskSelector, m_options.MaxConcurrency ?? 
DefaultMaxConcurrency, m_options.Scheduler); // we also need a pump that will work on the inner sequence - m_pump = new FdbAsyncIteratorPump(m_iterator, m_processingQueue); + m_pump = new AsyncIteratorPump(m_iterator, m_processingQueue); // start pumping m_pumpTask = m_pump.PumpAsync(m_token).ContinueWith((t) => - { - var e = t.Exception; + { + // ReSharper disable once RedundantAssignment + var e = t.Exception; // observe the exception LogDebug("Pump stopped with error: " + e.Message); }, TaskContinuationOptions.OnlyOnFaulted); @@ -115,7 +117,7 @@ protected override async Task OnFirstAsync(CancellationToken ct) return true; } - protected override async Task OnNextAsync(CancellationToken cancellationToken) + protected override async Task OnNextAsync() { try { @@ -123,13 +125,13 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo if (m_done) return false; - var next = await m_processingQueue.ReceiveAsync(cancellationToken).ConfigureAwait(false); + var next = await m_processingQueue.ReceiveAsync(m_ct).ConfigureAwait(false); LogDebug("[OnNextAsync] got result from queue"); if (!next.HasValue) { m_done = true; - if (next.HasFailed) + if (next.Failed) { LogDebug("[OnNextAsync] received failure"); // we want to make sure that the exception callstack is as clean as possible, @@ -179,7 +181,9 @@ protected override void Dispose(bool disposing) [Conditional("FULL_DEBUG")] private static void LogDebug(string msg) { +#if FULL_DEBUG Console.WriteLine("[SelectAsync] " + msg); +#endif } } diff --git a/FoundationDB.Client/Linq/Iterators/FdbPrefetchingAsyncIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/PrefetchingAsyncIterator.cs similarity index 76% rename from FoundationDB.Client/Linq/Iterators/FdbPrefetchingAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/PrefetchingAsyncIterator.cs index bea0581f4..344fc493e 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbPrefetchingAsyncIterator.cs +++ 
b/FoundationDB.Client/Shared/Linq/Async/Iterators/PrefetchingAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,18 +26,18 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Async; - using FoundationDB.Client.Utils; using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; /// Prefetches items from the inner sequence, before outputing them down the line. /// Type the the items from the source sequence - internal class FdbPrefetchingAsyncIterator : FdbAsyncFilterIterator + public class PrefetchingAsyncIterator : AsyncFilterIterator { // This iterator can be used to already ask for the next few items, while they are being processed somewhere down the line of the query. // This can be usefull, when combined with Batching or Windowing, to maximize the throughput of db queries that read pages of results at a time. 
@@ -52,30 +52,30 @@ internal class FdbPrefetchingAsyncIterator : FdbAsyncFilterIterator m_buffer; // holds on to the last pending call to m_iterator.MoveNext() when our buffer is full - private Task m_nextTask; + private Task m_nextTask; /// Create a new batching iterator /// Source sequence of items that must be batched by waves /// Maximum size of a batch to return down the line - public FdbPrefetchingAsyncIterator(IFdbAsyncEnumerable source, int prefetchCount) + public PrefetchingAsyncIterator(IAsyncEnumerable source, int prefetchCount) : base(source) { Contract.Requires(prefetchCount > 0); m_prefetchCount = prefetchCount; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbPrefetchingAsyncIterator(m_source, m_prefetchCount); + return new PrefetchingAsyncIterator(m_source, m_prefetchCount); } - protected override void OnStarted(IFdbAsyncEnumerator iterator) + protected override void OnStarted(IAsyncEnumerator iterator) { // pre-allocate the buffer with the number of slot we expect to use m_buffer = new Queue(m_prefetchCount); } - protected override Task OnNextAsync(CancellationToken ct) + protected override Task OnNextAsync() { var buffer = m_buffer; if (buffer != null && buffer.Count > 0) @@ -83,48 +83,47 @@ protected override Task OnNextAsync(CancellationToken ct) var nextTask = m_nextTask; if (nextTask == null || !m_nextTask.IsCompleted) { - var current = buffer.Dequeue(); - return Publish(current) ? TaskHelpers.TrueTask : TaskHelpers.FalseTask; + return TaskHelpers.FromResult(Publish(buffer.Dequeue())); } } - return PrefetchNextItemsAsync(ct); + return PrefetchNextItemsAsync(); } - protected virtual async Task PrefetchNextItemsAsync(CancellationToken ct) + protected virtual async Task PrefetchNextItemsAsync() { // read items from the source until the next call to Inner.MoveNext() is not already complete, or we have filled our prefetch buffer, then returns the first item in the buffer. 
- var t = Interlocked.Exchange(ref m_nextTask, null); - if (t == null) + var ft = Interlocked.Exchange(ref m_nextTask, null); + if (ft == null) { // read the next item from the inner iterator if (m_innerHasCompleted) return Completed(); - t = m_iterator.MoveNext(ct); + ft = m_iterator.MoveNextAsync(); } // always wait for the first item (so that we have at least something in the batch) - bool hasMore = await t.ConfigureAwait(false); + bool hasMore = await ft.ConfigureAwait(false); // most db queries will read items by chunks, so there is a high chance the the next following calls to MoveNext() will already be completed // as long as this is the case, and that our buffer is not full, continue eating items. Stop only when we end up with a pending task. - while (hasMore && !ct.IsCancellationRequested) + while (hasMore && !m_ct.IsCancellationRequested) { if (m_buffer == null) m_buffer = new Queue(m_prefetchCount); m_buffer.Enqueue(m_iterator.Current); - t = m_iterator.MoveNext(ct); - if (m_buffer.Count >= m_prefetchCount || !t.IsCompleted) + var vt = m_iterator.MoveNextAsync(); + if (m_buffer.Count >= m_prefetchCount || !vt.IsCompleted) { // save it for next time - m_nextTask = t; + m_nextTask = vt; break; } // we know the task is already completed, so we will immediately get the next result, or blow up if the inner iterator failed - hasMore = t.GetAwaiter().GetResult(); + hasMore = vt.Result; //note: if inner blows up, we won't send any previously read items down the line. This may change the behavior of queries with a .Take(N) that would have stopped before reading the (N+1)th item that would have failed. } - ct.ThrowIfCancellationRequested(); + m_ct.ThrowIfCancellationRequested(); if (!hasMore) { @@ -144,8 +143,7 @@ protected override void OnStopped() m_buffer = null; // defuse the task, which should fail once we dispose the inner iterator below... 
- var nextTask = Interlocked.Exchange(ref m_nextTask, null); - if (nextTask != null) TaskHelpers.Observe(nextTask); + Interlocked.Exchange(ref m_nextTask, null)?.Observed(); } } diff --git a/FoundationDB.Client/Linq/Iterators/FdbSelectManyAsyncIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/SelectManyAsyncIterator.cs similarity index 70% rename from FoundationDB.Client/Linq/Iterators/FdbSelectManyAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/SelectManyAsyncIterator.cs index fbf8ef6ce..96d3e7c63 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbSelectManyAsyncIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/SelectManyAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,24 +26,24 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; - using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq.Async.Expressions; + using JetBrains.Annotations; /// Iterates over an async sequence of items /// Type of elements of the inner async sequence /// Type of elements of the outer async sequence - internal sealed class FdbSelectManyAsyncIterator : FdbAsyncFilterIterator + public sealed class SelectManyAsyncIterator : AsyncFilterIterator { private readonly AsyncTransformExpression> m_selector; private IEnumerator m_batch; - public FdbSelectManyAsyncIterator([NotNull] IFdbAsyncEnumerable source, AsyncTransformExpression> selector) + public SelectManyAsyncIterator([NotNull] IAsyncEnumerable source, AsyncTransformExpression> selector) : base(source) { // Must have at least one, but not both @@ -52,28 
+52,28 @@ public FdbSelectManyAsyncIterator([NotNull] IFdbAsyncEnumerable source, m_selector = selector; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbSelectManyAsyncIterator(m_source, m_selector); + return new SelectManyAsyncIterator(m_source, m_selector); } - protected override async Task OnNextAsync(CancellationToken cancellationToken) + protected override async Task OnNextAsync() { // if we are in a batch, iterate over it // if not, wait for the next batch - while (!cancellationToken.IsCancellationRequested) + while (!m_ct.IsCancellationRequested) { if (m_batch == null) { - if (!await m_iterator.MoveNext(cancellationToken).ConfigureAwait(false)) + if (!await m_iterator.MoveNextAsync().ConfigureAwait(false)) { // inner completed return Completed(); } - if (cancellationToken.IsCancellationRequested) break; + if (m_ct.IsCancellationRequested) break; IEnumerable sequence; if (!m_selector.Async) @@ -82,7 +82,7 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo } else { - sequence = await m_selector.InvokeAsync(m_iterator.Current, cancellationToken).ConfigureAwait(false); + sequence = await m_selector.InvokeAsync(m_iterator.Current, m_ct).ConfigureAwait(false); } if (sequence == null) throw new InvalidOperationException("The inner sequence returned a null collection"); @@ -100,17 +100,14 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo return Publish(m_batch.Current); } - return Canceled(cancellationToken); + return Canceled(); } protected override void Cleanup() { try { - if (m_batch != null) - { - m_batch.Dispose(); - } + m_batch?.Dispose(); } finally { @@ -124,15 +121,15 @@ protected override void Cleanup() /// Type of elements of the inner async sequence /// Type of the elements of the sequences produced from each elements /// Type of elements of the outer async sequence - internal sealed class FdbSelectManyAsyncIterator : FdbAsyncFilterIterator 
+ internal sealed class SelectManyAsyncIterator : AsyncFilterIterator { private readonly AsyncTransformExpression> m_collectionSelector; private readonly Func m_resultSelector; private TSource m_sourceCurrent; private IEnumerator m_batch; - public FdbSelectManyAsyncIterator( - [NotNull] IFdbAsyncEnumerable source, + public SelectManyAsyncIterator( + [NotNull] IAsyncEnumerable source, AsyncTransformExpression> collectionSelector, [NotNull] Func resultSelector ) @@ -144,28 +141,28 @@ [NotNull] Func resultSelector m_resultSelector = resultSelector; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbSelectManyAsyncIterator(m_source, m_collectionSelector, m_resultSelector); + return new SelectManyAsyncIterator(m_source, m_collectionSelector, m_resultSelector); } - protected override async Task OnNextAsync(CancellationToken cancellationToken) + protected override async Task OnNextAsync() { // if we are in a batch, iterate over it // if not, wait for the next batch - while (!cancellationToken.IsCancellationRequested) + while (!m_ct.IsCancellationRequested) { - - if (m_batch == null) + var batch = m_batch; + if (batch == null) { - if (!await m_iterator.MoveNext(cancellationToken).ConfigureAwait(false)) + if (!await m_iterator.MoveNextAsync().ConfigureAwait(false)) { // inner completed return Completed(); } - if (cancellationToken.IsCancellationRequested) break; + if (m_ct.IsCancellationRequested) break; m_sourceCurrent = m_iterator.Current; @@ -177,36 +174,33 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo } else { - sequence = await m_collectionSelector.InvokeAsync(m_sourceCurrent, cancellationToken).ConfigureAwait(false); + sequence = await m_collectionSelector.InvokeAsync(m_sourceCurrent, m_ct).ConfigureAwait(false); } if (sequence == null) throw new InvalidOperationException("The inner sequence returned a null collection"); - m_batch = sequence.GetEnumerator(); - 
Contract.Requires(m_batch != null); + m_batch = batch = sequence.GetEnumerator(); + Contract.Requires(batch != null); } - if (!m_batch.MoveNext()) + if (!batch.MoveNext()) { // the current batch is exhausted, move to the next - m_batch.Dispose(); + batch.Dispose(); m_batch = null; m_sourceCurrent = default(TSource); continue; } - return Publish(m_resultSelector(m_sourceCurrent, m_batch.Current)); + return Publish(m_resultSelector(m_sourceCurrent, batch.Current)); } - return Canceled(cancellationToken); + return Canceled(); } protected override void Cleanup() { try - { - if (m_batch != null) - { - m_batch.Dispose(); - } + { // cleanup any pending batch + m_batch?.Dispose(); } finally { diff --git a/FoundationDB.Client/Linq/Iterators/FdbTakeWhileAsyncIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/TakeWhileAsyncIterator.cs similarity index 75% rename from FoundationDB.Client/Linq/Iterators/FdbTakeWhileAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/TakeWhileAsyncIterator.cs index efb9dd840..c63b143b6 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbTakeWhileAsyncIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/TakeWhileAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,22 +26,21 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; - using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Reads an async sequence of items until a condition becomes false /// Type of elements of the async sequence - internal sealed class FdbTakeWhileAsyncIterator : FdbAsyncFilterIterator + public sealed class TakeWhileAsyncIterator : AsyncFilterIterator { private readonly Func m_condition; //TODO: also accept a Func> ? - public FdbTakeWhileAsyncIterator([NotNull] IFdbAsyncEnumerable source, [NotNull] Func condition) + public TakeWhileAsyncIterator([NotNull] IAsyncEnumerable source, [NotNull] Func condition) : base(source) { Contract.Requires(condition != null); @@ -49,21 +48,21 @@ public FdbTakeWhileAsyncIterator([NotNull] IFdbAsyncEnumerable source, m_condition = condition; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbTakeWhileAsyncIterator(m_source, m_condition); + return new TakeWhileAsyncIterator(m_source, m_condition); } - protected override async Task OnNextAsync(CancellationToken ct) + protected override async Task OnNextAsync() { - while (!ct.IsCancellationRequested) + while (!m_ct.IsCancellationRequested) { - if (!await m_iterator.MoveNext(ct).ConfigureAwait(false)) + if (!await m_iterator.MoveNextAsync().ConfigureAwait(false)) { // completed return Completed(); } - if (ct.IsCancellationRequested) break; + if (m_ct.IsCancellationRequested) break; TSource current = m_iterator.Current; if (!m_condition(current)) @@ -73,7 +72,7 @@ protected override async Task OnNextAsync(CancellationToken ct) return Publish(current); } - return Canceled(ct); + return Canceled(); } } diff --git a/FoundationDB.Client/Linq/Iterators/FdbWhereAsyncIterator.cs 
b/FoundationDB.Client/Shared/Linq/Async/Iterators/WhereAsyncIterator.cs similarity index 64% rename from FoundationDB.Client/Linq/Iterators/FdbWhereAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/WhereAsyncIterator.cs index acafcb712..eebec9391 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbWhereAsyncIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/WhereAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,22 +26,23 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Async; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq.Async.Expressions; + using Doxense.Threading.Tasks; + using JetBrains.Annotations; /// Filters an async sequence of items /// Type of elements of the async sequence - internal sealed class FdbWhereAsyncIterator : FdbAsyncFilterIterator + public sealed class WhereAsyncIterator : AsyncFilterIterator { private readonly AsyncFilterExpression m_filter; - public FdbWhereAsyncIterator([NotNull] IFdbAsyncEnumerable source, AsyncFilterExpression filter) + public WhereAsyncIterator([NotNull] IAsyncEnumerable source, AsyncFilterExpression filter) : base(source) { Contract.Requires(filter != null, "there can be only one kind of filter specified"); @@ -49,21 +50,21 @@ public FdbWhereAsyncIterator([NotNull] IFdbAsyncEnumerable source, Asyn m_filter = filter; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbWhereAsyncIterator(m_source, m_filter); + return new WhereAsyncIterator(m_source, m_filter); } - protected override async Task 
OnNextAsync(CancellationToken cancellationToken) + protected override async Task OnNextAsync() { - while (!cancellationToken.IsCancellationRequested) + while (!m_ct.IsCancellationRequested) { - if (!await m_iterator.MoveNext(cancellationToken).ConfigureAwait(false)) + if (!await m_iterator.MoveNextAsync().ConfigureAwait(false)) { // completed return Completed(); } - if (cancellationToken.IsCancellationRequested) break; + if (m_ct.IsCancellationRequested) break; TSource current = m_iterator.Current; if (!m_filter.Async) @@ -75,7 +76,7 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo } else { - if (!await m_filter.InvokeAsync(current, cancellationToken).ConfigureAwait(false)) + if (!await m_filter.InvokeAsync(current, m_ct).ConfigureAwait(false)) { continue; } @@ -84,28 +85,28 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo return Publish(current); } - return Canceled(cancellationToken); + return Canceled(); } - public override FdbAsyncIterator Where(Func predicate) + public override AsyncIterator Where(Func predicate) { - return FdbAsyncEnumerable.Filter( + return AsyncEnumerable.Filter( m_source, m_filter.AndAlso(new AsyncFilterExpression(predicate)) ); } - public override FdbAsyncIterator Where(Func> asyncPredicate) + public override AsyncIterator Where(Func> asyncPredicate) { - return FdbAsyncEnumerable.Filter( + return AsyncEnumerable.Filter( m_source, m_filter.AndAlso(new AsyncFilterExpression(asyncPredicate)) ); } - public override FdbAsyncIterator Select(Func selector) + public override AsyncIterator Select(Func selector) { - return new FdbWhereSelectAsyncIterator( + return new WhereSelectAsyncIterator( m_source, m_filter, new AsyncTransformExpression(selector), @@ -114,9 +115,9 @@ public override FdbAsyncIterator Select(Func selector ); } - public override FdbAsyncIterator Select(Func> asyncSelector) + public override AsyncIterator Select(Func> asyncSelector) { - return new 
FdbWhereSelectAsyncIterator( + return new WhereSelectAsyncIterator( m_source, m_filter, new AsyncTransformExpression(asyncSelector), @@ -125,14 +126,14 @@ public override FdbAsyncIterator Select(Func Take(int limit) + public override AsyncIterator Take(int limit) { - if (limit < 0) throw new ArgumentOutOfRangeException("limit", "Limit cannot be less than zero"); + if (limit < 0) throw new ArgumentOutOfRangeException(nameof(limit), "Limit cannot be less than zero"); - return new FdbWhereSelectAsyncIterator( + return new WhereSelectAsyncIterator( m_source, m_filter, - new AsyncTransformExpression(TaskHelpers.Cache.Identity), + new AsyncTransformExpression(TaskHelpers.CachedTasks.Identity), limit: limit, offset: null ); @@ -140,15 +141,15 @@ public override FdbAsyncIterator Take(int limit) public override async Task ExecuteAsync(Action handler, CancellationToken ct) { - if (handler == null) throw new ArgumentNullException("handler"); + Contract.NotNull(handler, nameof(handler)); if (ct.IsCancellationRequested) ct.ThrowIfCancellationRequested(); - using (var iter = StartInner()) + using (var iter = StartInner(ct)) { if (!m_filter.Async) { - while (!ct.IsCancellationRequested && (await iter.MoveNext(ct).ConfigureAwait(false))) + while (!ct.IsCancellationRequested && (await iter.MoveNextAsync().ConfigureAwait(false))) { var current = iter.Current; if (m_filter.Invoke(current)) @@ -159,7 +160,7 @@ public override async Task ExecuteAsync(Action handler, CancellationTok } else { - while (!ct.IsCancellationRequested && (await iter.MoveNext(ct).ConfigureAwait(false))) + while (!ct.IsCancellationRequested && (await iter.MoveNextAsync().ConfigureAwait(false))) { var current = iter.Current; if (await m_filter.InvokeAsync(current, ct).ConfigureAwait(false)) @@ -175,15 +176,15 @@ public override async Task ExecuteAsync(Action handler, CancellationTok public override async Task ExecuteAsync(Func asyncHandler, CancellationToken ct) { - if (asyncHandler == null) throw new 
ArgumentNullException("asyncHandler"); + Contract.NotNull(asyncHandler, nameof(asyncHandler)); if (ct.IsCancellationRequested) ct.ThrowIfCancellationRequested(); - using (var iter = StartInner()) + using (var iter = StartInner(ct)) { if (!m_filter.Async) { - while (!ct.IsCancellationRequested && (await iter.MoveNext(ct).ConfigureAwait(false))) + while (!ct.IsCancellationRequested && (await iter.MoveNextAsync().ConfigureAwait(false))) { var current = iter.Current; if (m_filter.Invoke(current)) @@ -194,7 +195,7 @@ public override async Task ExecuteAsync(Func a } else { - while (!ct.IsCancellationRequested && (await iter.MoveNext(ct).ConfigureAwait(false))) + while (!ct.IsCancellationRequested && (await iter.MoveNextAsync().ConfigureAwait(false))) { var current = iter.Current; if (await m_filter.InvokeAsync(current, ct).ConfigureAwait(false)) diff --git a/FoundationDB.Client/Linq/Iterators/FdbWhereSelectAsyncIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/WhereSelectAsyncIterator.cs similarity index 70% rename from FoundationDB.Client/Linq/Iterators/FdbWhereSelectAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/WhereSelectAsyncIterator.cs index 5ea8a7dcc..6ae309a58 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbWhereSelectAsyncIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/WhereSelectAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,19 +26,20 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq.Async.Expressions; + using JetBrains.Annotations; /// Iterates over an async sequence of items /// Type of elements of the inner async sequence /// Type of elements of the outer async sequence - internal sealed class FdbWhereSelectAsyncIterator : FdbAsyncFilterIterator + public sealed class WhereSelectAsyncIterator : AsyncFilterIterator { private readonly AsyncFilterExpression m_filter; private readonly AsyncTransformExpression m_transform; @@ -50,8 +51,8 @@ internal sealed class FdbWhereSelectAsyncIterator : FdbAsyncFi private int? m_remaining; private int? m_skipped; - public FdbWhereSelectAsyncIterator( - [NotNull] IFdbAsyncEnumerable source, + public WhereSelectAsyncIterator( + [NotNull] IAsyncEnumerable source, AsyncFilterExpression filter, AsyncTransformExpression transform, int? limit, @@ -68,32 +69,32 @@ public FdbWhereSelectAsyncIterator( m_offset = offset; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbWhereSelectAsyncIterator(m_source, m_filter, m_transform, m_limit, m_offset); + return new WhereSelectAsyncIterator(m_source, m_filter, m_transform, m_limit, m_offset); } - protected override Task OnFirstAsync(CancellationToken ct) + protected override Task OnFirstAsync() { m_remaining = m_limit; m_skipped = m_offset; - return base.OnFirstAsync(ct); + return base.OnFirstAsync(); } - protected override async Task OnNextAsync(CancellationToken cancellationToken) + protected override async Task OnNextAsync() { if (m_remaining != null && m_remaining.Value <= 0) { // reached limit! 
return Completed(); } - while (!cancellationToken.IsCancellationRequested) + while (!m_ct.IsCancellationRequested) { - if (!await m_iterator.MoveNext(cancellationToken).ConfigureAwait(false)) + if (!await m_iterator.MoveNextAsync().ConfigureAwait(false)) { // completed return Completed(); } - if (cancellationToken.IsCancellationRequested) break; + if (m_ct.IsCancellationRequested) break; #region Filtering... @@ -106,7 +107,7 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo } else { - if (!await m_filter.InvokeAsync(current, cancellationToken).ConfigureAwait(false)) continue; + if (!await m_filter.InvokeAsync(current, m_ct).ConfigureAwait(false)) continue; } } @@ -136,31 +137,29 @@ protected override async Task OnNextAsync(CancellationToken cancellationTo } else { - result = await m_transform.InvokeAsync(current, cancellationToken).ConfigureAwait(false); + result = await m_transform.InvokeAsync(current, m_ct).ConfigureAwait(false); } #endregion #region Publishing... 
- if (m_remaining != null) - { // decrement remaining quota - m_remaining = m_remaining.Value - 1; - } + // decrement remaining quota + m_remaining = m_remaining - 1; return Publish(result); #endregion } - return Canceled(cancellationToken); + return Canceled(); } - public override FdbAsyncIterator Select(Func selector) + public override AsyncIterator Select(Func selector) { - if (selector == null) throw new ArgumentNullException("selector"); + Contract.NotNull(selector, nameof(selector)); - return new FdbWhereSelectAsyncIterator( + return new WhereSelectAsyncIterator( m_source, m_filter, m_transform.Then(new AsyncTransformExpression(selector)), @@ -169,11 +168,11 @@ public override FdbAsyncIterator Select(Func selector ); } - public override FdbAsyncIterator Select(Func> asyncSelector) + public override AsyncIterator Select(Func> asyncSelector) { - if (asyncSelector == null) throw new ArgumentNullException("asyncSelector"); + Contract.NotNull(asyncSelector, nameof(asyncSelector)); - return new FdbWhereSelectAsyncIterator( + return new WhereSelectAsyncIterator( m_source, m_filter, m_transform.Then(new AsyncTransformExpression(asyncSelector)), @@ -182,13 +181,13 @@ public override FdbAsyncIterator Select(Func SelectMany(Func> selector) + public override AsyncIterator SelectMany(Func> selector) { - if (selector == null) throw new ArgumentNullException("selector"); + Contract.NotNull(selector, nameof(selector)); if (m_filter == null && m_limit == null && m_offset == null) { - return new FdbSelectManyAsyncIterator( + return new SelectManyAsyncIterator( m_source, m_transform.Then(new AsyncTransformExpression>(selector)) ); @@ -198,13 +197,13 @@ public override FdbAsyncIterator SelectMany(Func(selector); } - public override FdbAsyncIterator SelectMany(Func>> asyncSelector) + public override AsyncIterator SelectMany(Func>> asyncSelector) { - if (asyncSelector == null) throw new ArgumentNullException("asyncSelector"); + Contract.NotNull(asyncSelector, 
nameof(asyncSelector)); if (m_filter == null && m_limit == null && m_offset == null) { - return new FdbSelectManyAsyncIterator( + return new SelectManyAsyncIterator( m_source, m_transform.Then(new AsyncTransformExpression>(asyncSelector)) ); @@ -214,9 +213,9 @@ public override FdbAsyncIterator SelectMany(Func(asyncSelector); } - public override FdbAsyncIterator Take(int limit) + public override AsyncIterator Take(int limit) { - if (limit < 0) throw new ArgumentOutOfRangeException("limit", "Limit cannot be less than zero"); + if (limit < 0) throw new ArgumentOutOfRangeException(nameof(limit), "Limit cannot be less than zero"); if (m_limit != null && m_limit.Value <= limit) { @@ -224,7 +223,7 @@ public override FdbAsyncIterator Take(int limit) return this; } - return new FdbWhereSelectAsyncIterator( + return new WhereSelectAsyncIterator( m_source, m_filter, m_transform, @@ -233,15 +232,15 @@ public override FdbAsyncIterator Take(int limit) ); } - public override FdbAsyncIterator Skip(int offset) + public override AsyncIterator Skip(int offset) { - if (offset < 0) throw new ArgumentOutOfRangeException("offset", "Offset cannot be less than zero"); + if (offset < 0) throw new ArgumentOutOfRangeException(nameof(offset), "Offset cannot be less than zero"); if (offset == 0) return this; if (m_offset != null) offset += m_offset.Value; - return new FdbWhereSelectAsyncIterator( + return new WhereSelectAsyncIterator( m_source, m_filter, m_transform, @@ -250,9 +249,9 @@ public override FdbAsyncIterator Skip(int offset) ); } - public override FdbAsyncIterator Where(Func predicate) + public override AsyncIterator Where(Func predicate) { - if (predicate == null) throw new ArgumentNullException("predicate"); + Contract.NotNull(predicate, nameof(predicate)); // note: the only possible optimization here is if TSource == TResult, then we can combine both predicates // remember: limit/offset are applied AFTER the filtering, so can only combine if they are null @@ -266,9 +265,9 @@ 
public override FdbAsyncIterator Where(Func predicate) if (m_filter != null) filter = m_filter.AndAlso(filter); //BUGBUG: if the query already has a select, it should be evaluated BEFORE the new filter, - // but currently FdbWhereSelectAsyncIterator<> filters before transformations ! + // but currently WhereSelectAsyncIterator<> filters before transformations ! - return new FdbWhereSelectAsyncIterator( + return new WhereSelectAsyncIterator( m_source, filter, m_transform, @@ -281,9 +280,9 @@ public override FdbAsyncIterator Where(Func predicate) return base.Where(predicate); } - public override FdbAsyncIterator Where(Func> asyncPredicate) + public override AsyncIterator Where(Func> asyncPredicate) { - if (asyncPredicate == null) throw new ArgumentNullException("asyncPredicate"); + Contract.NotNull(asyncPredicate, nameof(asyncPredicate)); // note: the only possible optimization here is if TSource == TResult, then we can combine both predicates // remember: limit/offset are applied AFTER the filtering, so can only combine if they are null @@ -293,9 +292,9 @@ public override FdbAsyncIterator Where(Func filters before transformations ! + // but currently WhereSelectAsyncIterator<> filters before transformations ! - return new FdbWhereSelectAsyncIterator( + return new WhereSelectAsyncIterator( m_source, asyncFilter, m_transform, @@ -310,16 +309,16 @@ public override FdbAsyncIterator Where(Func action, CancellationToken ct) { - if (action == null) throw new ArgumentNullException("action"); + Contract.NotNull(action, nameof(action)); int? remaining = m_limit; int? skipped = m_offset; - using (var iterator = StartInner()) + using (var iterator = StartInner(ct)) { while (remaining == null || remaining.Value > 0) { - if (!await iterator.MoveNext(ct).ConfigureAwait(false)) + if (!await iterator.MoveNextAsync().ConfigureAwait(false)) { // completed break; } @@ -366,10 +365,8 @@ public override async Task ExecuteAsync(Action action, CancellationToke // Publish... 
- if (remaining != null) - { // decrement remaining quota - remaining = remaining.Value - 1; - } + // decrement remaining quota + remaining = remaining - 1; action(result); } @@ -379,16 +376,16 @@ public override async Task ExecuteAsync(Action action, CancellationToke public override async Task ExecuteAsync(Func asyncAction, CancellationToken ct) { - if (asyncAction == null) throw new ArgumentNullException("asyncAction"); + Contract.NotNull(asyncAction, nameof(asyncAction)); int? remaining = m_limit; int? skipped = m_offset; - using (var iterator = StartInner()) + using (var iterator = StartInner(ct)) { while (remaining == null || remaining.Value > 0) { - if (!await iterator.MoveNext(ct).ConfigureAwait(false)) + if (!await iterator.MoveNextAsync().ConfigureAwait(false)) { // completed break; } @@ -435,10 +432,9 @@ public override async Task ExecuteAsync(Func a // Publish... - if (remaining != null) - { // decrement remaining quota - remaining = remaining.Value - 1; - } + // decrement remaining quota + remaining = remaining - 1; + await asyncAction(result, ct).ConfigureAwait(false); } @@ -447,4 +443,4 @@ public override async Task ExecuteAsync(Func a } } -} \ No newline at end of file +} diff --git a/FoundationDB.Client/Linq/Iterators/FdbWindowingAsyncIterator.cs b/FoundationDB.Client/Shared/Linq/Async/Iterators/WindowingAsyncIterator.cs similarity index 86% rename from FoundationDB.Client/Linq/Iterators/FdbWindowingAsyncIterator.cs rename to FoundationDB.Client/Shared/Linq/Async/Iterators/WindowingAsyncIterator.cs index 69dd2f4ad..0c3100777 100644 --- a/FoundationDB.Client/Linq/Iterators/FdbWindowingAsyncIterator.cs +++ b/FoundationDB.Client/Shared/Linq/Async/Iterators/WindowingAsyncIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,18 +26,18 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq.Async.Iterators { - using FoundationDB.Async; - using FoundationDB.Client.Utils; using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; /// Merges bursts of already-completed items from a source async sequence, into a sequence of batches. /// Type the the items from the source sequence - internal class FdbWindowingAsyncIterator : FdbAsyncFilterIterator + public class WindowingAsyncIterator : AsyncFilterIterator { // Typical use cas: to merge back into arrays the result of readers that read one page at a time from the database, but return each item individually. // This iterator will attempt to reconstruct full batches from sequences of items that where all produced at the same time, so that asynchronous operations @@ -94,69 +94,69 @@ internal class FdbWindowingAsyncIterator : FdbAsyncFilterIterator m_buffer; // holds on to the last pending call to m_iterator.MoveNext() when our buffer is full - private Task m_nextTask; + private Task m_nextTask; /// Create a new batching iterator /// Source sequence of items that must be batched by waves /// Maximum size of a batch to return down the line - public FdbWindowingAsyncIterator(IFdbAsyncEnumerable source, int maxWindowSize) + public WindowingAsyncIterator(IAsyncEnumerable source, int maxWindowSize) : base(source) { Contract.Requires(maxWindowSize > 0); m_maxWindowSize = maxWindowSize; } - protected override FdbAsyncIterator Clone() + protected override AsyncIterator Clone() { - return new FdbWindowingAsyncIterator(m_source, m_maxWindowSize); + return new WindowingAsyncIterator(m_source, m_maxWindowSize); } - protected override void OnStarted(IFdbAsyncEnumerator iterator) + protected override void OnStarted(IAsyncEnumerator iterator) { // pre-allocate the inner buffer, if it is not too big m_buffer = 
new List(Math.Min(m_maxWindowSize, 1024)); } - protected override async Task OnNextAsync(CancellationToken ct) + protected override async Task OnNextAsync() { // read items from the source until the next call to Inner.MoveNext() is not already complete, or we have filled our buffer var iterator = m_iterator; var buffer = m_buffer; - var t = Interlocked.Exchange(ref m_nextTask, null); - if (t == null) + var ft = Interlocked.Exchange(ref m_nextTask, null); + if (ft == null) { // read the next item from the inner iterator if (m_innerHasCompleted) return Completed(); - t = iterator.MoveNext(ct); + ft = iterator.MoveNextAsync(); } // always wait for the first item (so that we have at least something in the batch) - bool hasMore = await t.ConfigureAwait(false); + bool hasMore = await ft.ConfigureAwait(false); // most db queries will read items by chunks, so there is a high chance the the next following calls to MoveNext() will already be completed // as long as this is the case, and that our buffer is not full, continue eating items. Stop only when we end up with a pending task. - while (hasMore && !ct.IsCancellationRequested) + while (hasMore && !m_ct.IsCancellationRequested) { buffer.Add(iterator.Current); - t = iterator.MoveNext(ct); - if (buffer.Count >= m_maxWindowSize || !t.IsCompleted) + var vt = iterator.MoveNextAsync(); + if (buffer.Count >= m_maxWindowSize || !vt.IsCompleted) { // save it for next time //TODO: add heuristics to check if the batch is large enough to stop there, or if we should eat the latency and wait for the next wave of items to arrive! // ex: we batch by 10, inner return 11 consecutive items. We will transform the first 10, then only fill the next batch with the 11th item because the 12th item is still not ready. 
- m_nextTask = t; + m_nextTask = vt; break; } // we know the task is already completed, so we will immediately get the next result, or blow up if the inner iterator failed - hasMore = t.GetAwaiter().GetResult(); + hasMore = vt.Result; //note: if inner blows up, we won't send any previously read items down the line. This may change the behavior of queries with a .Take(N) that would have stopped before reading the (N+1)th item that would have failed. } - ct.ThrowIfCancellationRequested(); + m_ct.ThrowIfCancellationRequested(); if (!hasMore) { @@ -180,8 +180,7 @@ protected override void OnStopped() m_buffer = null; // defuse the task, which should fail once we dispose the inner iterator below... - var nextTask = Interlocked.Exchange(ref m_nextTask, null); - if (nextTask != null) TaskHelpers.Observe(nextTask); + Interlocked.Exchange(ref m_nextTask, null)?.Observed(); } } diff --git a/FoundationDB.Client/Linq/FdbAsyncEnumerable.EmptySequence.cs b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.EmptySequence.cs similarity index 51% rename from FoundationDB.Client/Linq/FdbAsyncEnumerable.EmptySequence.cs rename to FoundationDB.Client/Shared/Linq/AsyncEnumerable.EmptySequence.cs index be338d299..ea5629ac3 100644 --- a/FoundationDB.Client/Linq/FdbAsyncEnumerable.EmptySequence.cs +++ b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.EmptySequence.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,58 +26,47 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq { - using FoundationDB.Async; - using FoundationDB.Client.Utils; using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Threading.Tasks; - public static partial class FdbAsyncEnumerable + public static partial class AsyncEnumerable { /// An empty sequence - private sealed class EmptySequence : IFdbAsyncEnumerable, IFdbAsyncEnumerator + private sealed class EmptySequence : IAsyncEnumerable, IAsyncEnumerator { public static readonly EmptySequence Default = new EmptySequence(); private EmptySequence() { } - Task IAsyncEnumerator.MoveNext(CancellationToken cancellationToken) + public IAsyncEnumerator GetEnumerator(CancellationToken ct, AsyncIterationHint mode) { - cancellationToken.ThrowIfCancellationRequested(); - return TaskHelpers.FalseTask; + ct.ThrowIfCancellationRequested(); + return this; } - TSource IAsyncEnumerator.Current + Task IAsyncEnumerator.MoveNextAsync() { - get { throw new InvalidOperationException("This sequence is emty"); } + return TaskHelpers.False; } - void IDisposable.Dispose() - { - // NOOP! 
- } + TSource IAsyncEnumerator.Current => default(TSource); - public IAsyncEnumerator GetEnumerator() - { - return this; - } + void IDisposable.Dispose() + { } - public IFdbAsyncEnumerator GetEnumerator(FdbAsyncMode mode) - { - return this; - } } - private sealed class SingletonSequence : IFdbAsyncEnumerable, IFdbAsyncEnumerator + private sealed class SingletonSequence : IAsyncEnumerable { private readonly Delegate m_lambda; - private TElement m_current; - private bool m_called; private SingletonSequence(Delegate lambda) { @@ -97,54 +86,67 @@ public SingletonSequence(Func> lambda) : this((Delegate)lambda) { } - public IFdbAsyncEnumerator GetEnumerator(FdbAsyncMode mode = FdbAsyncMode.Default) + public IAsyncEnumerator GetEnumerator(CancellationToken ct, AsyncIterationHint mode) { - return new SingletonSequence(m_lambda); + ct.ThrowIfCancellationRequested(); + return new Enumerator(m_lambda, ct); } - IAsyncEnumerator IAsyncEnumerable.GetEnumerator() + private sealed class Enumerator : IAsyncEnumerator { - return this.GetEnumerator(); - } + //REVIEW: we could have specialized version for Task returning vs non-Task returning lambdas - async Task IAsyncEnumerator.MoveNext(CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - if (m_called) return false; + private CancellationToken m_ct; + private readonly Delegate m_lambda; + private bool m_called; + private TElement m_current; - //note: avoid using local variables as much as possible! 
- m_called = true; - var lambda = m_lambda; - if (lambda is Func) + public Enumerator(Delegate lambda, CancellationToken ct) { - m_current = ((Func)lambda)(); - return true; + m_ct = ct; + m_lambda = lambda; } - if (lambda is Func>) + public async Task MoveNextAsync() { - m_current = await ((Func>)lambda)().ConfigureAwait(false); - return true; + m_ct.ThrowIfCancellationRequested(); + if (m_called) + { + m_current = default(TElement); + return false; + } + + //note: avoid using local variables as much as possible! + m_called = true; + var lambda = m_lambda; + if (lambda is Func f) + { + m_current = f(); + return true; + } + + if (lambda is Func> ft) + { + m_current = await ft().ConfigureAwait(false); + return true; + } + + if (lambda is Func> fct) + { + m_current = await fct(m_ct).ConfigureAwait(false); + return true; + } + + throw new InvalidOperationException("Unsupported delegate type"); } - if (lambda is Func>) + public TElement Current => m_current; + + public void Dispose() { - m_current = await ((Func>)lambda)(cancellationToken).ConfigureAwait(false); - return true; + m_called = true; + m_current = default(TElement); } - - throw new InvalidOperationException("Unsupported delegate type"); - } - - TElement IAsyncEnumerator.Current - { - get { return m_current; } - } - - void IDisposable.Dispose() - { - m_called = true; - m_current = default(TElement); } } } diff --git a/FoundationDB.Client/Linq/FdbAsyncEnumerable.EnumerableIterator.cs b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.EnumerableIterator.cs similarity index 86% rename from FoundationDB.Client/Linq/FdbAsyncEnumerable.EnumerableIterator.cs rename to FoundationDB.Client/Shared/Linq/AsyncEnumerable.EnumerableIterator.cs index 2085bbb61..85eacc712 100644 --- a/FoundationDB.Client/Linq/FdbAsyncEnumerable.EnumerableIterator.cs +++ b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.EnumerableIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, 
Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,37 +26,40 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq { - using FoundationDB.Client.Utils; using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; + using Doxense.Async; + using Doxense.Diagnostics.Contracts; - public static partial class FdbAsyncEnumerable + public static partial class AsyncEnumerable { /// Iterates over a sequence of items /// Type of elements of the inner sequence /// Type of elements of the outer async sequence - internal sealed class EnumerableIterator : IFdbAsyncEnumerator + internal sealed class EnumerableIterator : IAsyncEnumerator { private IEnumerator m_iterator; private Func> m_transform; private bool m_disposed; private TResult m_current; + private CancellationToken m_ct; - public EnumerableIterator(IEnumerator iterator, Func> transform) + public EnumerableIterator(IEnumerator iterator, Func> transform, CancellationToken ct) { Contract.Requires(iterator != null && transform != null); m_iterator = iterator; m_transform = transform; + m_ct = ct; } - public async Task MoveNext(CancellationToken cancellationToken) + public async Task MoveNextAsync() { if (m_disposed) { @@ -64,7 +67,7 @@ public async Task MoveNext(CancellationToken cancellationToken) return false; } - cancellationToken.ThrowIfCancellationRequested(); + m_ct.ThrowIfCancellationRequested(); if (m_iterator.MoveNext()) { @@ -88,14 +91,12 @@ public TResult Current public void Dispose() { - if (m_iterator != null) - { - m_iterator.Dispose(); - } + m_iterator?.Dispose(); m_iterator = null; m_transform = null; m_disposed = true; m_current = default(TResult); + m_ct = default(CancellationToken); } } diff --git a/FoundationDB.Client/Linq/FdbAsyncEnumerable.EnumerableSequence.cs b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.EnumerableSequence.cs similarity 
index 80% rename from FoundationDB.Client/Linq/FdbAsyncEnumerable.EnumerableSequence.cs rename to FoundationDB.Client/Shared/Linq/AsyncEnumerable.EnumerableSequence.cs index d55aa3c52..224873260 100644 --- a/FoundationDB.Client/Linq/FdbAsyncEnumerable.EnumerableSequence.cs +++ b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.EnumerableSequence.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,36 +26,32 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq { - using FoundationDB.Async; - using FoundationDB.Client.Utils; using System; using System.Collections.Generic; + using System.Threading; + using Doxense.Diagnostics.Contracts; + using Doxense.Async; - public static partial class FdbAsyncEnumerable + public static partial class AsyncEnumerable { /// Wraps a sequence of items into an async sequence of items /// Type of elements of the inner sequence /// Type of elements of the outer async sequence - internal sealed class EnumerableSequence : IFdbAsyncEnumerable + internal sealed class EnumerableSequence : IAsyncEnumerable { public readonly IEnumerable Source; - public readonly Func, IFdbAsyncEnumerator> Factory; + public readonly Func, CancellationToken, IAsyncEnumerator> Factory; - public EnumerableSequence(IEnumerable source, Func, IFdbAsyncEnumerator> factory) + public EnumerableSequence(IEnumerable source, Func, CancellationToken, IAsyncEnumerator> factory) { this.Source = source; this.Factory = factory; } - public IAsyncEnumerator GetEnumerator() - { - return this.GetEnumerator(FdbAsyncMode.Default); - } - - public IFdbAsyncEnumerator GetEnumerator(FdbAsyncMode _) + public IAsyncEnumerator GetEnumerator(CancellationToken ct, AsyncIterationHint _) { IEnumerator inner = null; try @@ -63,7 +59,7 @@ public IFdbAsyncEnumerator 
GetEnumerator(FdbAsyncMode _) inner = this.Source.GetEnumerator(); Contract.Assert(inner != null, "The underlying sequence returned an empty enumerator"); - var outer = this.Factory(inner); + var outer = this.Factory(inner, ct); if (outer == null) throw new InvalidOperationException("The async factory returned en empty enumerator"); return outer; @@ -71,7 +67,7 @@ public IFdbAsyncEnumerator GetEnumerator(FdbAsyncMode _) catch (Exception) { //make sure that the inner iterator gets disposed if something went wrong - if (inner != null) inner.Dispose(); + inner?.Dispose(); throw; } } diff --git a/FoundationDB.Client/Linq/FdbAsyncEnumerable.Iterators.cs b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.Iterators.cs similarity index 51% rename from FoundationDB.Client/Linq/FdbAsyncEnumerable.Iterators.cs rename to FoundationDB.Client/Shared/Linq/AsyncEnumerable.Iterators.cs index ffe927d0c..34594ec52 100644 --- a/FoundationDB.Client/Linq/FdbAsyncEnumerable.Iterators.cs +++ b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.Iterators.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,19 +26,21 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion - -namespace FoundationDB.Linq +namespace Doxense.Linq { - using FoundationDB.Async; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq.Async; + using Doxense.Linq.Async.Expressions; + using Doxense.Linq.Async.Iterators; + using Doxense.Threading.Tasks; + using JetBrains.Annotations; - public static partial class FdbAsyncEnumerable + public static partial class AsyncEnumerable { #region Create... 
@@ -49,12 +51,12 @@ public static partial class FdbAsyncEnumerable /// Source async sequence that will be wrapped /// Factory method called when the outer sequence starts iterating. Must return an async enumerator /// New async sequence - internal static FdbAsyncSequence Create( - IFdbAsyncEnumerable source, - Func, - IFdbAsyncEnumerator> factory) + internal static AsyncSequence Create( + IAsyncEnumerable source, + Func, + IAsyncEnumerator> factory) { - return new FdbAsyncSequence(source, factory); + return new AsyncSequence(source, factory); } /// Create a new async sequence that will transform an inner sequence @@ -65,41 +67,36 @@ internal static FdbAsyncSequence Create( /// New async sequence internal static EnumerableSequence Create( IEnumerable source, - Func, - IFdbAsyncEnumerator> factory) + Func, CancellationToken, IAsyncEnumerator> factory) { return new EnumerableSequence(source, factory); } /// Create a new async sequence from a factory method - public static IFdbAsyncEnumerable Create( - Func> factory, + public static IAsyncEnumerable Create( + Func> factory, object state = null) { return new AnonymousIterable(factory, state); } - internal sealed class AnonymousIterable : IFdbAsyncEnumerable + internal sealed class AnonymousIterable : IAsyncEnumerable { - private readonly Func> m_factory; + private readonly Func> m_factory; private readonly object m_state; - public AnonymousIterable(Func> factory, object state) + public AnonymousIterable(Func> factory, object state) { Contract.Requires(factory != null); m_factory = factory; m_state = state; } - public IAsyncEnumerator GetEnumerator() + public IAsyncEnumerator GetEnumerator(CancellationToken ct, AsyncIterationHint _) { - return this.GetEnumerator(FdbAsyncMode.Default); - } - - public IFdbAsyncEnumerator GetEnumerator(FdbAsyncMode _) - { - return m_factory(m_state); + ct.ThrowIfCancellationRequested(); + return m_factory(m_state, ct); } } @@ -108,20 +105,20 @@ public IFdbAsyncEnumerator 
GetEnumerator(FdbAsyncMode _) #region Helpers... [NotNull] - internal static FdbSelectManyAsyncIterator Flatten( - [NotNull] IFdbAsyncEnumerable source, + internal static SelectManyAsyncIterator Flatten( + [NotNull] IAsyncEnumerable source, [NotNull] AsyncTransformExpression> selector) { - return new FdbSelectManyAsyncIterator(source, selector); + return new SelectManyAsyncIterator(source, selector); } [NotNull] - internal static FdbSelectManyAsyncIterator Flatten( - [NotNull] IFdbAsyncEnumerable source, + internal static SelectManyAsyncIterator Flatten( + [NotNull] IAsyncEnumerable source, [NotNull] AsyncTransformExpression> collectionSelector, [NotNull] Func resultSelector) { - return new FdbSelectManyAsyncIterator( + return new SelectManyAsyncIterator( source, collectionSelector, resultSelector @@ -129,212 +126,51 @@ internal static FdbSelectManyAsyncIterator Flatte } [NotNull] - internal static FdbWhereSelectAsyncIterator Map( - [NotNull] IFdbAsyncEnumerable source, + internal static WhereSelectAsyncIterator Map( + [NotNull] IAsyncEnumerable source, [NotNull] AsyncTransformExpression selector, int? limit = null, int? 
offset = null) { - return new FdbWhereSelectAsyncIterator(source, filter: null, transform: selector, limit: limit, offset: offset); + return new WhereSelectAsyncIterator(source, filter: null, transform: selector, limit: limit, offset: offset); } [NotNull] - internal static FdbWhereAsyncIterator Filter( - [NotNull] IFdbAsyncEnumerable source, + internal static WhereAsyncIterator Filter( + [NotNull] IAsyncEnumerable source, [NotNull] AsyncFilterExpression filter) { - return new FdbWhereAsyncIterator(source, filter); + return new WhereAsyncIterator(source, filter); } [NotNull] - internal static FdbWhereSelectAsyncIterator Offset( - [NotNull] IFdbAsyncEnumerable source, + internal static WhereSelectAsyncIterator Offset( + [NotNull] IAsyncEnumerable source, int offset) { - return new FdbWhereSelectAsyncIterator(source, filter: null, transform: new AsyncTransformExpression(TaskHelpers.Cache.Identity), limit: null, offset: offset); + return new WhereSelectAsyncIterator(source, filter: null, transform: new AsyncTransformExpression(TaskHelpers.CachedTasks.Identity), limit: null, offset: offset); } [NotNull] - internal static FdbWhereSelectAsyncIterator Limit( - [NotNull] IFdbAsyncEnumerable source, + internal static WhereSelectAsyncIterator Limit( + [NotNull] IAsyncEnumerable source, int limit) { - return new FdbWhereSelectAsyncIterator(source, filter: null, transform: new AsyncTransformExpression(TaskHelpers.Cache.Identity), limit: limit, offset: null); + return new WhereSelectAsyncIterator(source, filter: null, transform: new AsyncTransformExpression(TaskHelpers.CachedTasks.Identity), limit: limit, offset: null); } [NotNull] - internal static FdbTakeWhileAsyncIterator Limit( - [NotNull] IFdbAsyncEnumerable source, + internal static TakeWhileAsyncIterator Limit( + [NotNull] IAsyncEnumerable source, [NotNull] Func condition) { - return new FdbTakeWhileAsyncIterator(source, condition); + return new TakeWhileAsyncIterator(source, condition); } #endregion #region Run... 
- /// Small buffer that keeps a list of chunks that are larger and larger - /// Type of elements stored in the buffer - [DebuggerDisplay("Count={Count}, Chunks={this.Chunks.Length}, Current={Index}/{Current.Length}")] - internal class Buffer - { - // We want to avoid growing the same array again and again ! - // Instead, we grow list of chunks, that grow in size (until a max), and concatenate all the chunks together at the end, once we know the final size - - /// Default intial capacity, if not specified - const int DefaultCapacity = 16; - //REVIEW: should we use a power of 2 or of 10 for initial capacity? - // Since humans prefer the decimal system, it is more likely that query limit count be set to something like 10, 50, 100 or 1000 - // but most "human friendly" limits are close to the next power of 2, like 10 ~= 16, 50 ~= 64, 100 ~= 128, 500 ~= 512, 1000 ~= 1024, so we don't waste that much space... - - /// Maximum size of a chunk - const int MaxChunkSize = 4096; - - /// Number of items in the buffer - public int Count; - /// Index in the current chunk - public int Index; - /// List of chunks - public T[][] Chunks; - /// Current (and last) chunk - public T[] Current; - - public Buffer(int capacity = 0) - { - if (capacity <= 0) capacity = DefaultCapacity; - - this.Count = 0; - this.Index = 0; - this.Chunks = new T[1][]; - this.Current = new T[capacity]; - this.Chunks[0] = this.Current; - } - - public void Add(T item) - { - if (this.Index == this.Current.Length) - { - Grow(); - } - - checked { ++this.Count; } - this.Current[this.Index++] = item; - } - - private void Grow() - { - // Growth rate: - // - newly created chunk is always half the total size - // - except the first chunk who is set to the inital capacity - - Array.Resize(ref this.Chunks, this.Chunks.Length + 1); - this.Current = new T[Math.Min(this.Count, MaxChunkSize)]; - this.Chunks[this.Chunks.Length - 1] = this.Current; - this.Index = 0; - } - - [NotNull] - private T[] MergeChunks() - { - var tmp = 
new T[this.Count]; - int count = this.Count; - int index = 0; - for (int i = 0; i < this.Chunks.Length - 1; i++) - { - var chunk = this.Chunks[i]; - Array.Copy(chunk, 0, tmp, index, chunk.Length); - index += chunk.Length; - count -= chunk.Length; - } - Array.Copy(this.Current, 0, tmp, index, count); - return tmp; - } - - /// Return a buffer containing all of the items - /// Buffer that contains all the items, and may be larger than required - /// This is equivalent to calling ToArray(), except that if the buffer is empty, or if it consists of a single page, then no new allocations will be performed. - [NotNull] - public T[] GetBuffer() - { - //note: this is called by internal operator like OrderBy - // In this case we want to reduce the copying as much as possible, - // and we can suppose that the buffer won't be exposed to the application - - if (this.Count == 0) - { // empty - return new T[0]; - } - else if (this.Chunks.Length == 1) - { // everything fits in a single chunk - return this.Current; - } - else - { // we need to stitch all the buffers together - return MergeChunks(); - } - } - - /// Return the content of the buffer - /// Array of size containing all the items in this buffer - [NotNull] - public T[] ToArray() - { - if (this.Count == 0) - { // empty sequence - return new T[0]; - } - else if (this.Chunks.Length == 1 && this.Current.Length == this.Count) - { // a single buffer page was used - return this.Current; - } - else - { // concatenate all the buffer pages into one big array - return MergeChunks(); - } - } - - /// Return the content of the buffer - /// List of size containing all the items in this buffer - [NotNull] - public List ToList() - { - int count = this.Count; - if (count == 0) - { // empty sequence - return new List(); - } - - var list = new List(count); - if (count > 0) - { - var chunks = this.Chunks; - for (int i = 0; i < chunks.Length - 1; i++) - { - list.AddRange(chunks[i]); - count -= chunks[i].Length; - } - - var current = 
this.Current; - if (count == current.Length) - { // the last chunk fits perfectly - list.AddRange(current); - } - else - { // there is no List.AddRange(buffer, offset, count), and copying in a tmp buffer would waste the memory we tried to save with the buffer - // also, for most of the small queries, like FirstOrDefault()/SingleOrDefault(), count will be 1 (or very small) so calling Add(T) will still be optimum - for (int i = 0; i < count; i++) - { - list.Add(current[i]); - } - } - } - - return list; - } - - } - /// Immediately execute an action on each element of an async sequence /// Type of elements of the async sequence /// Source async sequence @@ -343,24 +179,24 @@ public List ToList() /// Cancellation token that can be used to cancel the operation /// Number of items that have been processed internal static async Task Run( - [NotNull] IFdbAsyncEnumerable source, - FdbAsyncMode mode, + [NotNull] IAsyncEnumerable source, + AsyncIterationHint mode, [NotNull, InstantHandle] Action action, CancellationToken ct) { - if (source == null) throw new ArgumentNullException("source"); - if (action == null) throw new ArgumentNullException("action"); + Contract.NotNull(source, nameof(source)); + Contract.NotNull(action, nameof(action)); ct.ThrowIfCancellationRequested(); //note: we should not use "ConfigureAwait(false)" here because we would like to execute the action in the original synchronization context if possible... 
long count = 0; - using (var iterator = source.GetEnumerator(mode)) + using (var iterator = source.GetEnumerator(ct, mode)) { Contract.Assert(iterator != null, "The underlying sequence returned a null async iterator"); - while (await iterator.MoveNext(ct)) + while (await iterator.MoveNextAsync()) { action(iterator.Current); ++count; @@ -377,24 +213,24 @@ internal static async Task Run( /// Cancellation token that can be used to cancel the operation /// Number of items that have been processed successfully internal static async Task Run( - [NotNull] IFdbAsyncEnumerable source, - FdbAsyncMode mode, + [NotNull] IAsyncEnumerable source, + AsyncIterationHint mode, [NotNull] Func action, CancellationToken ct) { - if (source == null) throw new ArgumentNullException("source"); - if (action == null) throw new ArgumentNullException("action"); + Contract.NotNull(source, nameof(source)); + Contract.NotNull(action, nameof(action)); ct.ThrowIfCancellationRequested(); //note: we should not use "ConfigureAwait(false)" here because we would like to execute the action in the original synchronization context if possible... 
long count = 0; - using (var iterator = source.GetEnumerator(mode)) + using (var iterator = source.GetEnumerator(ct, mode)) { Contract.Assert(iterator != null, "The underlying sequence returned a null async iterator"); - while (await iterator.MoveNext(ct)) + while (await iterator.MoveNextAsync()) { if (!action(iterator.Current)) { @@ -414,8 +250,8 @@ internal static async Task Run( /// Cancellation token that can be used to cancel the operation /// Number of items that have been processed internal static async Task Run( - [NotNull] IFdbAsyncEnumerable source, - FdbAsyncMode mode, + [NotNull] IAsyncEnumerable source, + AsyncIterationHint mode, [NotNull] Func action, CancellationToken ct) { @@ -424,11 +260,11 @@ internal static async Task Run( //note: we should not use "ConfigureAwait(false)" here because we would like to execute the action in the original synchronization context if possible... long count = 0; - using (var iterator = source.GetEnumerator(mode)) + using (var iterator = source.GetEnumerator(ct, mode)) { Contract.Assert(iterator != null, "The underlying sequence returned a null async iterator"); - while (await iterator.MoveNext(ct)) + while (await iterator.MoveNextAsync()) { await action(iterator.Current, ct); ++count; @@ -445,8 +281,8 @@ internal static async Task Run( /// Cancellation token that can be used to cancel the operation /// Number of items that have been processed internal static async Task Run( - [NotNull] IFdbAsyncEnumerable source, - FdbAsyncMode mode, + [NotNull] IAsyncEnumerable source, + AsyncIterationHint mode, [NotNull] Func action, CancellationToken ct) { @@ -455,11 +291,11 @@ internal static async Task Run( //note: we should not use "ConfigureAwait(false)" here because we would like to execute the action in the original synchronization context if possible... 
long count = 0; - using (var iterator = source.GetEnumerator(mode)) + using (var iterator = source.GetEnumerator(ct, mode)) { Contract.Assert(iterator != null, "The underlying sequence returned a null async iterator"); - while (await iterator.MoveNext(ct)) + while (await iterator.MoveNextAsync()) { ct.ThrowIfCancellationRequested(); await action(iterator.Current); @@ -476,8 +312,8 @@ internal static async Task Run( /// When the sequence is empty: If true then returns the default value for the type. Otherwise, throws an exception /// Cancellation token that can be used to cancel the operation /// Value of the first element of the sequence, or the default value, or an exception (depending on and - internal static async Task Head( - [NotNull] IFdbAsyncEnumerable source, + public static async Task Head( + [NotNull] IAsyncEnumerable source, bool single, bool orDefault, CancellationToken ct) @@ -486,16 +322,16 @@ internal static async Task Head( //note: we should not use "ConfigureAwait(false)" here because we would like to execute the action in the original synchronization context if possible... 
- using (var iterator = source.GetEnumerator(FdbAsyncMode.Head)) + using (var iterator = source.GetEnumerator(ct, AsyncIterationHint.Head)) { Contract.Assert(iterator != null, "The underlying sequence returned a null async iterator"); - if (await iterator.MoveNext(ct)) + if (await iterator.MoveNextAsync()) { TSource first = iterator.Current; if (single) { - if (await iterator.MoveNext(ct)) throw new InvalidOperationException("The sequence contained more than one element"); + if (await iterator.MoveNextAsync()) throw new InvalidOperationException("The sequence contained more than one element"); } return first; } @@ -507,4 +343,208 @@ internal static async Task Head( #endregion } + + /// Small buffer that keeps a list of chunks that are larger and larger + /// Type of elements stored in the buffer + [DebuggerDisplay("Count={Count}, Chunks={this.Chunks.Length}, Current={Index}/{Current.Length}")] + public sealed class Buffer + { + // We want to avoid growing the same array again and again ! + // Instead, we grow list of chunks, that grow in size (until a max), and concatenate all the chunks together at the end, once we know the final size + + /// Default intial capacity, if not specified + const int DefaultCapacity = 16; + //REVIEW: should we use a power of 2 or of 10 for initial capacity? + // Since humans prefer the decimal system, it is more likely that query limit count be set to something like 10, 50, 100 or 1000 + // but most "human friendly" limits are close to the next power of 2, like 10 ~= 16, 50 ~= 64, 100 ~= 128, 500 ~= 512, 1000 ~= 1024, so we don't waste that much space... 
+ + /// Maximum size of a chunk + const int MaxChunkSize = 4096; + + /// Number of items in the buffer + public int Count; + /// Index in the current chunk + public int Index; + /// List of chunks + public T[][] Chunks; + /// Current (and last) chunk + public T[] Current; + + public Buffer(int capacity = 0) + { + if (capacity <= 0) capacity = DefaultCapacity; + + this.Count = 0; + this.Index = 0; + this.Chunks = new T[1][]; + this.Current = new T[capacity]; + this.Chunks[0] = this.Current; + } + + public void Add(T item) + { + if (this.Index == this.Current.Length) + { + Grow(); + } + + checked { ++this.Count; } + this.Current[this.Index++] = item; + } + + private void Grow() + { + // Growth rate: + // - newly created chunk is always half the total size + // - except the first chunk who is set to the inital capacity + + Array.Resize(ref this.Chunks, this.Chunks.Length + 1); + this.Current = new T[Math.Min(this.Count, MaxChunkSize)]; + this.Chunks[this.Chunks.Length - 1] = this.Current; + this.Index = 0; + } + + [NotNull] + private T[] MergeChunks() + { + var tmp = new T[this.Count]; + int count = this.Count; + int index = 0; + for (int i = 0; i < this.Chunks.Length - 1; i++) + { + var chunk = this.Chunks[i]; + Array.Copy(chunk, 0, tmp, index, chunk.Length); + index += chunk.Length; + count -= chunk.Length; + } + Array.Copy(this.Current, 0, tmp, index, count); + return tmp; + } + + /// Return a buffer containing all of the items + /// Buffer that contains all the items, and may be larger than required + /// This is equivalent to calling ToArray(), except that if the buffer is empty, or if it consists of a single page, then no new allocations will be performed. 
+ [NotNull] + public T[] GetBuffer() + { + //note: this is called by internal operator like OrderBy + // In this case we want to reduce the copying as much as possible, + // and we can suppose that the buffer won't be exposed to the application + + if (this.Count == 0) + { // empty + return new T[0]; + } + else if (this.Chunks.Length == 1) + { // everything fits in a single chunk + return this.Current; + } + else + { // we need to stitch all the buffers together + return MergeChunks(); + } + } + + /// Return the content of the buffer + /// Array of size containing all the items in this buffer + [NotNull] + public T[] ToArray() + { + if (this.Count == 0) + { // empty sequence + return new T[0]; + } + else if (this.Chunks.Length == 1 && this.Current.Length == this.Count) + { // a single buffer page was used + return this.Current; + } + else + { // concatenate all the buffer pages into one big array + return MergeChunks(); + } + } + + /// Return the content of the buffer + /// List of size containing all the items in this buffer + [NotNull] + public List ToList() + { + int count = this.Count; + if (count == 0) + { // empty sequence + return new List(); + } + + var list = new List(count); + if (count > 0) + { + var chunks = this.Chunks; + for (int i = 0; i < chunks.Length - 1; i++) + { + list.AddRange(chunks[i]); + count -= chunks[i].Length; + } + + var current = this.Current; + if (count == current.Length) + { // the last chunk fits perfectly + list.AddRange(current); + } + else + { // there is no List.AddRange(buffer, offset, count), and copying in a tmp buffer would waste the memory we tried to save with the buffer + // also, for most of the small queries, like FirstOrDefault()/SingleOrDefault(), count will be 1 (or very small) so calling Add(T) will still be optimum + for (int i = 0; i < count; i++) + { + list.Add(current[i]); + } + } + } + + return list; + } + + /// Return the content of the buffer + /// List of size containing all the items in this buffer + 
[NotNull] + public HashSet ToHashSet(IEqualityComparer comparer = null) + { + int count = this.Count; + var hashset = new HashSet(comparer); + if (count == 0) + { + return hashset; + } + + var chunks = this.Chunks; + + for (int i = 0; i < chunks.Length - 1; i++) + { + foreach (var item in chunks[i]) + { + hashset.Add(item); + } + count -= chunks[i].Length; + } + + var current = this.Current; + if (count == current.Length) + { // the last chunk fits perfectly + foreach (var item in current) + { + hashset.Add(item); + } + } + else + { // there is no List.AddRange(buffer, offset, count), and copying in a tmp buffer would waste the memory we tried to save with the buffer + // also, for most of the small queries, like FirstOrDefault()/SingleOrDefault(), count will be 1 (or very small) so calling Add(T) will still be optimum + for (int i = 0; i < count; i++) + { + hashset.Add(current[i]); + } + } + return hashset; + } + + } + } diff --git a/FoundationDB.Client/Linq/FdbAsyncEnumerable.OrderedSequence.cs b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.OrderedSequence.cs similarity index 73% rename from FoundationDB.Client/Linq/FdbAsyncEnumerable.OrderedSequence.cs rename to FoundationDB.Client/Shared/Linq/AsyncEnumerable.OrderedSequence.cs index 21b0216a7..5031beeec 100644 --- a/FoundationDB.Client/Linq/FdbAsyncEnumerable.OrderedSequence.cs +++ b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.OrderedSequence.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,32 +26,33 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq { - using FoundationDB.Async; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq.Async.Iterators; + using Doxense.Threading.Tasks; + using JetBrains.Annotations; - public static partial class FdbAsyncEnumerable + public static partial class AsyncEnumerable { /// Represent an async sequence that returns its elements according to a specific sort order /// Type of the elements of the sequence - internal class OrderedSequence : IFdbAsyncOrderedEnumerable + internal class OrderedSequence : IAsyncOrderedEnumerable { // If an instance of the base class is constructed, it will sort by the items themselves (using a Comparer) // If an instance of the derived class is constructed, then it will sort the a key extracted the each item (sing a Comparer) - protected readonly IFdbAsyncEnumerable m_source; + protected readonly IAsyncEnumerable m_source; private readonly IComparer m_comparer; // null if comparing using keys protected readonly bool m_descending; protected readonly OrderedSequence m_parent;// null if primary sort key - public OrderedSequence(IFdbAsyncEnumerable source, IComparer comparer, bool descending, OrderedSequence parent) + public OrderedSequence(IAsyncEnumerable source, IComparer comparer, bool descending, OrderedSequence parent) { Contract.Requires(source != null); @@ -61,7 +62,7 @@ public OrderedSequence(IFdbAsyncEnumerable source, IComparer c m_parent = parent; } - protected OrderedSequence(IFdbAsyncEnumerable source, bool descending, OrderedSequence parent) + protected OrderedSequence(IAsyncEnumerable source, bool descending, OrderedSequence parent) { Contract.Requires(source != null); @@ -78,31 +79,26 @@ internal virtual SequenceSorter GetEnumerableSorter(SequenceSorter 
GetEnumerator(FdbAsyncMode mode = FdbAsyncMode.Default) + public IAsyncEnumerator GetEnumerator(CancellationToken ct, AsyncIterationHint mode) { + ct.ThrowIfCancellationRequested(); var sorter = GetEnumerableSorter(null); - var enumerator = default(IFdbAsyncEnumerator); + var enumerator = default(IAsyncEnumerator); try { - enumerator = m_source.GetEnumerator(mode); - return new OrderedEnumerator(enumerator, sorter); + enumerator = m_source.GetEnumerator(ct, mode); + return new OrderedEnumerator(enumerator, sorter, ct); } catch (Exception) { - if (enumerator != null) enumerator.Dispose(); + enumerator?.Dispose(); throw; } } - IAsyncEnumerator IAsyncEnumerable.GetEnumerator() - { - return GetEnumerator(FdbAsyncMode.All); - } - - [NotNull] - public IFdbAsyncOrderedEnumerable CreateOrderedEnumerable([NotNull] Func keySelector, IComparer comparer, bool descending) + public IAsyncOrderedEnumerable CreateOrderedEnumerable(Func keySelector, IComparer comparer, bool descending) { - if (keySelector == null) throw new ArgumentNullException("keySelector"); + Contract.NotNull(keySelector, nameof(keySelector)); return new OrderedSequence(this, keySelector, comparer, descending, this); } @@ -117,7 +113,7 @@ internal sealed class OrderedSequence : OrderedSequence private readonly Func m_keySelector; private readonly IComparer m_keyComparer; - public OrderedSequence(IFdbAsyncEnumerable source, Func keySelector, IComparer comparer, bool descending, OrderedSequence parent) + public OrderedSequence(IAsyncEnumerable source, Func keySelector, IComparer comparer, bool descending, OrderedSequence parent) : base(source, descending, parent) { Contract.Requires(keySelector != null); @@ -135,43 +131,44 @@ internal override SequenceSorter GetEnumerableSorter(SequenceSorterIterator that will sort all the items produced by an inner iterator, before outputting the results all at once - internal sealed class OrderedEnumerator : IFdbAsyncEnumerator + internal sealed class OrderedEnumerator : 
IAsyncEnumerator { // This iterator must first before EVERY items of the source in memory, before being able to sort them. // The first MoveNext() will return only once the inner sequence has finished (succesfully), which can take some time! // Ordering is done in-memory using QuickSort - private readonly IFdbAsyncEnumerator m_inner; + private readonly IAsyncEnumerator m_inner; private readonly SequenceSorter m_sorter; private TSource[] m_items; private int[] m_map; private int m_offset; private TSource m_current; + private readonly CancellationToken m_ct; - public OrderedEnumerator(IFdbAsyncEnumerator enumerator, SequenceSorter sorter) + public OrderedEnumerator(IAsyncEnumerator enumerator, SequenceSorter sorter, CancellationToken ct) { Contract.Requires(enumerator != null && sorter != null); m_inner = enumerator; m_sorter = sorter; + m_ct = ct; } - private async Task ReadAllThenSort(CancellationToken ct) + private async Task ReadAllThenSort() { if (m_offset == -1) return false; // already EOF or Disposed // first we need to spool everything from the inner iterator into memory - var buffer = new FdbAsyncEnumerable.Buffer(); + var buffer = new Buffer(); var inner = m_inner; - var iterator = inner as FdbAsyncIterator; - if (iterator != null) + if (inner is AsyncIterator iterator) { - await iterator.ExecuteAsync((x) => buffer.Add(x), ct).ConfigureAwait(false); + await iterator.ExecuteAsync((x) => buffer.Add(x), m_ct).ConfigureAwait(false); } else { - while (await inner.MoveNext(ct).ConfigureAwait(false)) + while (await inner.MoveNextAsync().ConfigureAwait(false)) { buffer.Add(inner.Current); } @@ -193,25 +190,25 @@ private async Task ReadAllThenSort(CancellationToken ct) return true; } - public Task MoveNext(CancellationToken cancellationToken) + public Task MoveNextAsync() { // Firt call will be slow (and async), but the rest of the calls will use the results already sorted in memory, and should be as fast as possible! 
if (m_map == null) { - return ReadAllThenSort(cancellationToken); + return ReadAllThenSort(); } int pos = checked(m_offset + 1); if (pos < m_map.Length) { Publish(pos); - return TaskHelpers.TrueTask; + return TaskHelpers.True; } else { Completed(); - return TaskHelpers.FalseTask; + return TaskHelpers.False; } } @@ -231,10 +228,7 @@ private void Completed() } - public TSource Current - { - get { return m_current; } - } + public TSource Current => m_current; public void Dispose() { diff --git a/FoundationDB.Client/Linq/FdbAsyncEnumerable.Sorters.cs b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.Sorters.cs similarity index 96% rename from FoundationDB.Client/Linq/FdbAsyncEnumerable.Sorters.cs rename to FoundationDB.Client/Shared/Linq/AsyncEnumerable.Sorters.cs index 9a0532dda..58d79beb9 100644 --- a/FoundationDB.Client/Linq/FdbAsyncEnumerable.Sorters.cs +++ b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.Sorters.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,14 +26,14 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq { - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; - public static partial class FdbAsyncEnumerable + public static partial class AsyncEnumerable { // These classes contain the logic to sort items (by themselves or by keys) // They are single-use and constructed at runtime, when an ordered sequence starts enumerating. 
@@ -171,10 +171,7 @@ internal override void ComputeKeys(TSource[] items, int count) keys[i] = selector(items[i]); } m_keys = keys; - if (m_next != null) - { - m_next.ComputeKeys(items, count); - } + m_next?.ComputeKeys(items, count); } internal override int CompareKeys(int index1, int index2) diff --git a/FoundationDB.Client/Shared/Linq/AsyncEnumerable.cs b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.cs new file mode 100644 index 000000000..a209cb7b5 --- /dev/null +++ b/FoundationDB.Client/Shared/Linq/AsyncEnumerable.cs @@ -0,0 +1,1442 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Linq +{ + using System; + using System.Collections.Generic; + using System.Threading; + using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq.Async.Expressions; + using Doxense.Linq.Async.Iterators; + using Doxense.Threading.Tasks; + using JetBrains.Annotations; + + /// Provides a set of static methods for querying objects that implement . + public static partial class AsyncEnumerable + { + // Welcome to the wonderful world of the Monads! + + #region Entering the Monad... + + /// Returns an empty async sequence + [Pure, NotNull] + public static IAsyncEnumerable Empty() + { + return EmptySequence.Default; + } + + /// Returns an async sequence with a single element, which is a constant + [Pure, NotNull] + public static IAsyncEnumerable Singleton(T value) + { + //note: we can't call this method Single(T), because then Single(Func) would be ambigous with Single>(T) + return new SingletonSequence(() => value); + } + + /// Returns an async sequence which will produce a single element, using the specified lambda + /// Lambda that will be called once per iteration, to produce the single element of this sequene + /// If the sequence is iterated multiple times, then will be called once for each iteration. 
+ [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Single([NotNull] Func lambda) + { + Contract.NotNull(lambda, nameof(lambda)); + return new SingletonSequence(lambda); + } + + /// Returns an async sequence which will produce a single element, using the specified lambda + /// Lambda that will be called once per iteration, to produce the single element of this sequene + /// If the sequence is iterated multiple times, then will be called once for each iteration. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Single([NotNull] Func> asyncLambda) + { + Contract.NotNull(asyncLambda, nameof(asyncLambda)); + return new SingletonSequence(asyncLambda); + } + + /// Returns an async sequence which will produce a single element, using the specified lambda + /// Lambda that will be called once per iteration, to produce the single element of this sequene + /// If the sequence is iterated multiple times, then will be called once for each iteration. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Single([NotNull] Func> asyncLambda) + { + Contract.NotNull(asyncLambda, nameof(asyncLambda)); + return new SingletonSequence(asyncLambda); + } + + /// Apply an async lambda to a sequence of elements to transform it into an async sequence + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable ToAsyncEnumerable([NotNull] this IEnumerable source, [NotNull] Func> lambda) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(lambda, nameof(lambda)); + + return Create(source, (iterator, ct) => new EnumerableIterator(iterator, lambda, ct)); + } + + /// Apply an async lambda to a sequence of elements to transform it into an async sequence + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable ToAsyncEnumerable([NotNull] this IEnumerable source) + { + Contract.NotNull(source, nameof(source)); + + return Create(source, (iterator, ct) => new EnumerableIterator(iterator, x => Task.FromResult(x), ct)); + } + + /// Wraps an async 
lambda into an async sequence that will return the result of the lambda + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable FromTask([NotNull] Func> asyncLambda) + { + //TODO: create a custom iterator for this ? + return ToAsyncEnumerable(new [] { asyncLambda }).Select(x => x()); + } + + /// Split a sequence of items into several batches + /// Type of the elemenst in + /// Source sequence + /// Maximum size of each batch + /// Sequence of batches, whose size will always we , except for the last batch that will only hold the remaning items. If the source is empty, an empty sequence is returned. + [Pure, NotNull, LinqTunnel] + public static IEnumerable> Buffered([NotNull] this IEnumerable source, int batchSize) + { + Contract.NotNull(source, nameof(source)); + if (batchSize <= 0) throw new ArgumentException("Batch size must be greater than zero.", nameof(batchSize)); + + var list = new List(batchSize); + foreach (var item in source) + { + list.Add(item); + if (list.Count >= batchSize) + { + yield return list; + list.Clear(); + } + } + if (list.Count > 0) + { + yield return list; + } + } + + #endregion + + #region Staying in the Monad... + + #region SelectMany... + + /// Projects each element of an async sequence to an and flattens the resulting sequences into one async sequence. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable SelectMany([NotNull] this IAsyncEnumerable source, [NotNull] Func> selector) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(selector, nameof(selector)); + + if (source is AsyncIterator iterator) + { + return iterator.SelectMany(selector); + } + + return Flatten(source, new AsyncTransformExpression>(selector)); + } + + /// Projects each element of an async sequence to an and flattens the resulting sequences into one async sequence. 
+ [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable SelectMany([NotNull] this IAsyncEnumerable source, [NotNull] Func>> asyncSelector) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncSelector, nameof(asyncSelector)); + + return SelectMany(source, TaskHelpers.WithCancellation(asyncSelector)); + } + + /// Projects each element of an async sequence to an and flattens the resulting sequences into one async sequence. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable SelectMany([NotNull] this IAsyncEnumerable source, [NotNull] Func>> asyncSelector) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncSelector, nameof(asyncSelector)); + + if (source is AsyncIterator iterator) + { + return iterator.SelectMany(asyncSelector); + } + + return Flatten(source, new AsyncTransformExpression>(asyncSelector)); + } + + /// Projects each element of an async sequence to an flattens the resulting sequences into one async sequence, and invokes a result selector function on each element therein. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable SelectMany([NotNull] this IAsyncEnumerable source, [NotNull] Func> collectionSelector, [NotNull] Func resultSelector) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(collectionSelector, nameof(collectionSelector)); + Contract.NotNull(resultSelector, nameof(resultSelector)); + + if (source is AsyncIterator iterator) + { + return iterator.SelectMany(collectionSelector, resultSelector); + } + + return Flatten(source, new AsyncTransformExpression>(collectionSelector), resultSelector); + } + + /// Projects each element of an async sequence to an flattens the resulting sequences into one async sequence, and invokes a result selector function on each element therein. 
+ [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable SelectMany([NotNull] this IAsyncEnumerable source, [NotNull] Func>> asyncCollectionSelector, [NotNull] Func resultSelector) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncCollectionSelector, nameof(asyncCollectionSelector)); + Contract.NotNull(resultSelector, nameof(resultSelector)); + + return SelectMany(source, TaskHelpers.WithCancellation(asyncCollectionSelector), resultSelector); + } + + /// Projects each element of an async sequence to an flattens the resulting sequences into one async sequence, and invokes a result selector function on each element therein. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable SelectMany([NotNull] this IAsyncEnumerable source, [NotNull] Func>> asyncCollectionSelector, [NotNull] Func resultSelector) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncCollectionSelector, nameof(asyncCollectionSelector)); + Contract.NotNull(resultSelector, nameof(resultSelector)); + + if (source is AsyncIterator iterator) + { + return iterator.SelectMany(asyncCollectionSelector, resultSelector); + } + + return Flatten(source, new AsyncTransformExpression>(asyncCollectionSelector), resultSelector); + } + + #endregion + + #region Select... + + /// Projects each element of an async sequence into a new form. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Select([NotNull] this IAsyncEnumerable source, [NotNull] Func selector) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(selector, nameof(selector)); + + if (source is AsyncIterator iterator) + { + return iterator.Select(selector); + } + + return Map(source, new AsyncTransformExpression(selector)); + } + + /// Projects each element of an async sequence into a new form. 
+ [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Select([NotNull] this IAsyncEnumerable source, [NotNull] Func> asyncSelector) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncSelector, nameof(asyncSelector)); + + return Select(source, TaskHelpers.WithCancellation(asyncSelector)); + } + + /// Projects each element of an async sequence into a new form. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Select([NotNull] this IAsyncEnumerable source, [NotNull] Func> asyncSelector) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncSelector, nameof(asyncSelector)); + + if (source is AsyncIterator iterator) + { + return iterator.Select(asyncSelector); + } + + return Map(source, new AsyncTransformExpression(asyncSelector)); + } + + #endregion + + #region Where... + + /// Filters an async sequence of values based on a predicate. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Where([NotNull] this IAsyncEnumerable source, [NotNull] Func predicate) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + + if (source is AsyncIterator iterator) + { + return iterator.Where(predicate); + } + + return Filter(source, new AsyncFilterExpression(predicate)); + } + + /// Filters an async sequence of values based on a predicate. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Where([NotNull] this IAsyncEnumerable source, [NotNull] Func> asyncPredicate) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncPredicate, nameof(asyncPredicate)); + + return Where(source, TaskHelpers.WithCancellation(asyncPredicate)); + } + + /// Filters an async sequence of values based on a predicate. 
+ [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Where([NotNull] this IAsyncEnumerable source, [NotNull] Func> asyncPredicate) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncPredicate, nameof(asyncPredicate)); + + if (source is AsyncIterator iterator) + { + return iterator.Where(asyncPredicate); + } + + return Filter(source, new AsyncFilterExpression(asyncPredicate)); + } + + #endregion + + #region Take... + + /// Returns a specified number of contiguous elements from the start of an async sequence. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Take([NotNull] this IAsyncEnumerable source, int count) + { + Contract.NotNull(source, nameof(source)); + if (count < 0) throw new ArgumentOutOfRangeException(nameof(count), count, "Count cannot be less than zero"); + + if (source is AsyncIterator iterator) + { + return iterator.Take(count); + } + + return Limit(source, count); + } + + #endregion + + #region TakeWhile... + + /// Returns elements from an async sequence as long as a specified condition is true, and then skips the remaining elements. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable TakeWhile([NotNull] this IAsyncEnumerable source, [NotNull] Func condition) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(condition, nameof(condition)); + + if (source is AsyncIterator iterator) + { + return iterator.TakeWhile(condition); + } + + return Limit(source, condition); + } + + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable TakeWhile([NotNull] this IAsyncEnumerable source, [NotNull] Func condition, out QueryStatistics stopped) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(condition, nameof(condition)); + + var signal = new QueryStatistics(false); + stopped = signal; + + // to trigger the signal, we just intercept the condition returning false (which only happen once!) 
+ bool Wrapped(TSource x) + { + if (condition(x)) return true; + signal.Update(true); + return false; + } + + return TakeWhile(source, Wrapped); + } + + #endregion + + #region Skip... + + /// Skips the first elements of an async sequence. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Skip([NotNull] this IAsyncEnumerable source, int count) + { + Contract.NotNull(source, nameof(source)); + if (count < 0) throw new ArgumentOutOfRangeException(nameof(count), count, "Count cannot be less than zero"); + + if (source is AsyncIterator iterator) + { + return iterator.Skip(count); + } + + return Offset(source, count); + } + + #endregion + + #region SelectAsync + + /// Projects each element of an async sequence into a new form. + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable SelectAsync([NotNull] this IAsyncEnumerable source, [NotNull] Func> asyncSelector, ParallelAsyncQueryOptions options = null) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncSelector, nameof(asyncSelector)); + + return new ParallelSelectAsyncIterator(source, asyncSelector, options ?? new ParallelAsyncQueryOptions()); + } + + /// Always prefetch the next item from the inner sequence. + /// Type of the items in the source sequence + /// Source sequence that has a high latency, and from which we want to prefetch a set number of items. + /// Sequence that prefetch the next item, when outputing the current item. + /// + /// This iterator can help smooth out the query pipeline when every call to the inner sequence has a somewhat high latency (ex: reading the next page of results from the database). + /// Avoid prefetching from a source that is already reading from a buffer of results. 
+ /// + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Prefetch([NotNull] this IAsyncEnumerable source) + { + Contract.NotNull(source, nameof(source)); + + return new PrefetchingAsyncIterator(source, 1); + } + + /// Prefetch a certain number of items from the inner sequence, before outputing the results one by one. + /// Type of the items in the source sequence + /// Source sequence that has a high latency, and from which we want to prefetch a set number of items. + /// Maximum number of items to buffer from the source before they are consumed by the rest of the query. + /// Sequence that returns items from a buffer of prefetched list. + /// + /// This iterator can help smooth out the query pipeline when every call to the inner sequence has a somewhat high latency (ex: reading the next page of results from the database). + /// Avoid prefetching from a source that is already reading from a buffer of results. + /// + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Prefetch([NotNull] this IAsyncEnumerable source, int prefetchCount) + { + Contract.NotNull(source, nameof(source)); + if (prefetchCount <= 0) throw new ArgumentOutOfRangeException(nameof(prefetchCount), prefetchCount, "Prefetch count must be at least one."); + + return new PrefetchingAsyncIterator(source, prefetchCount); + } + + /// Buffers the items of a bursty sequence, into a sequence of variable-sized arrays made up of items that where produced in a very short timespan. + /// Type of the items in the source sequence + /// Source sequence, that produces bursts of items, produced from the same page of results, before reading the next page. + /// Maximum number of items to return in a single window. If more items arrive at the same time, a new window will be opened with the rest of the items. + /// Sequence of batches, where all the items of a single batch arrived at the same time. 
A batch is closed once the next call to MoveNext() on the inner sequence does not complete immediately. Batches can be smaller than . + /// + /// This should only be called on bursty asynchronous sequences, and when you want to process items in batches, without incurring the cost of latency between two pages of results. + /// You should avoid using this operator on sequences where each call to MoveNext() is asynchronous, since it would only produce batchs with only a single item. + /// + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Window([NotNull] this IAsyncEnumerable source, int maxWindowSize) + { + Contract.NotNull(source, nameof(source)); + if (maxWindowSize <= 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(maxWindowSize), maxWindowSize, "Window size must be at least one."); + + return new WindowingAsyncIterator(source, maxWindowSize); + } + + /// Buffers the items of a source sequence, and outputs a sequence of fixed-sized arrays. + /// Type of the items in the source sequence + /// Source sequence that will be cut into chunks containing at most items. + /// Number of items per batch. The last batch may contain less items, but should never be empty. + /// Sequence of arrays of size , except the last batch which can have less items. + /// + /// This operator does not care about the latency of each item, and will always try to fill each batch completely, before outputing a result. + /// If you are working on an inner sequence that is bursty in nature, where items arrives in waves, you should use which attempts to minimize the latency by outputing incomplete batches if needed. 
+ /// + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Batch([NotNull] this IAsyncEnumerable source, int batchSize) + { + Contract.NotNull(source, nameof(source)); + if (batchSize <= 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(batchSize), batchSize, "Batch size must be at least one."); + + return new BatchingAsyncIterator(source, batchSize); + } + + #endregion + + #region Distinct... + + [Pure, NotNull, LinqTunnel] + public static IAsyncEnumerable Distinct([NotNull] this IAsyncEnumerable source, IEqualityComparer comparer = null) + { + Contract.NotNull(source, nameof(source)); + comparer = comparer ?? EqualityComparer.Default; + + return new DistinctAsyncIterator(source, comparer); + } + + #endregion + + #region OrderBy... + + [Pure, NotNull, LinqTunnel] + public static IAsyncOrderedEnumerable OrderBy([NotNull] this IAsyncEnumerable source, [NotNull] Func keySelector, IComparer comparer = null) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(keySelector, nameof(keySelector)); + comparer = comparer ?? Comparer.Default; + + return new OrderedSequence(source, keySelector, comparer, descending: false, parent: null); + } + + [Pure, NotNull, LinqTunnel] + public static IAsyncOrderedEnumerable OrderByDescending([NotNull] this IAsyncEnumerable source, [NotNull] Func keySelector, IComparer comparer = null) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(keySelector, nameof(keySelector)); + comparer = comparer ?? 
Comparer.Default; + + return new OrderedSequence(source, keySelector, comparer, descending: true, parent: null); + } + + [Pure, NotNull, LinqTunnel] + public static IAsyncOrderedEnumerable ThenBy([NotNull] this IAsyncOrderedEnumerable source, [NotNull] Func keySelector, IComparer comparer = null) + { + Contract.NotNull(source, nameof(source)); + return source.CreateOrderedEnumerable(keySelector, comparer, descending: false); + } + + [Pure, NotNull, LinqTunnel] + public static IAsyncOrderedEnumerable ThenByDescending([NotNull] this IAsyncOrderedEnumerable source, [NotNull] Func keySelector, IComparer comparer = null) + { + Contract.NotNull(source, nameof(source)); + return source.CreateOrderedEnumerable(keySelector, comparer, descending: true); + } + + #endregion + + // If you are bored, maybe consider adding: + // - DefaultIfEmpty + // - Zip + // - OrderBy and OrderBy + // - GroupBy + + #endregion + + #region Leaving the Monad... + + /// Execute an action for each element of an async sequence + public static Task ForEachAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Action action, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(action, nameof(action)); + + if (source is AsyncIterator iterator) + { + return iterator.ExecuteAsync(action, ct); + } + return Run(source, AsyncIterationHint.All, action, ct); + } + + /// Execute an async action for each element of an async sequence + public static Task ForEachAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func asyncAction, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(asyncAction, nameof(asyncAction)); + + if (source is AsyncIterator iterator) + { + return iterator.ExecuteAsync(TaskHelpers.WithCancellation(asyncAction), ct); + } + + return ForEachAsync(source, TaskHelpers.WithCancellation(asyncAction), ct); + } + + /// Execute an async action for each element of an async sequence 
+ public static Task ForEachAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func asyncAction, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncAction, nameof(asyncAction)); + + if (source is AsyncIterator iterator) + { + return iterator.ExecuteAsync(asyncAction, ct); + } + + return Run(source, AsyncIterationHint.All, asyncAction, ct); + } + + #region ToList/Array/Dictionary/HashSet... + + /// Create a list from an async sequence. + [ItemNotNull] + public static Task> ToListAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + + return AggregateAsync( + source, + new Buffer(), + (buffer, x) => buffer.Add(x), + (buffer) => buffer.ToList(), + ct + ); + } + + /// Create an array from an async sequence. + [ItemNotNull] + public static Task ToArrayAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + + return AggregateAsync( + source, + new Buffer(), + (buffer, x) => buffer.Add(x), + (buffer) => buffer.ToArray(), + ct + ); + } + + /// Create an array from an async sequence, knowing a rough estimation of the number of elements. + [ItemNotNull] + internal static Task ToArrayAsync([NotNull] this IAsyncEnumerable source, int estimatedSize, CancellationToken ct = default(CancellationToken)) + { + Contract.Requires(source != null && estimatedSize >= 0); + + return AggregateAsync( + source, + new List(estimatedSize), + (buffer, x) => buffer.Add(x), + (buffer) => buffer.ToArray(), + ct + ); + } + + /// Creates a Dictionary from an async sequence according to a specified key selector function and key comparer. 
+ [ItemNotNull] + public static Task> ToDictionaryAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func keySelector, IEqualityComparer comparer = null, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(keySelector, nameof(keySelector)); + + return AggregateAsync( + source, + new Dictionary(comparer ?? EqualityComparer.Default), + (results, x) => { results[keySelector(x)] = x; }, + ct + ); + } + + /// Creates a Dictionary from an async sequence according to a specified key selector function, a comparer, and an element selector function. + [ItemNotNull] + public static Task> ToDictionaryAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func keySelector, [NotNull, InstantHandle] Func elementSelector, IEqualityComparer comparer = null, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(keySelector, nameof(keySelector)); + Contract.NotNull(elementSelector, nameof(elementSelector)); + + return AggregateAsync( + source, + new Dictionary(comparer ?? EqualityComparer.Default), + (results, x) => { results[keySelector(x)] = elementSelector(x); }, + ct + ); + } + + /// Creates a Dictionary from an async sequence of pairs of keys and values. + [ItemNotNull] + public static Task> ToDictionaryAsync([NotNull] this IAsyncEnumerable> source, IEqualityComparer comparer = null, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + return AggregateAsync( + source, + new Dictionary(comparer ?? EqualityComparer.Default), + (results, x) => { results[x.Key] = x.Value; }, + ct + ); + } + + /// Create an Hashset from an async sequence. 
+ [ItemNotNull] + public static Task> ToHashSetAsync([NotNull] this IAsyncEnumerable source, IEqualityComparer comparer = null, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + return AggregateAsync( + source, + new Buffer(), + (buffer, x) => buffer.Add(x), + (buffer) => buffer.ToHashSet(comparer), + ct + ); + } + + #endregion + + #region Aggregate... + + /// Applies an accumulator function over an async sequence. + public static async Task AggregateAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func aggregator, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(aggregator, nameof(aggregator)); + + ct.ThrowIfCancellationRequested(); + using (var iterator = source.GetEnumerator(ct, AsyncIterationHint.All)) + { + Contract.Assert(iterator != null, "The sequence returned a null async iterator"); + + if (!(await iterator.MoveNextAsync().ConfigureAwait(false))) + { + throw new InvalidOperationException("The sequence was empty"); + } + + var item = iterator.Current; + while (await iterator.MoveNextAsync().ConfigureAwait(false)) + { + item = aggregator(item, iterator.Current); + } + + return item; + } + } + + /// Applies an accumulator function over an async sequence. + public static async Task AggregateAsync([NotNull] this IAsyncEnumerable source, TAccumulate seed, [NotNull, InstantHandle] Func aggregator, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(aggregator, nameof(aggregator)); + + //TODO: opitmize this to not have to allocate lambdas! + var accumulate = seed; + await ForEachAsync(source, (x) => { accumulate = aggregator(accumulate, x); }, ct).ConfigureAwait(false); + return accumulate; + } + + /// Applies an accumulator function over an async sequence. 
+ public static async Task AggregateAsync([NotNull] this IAsyncEnumerable source, TAccumulate seed, [NotNull, InstantHandle] Action aggregator, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(aggregator, nameof(aggregator)); + + var accumulate = seed; + await ForEachAsync(source, (x) => { aggregator(accumulate, x); }, ct).ConfigureAwait(false); + return accumulate; + } + + /// Applies an accumulator function over an async sequence. + public static async Task AggregateAsync([NotNull] this IAsyncEnumerable source, TAccumulate seed, [NotNull, InstantHandle] Func aggregator, [NotNull, InstantHandle] Func resultSelector, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(aggregator, nameof(aggregator)); + Contract.NotNull(resultSelector, nameof(resultSelector)); + + var accumulate = seed; + await ForEachAsync(source, (x) => { accumulate = aggregator(accumulate, x); }, ct).ConfigureAwait(false); + return resultSelector(accumulate); + } + + /// Applies an accumulator function over an async sequence. + public static async Task AggregateAsync([NotNull] this IAsyncEnumerable source, TAccumulate seed, [NotNull, InstantHandle] Action aggregator, [NotNull, InstantHandle] Func resultSelector, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(aggregator, nameof(aggregator)); + Contract.NotNull(resultSelector, nameof(resultSelector)); + + var accumulate = seed; + await ForEachAsync(source, (x) => aggregator(accumulate, x), ct); + return resultSelector(accumulate); + } + + #endregion + + #region First/Last/Single... 
+ + /// Returns the first element of an async sequence, or an exception if it is empty + public static Task FirstAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return rq.FirstAsync(); + + return Head(source, single: false, orDefault: false, ct: ct); + } + + /// Returns the first element of an async sequence, or an exception if it is empty + public static Task FirstAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return rq.FirstAsync(); + + //TODO: PERF: custom implementation for this? + return Head(source.Where(predicate), single: false, orDefault: false, ct: ct); + } + + /// Returns the first element of an async sequence, or the default value for the type if it is empty + public static Task FirstOrDefaultAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? 
+ //var rq = source as FdbRangeQuery; + //if (rq != null) return rq.FirstOrDefaultAsync(); + + return Head(source, single: false, orDefault: true, ct: ct); + } + + /// Returns the first element of an async sequence, or the default value for the type if it is empty + public static Task FirstOrDefaultAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return rq.FirstOrDefaultAsync(); + + //TODO: PERF: custom implementation for this? + return Head(source.Where(predicate), single: false, orDefault: true, ct: ct); + } + + /// Returns the first and only element of an async sequence, or an exception if it is empty or have two or more elements + /// Will need to call MoveNext at least twice to ensure that there is no second element. + public static Task SingleAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return rq.SingleAsync(); + + return Head(source, single: true, orDefault: false, ct: ct); + } + + /// Returns the first and only element of an async sequence, or an exception if it is empty or have two or more elements + /// Will need to call MoveNext at least twice to ensure that there is no second element. 
+ public static Task SingleAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return rq.SingleAsync(); + + //TODO: PERF: custom implementation for this? + return Head(source.Where(predicate), single: true, orDefault: false, ct: ct); + } + + /// Returns the first and only element of an async sequence, the default value for the type if it is empty, or an exception if it has two or more elements + /// Will need to call MoveNext at least twice to ensure that there is no second element. + public static Task SingleOrDefaultAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return rq.SingleOrDefaultAsync(); + + return Head(source, single: true, orDefault: true, ct: ct); + } + + /// Returns the first and only element of an async sequence, the default value for the type if it is empty, or an exception if it has two or more elements + /// Will need to call MoveNext at least twice to ensure that there is no second element. + public static Task SingleOrDefaultAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? 
+ //var rq = source as FdbRangeQuery; + //if (rq != null) return rq.SingleOrDefaultAsync(); + + return Head(source.Where(predicate), single: true, orDefault: true, ct: ct); + } + + /// Returns the last element of an async sequence, or an exception if it is empty + public static async Task LastAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return await rq.LastAsync(); + + bool found = false; + T last = default(T); + + await ForEachAsync(source, (x) => { found = true; last = x; }, ct).ConfigureAwait(false); + + if (!found) throw new InvalidOperationException("The sequence was empty"); + return last; + } + + /// Returns the last element of an async sequence, or an exception if it is empty + public static async Task LastAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? 
+ //var rq = source as FdbRangeQuery; + //if (rq != null) return await rq.LastAsync(); + + bool found = false; + T last = default(T); + + await ForEachAsync(source, (x) => { if (predicate(x)) { found = true; last = x; } }, ct).ConfigureAwait(false); + + if (!found) throw new InvalidOperationException("The sequence was empty"); + return last; + } + + /// Returns the last element of an async sequence, or the default value for the type if it is empty + public static async Task LastOrDefaultAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return await rq.LastOrDefaultAsync(); + + bool found = false; + T last = default(T); + + await ForEachAsync(source, (x) => { found = true; last = x; }, ct).ConfigureAwait(false); + + return found ? last : default(T); + } + + /// Returns the last element of an async sequence, or the default value for the type if it is empty + public static async Task LastOrDefaultAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return await rq.LastOrDefaultAsync(); + + bool found = false; + T last = default(T); + + await ForEachAsync(source, (x) => { if (predicate(x)) { found = true; last = x; } }, ct).ConfigureAwait(false); + + return found ? 
last : default(T); + } + + /// Returns the element at a specific location of an async sequence, or an exception if there are not enough elements + public static async Task ElementAtAsync([NotNull] this IAsyncEnumerable source, int index, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + if (index < 0) throw new ArgumentOutOfRangeException(nameof(index)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return await rq.Skip(index).SingleAsync(); + + int counter = index; + T item = default(T); + await Run( + source, + AsyncIterationHint.All, + (x) => + { + if (counter-- == 0) { item = x; return false; } + return true; + }, + ct + ).ConfigureAwait(false); + + if (counter >= 0) throw new InvalidOperationException("The sequence was too small"); + return item; + } + + /// Returns the element at a specific location of an async sequence, or the default value for the type if it there are not enough elements + public static async Task ElementAtOrDefaultAsync([NotNull] this IAsyncEnumerable source, int index, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + if (index < 0) throw new ArgumentOutOfRangeException(nameof(index)); + ct.ThrowIfCancellationRequested(); + + //TODO:REFACTORING: create some interface or base class for this? + //var rq = source as FdbRangeQuery; + //if (rq != null) return await rq.Skip(index).SingleAsync(); + + int counter = index; + T item = default(T); + + //TODO: use ExecuteAsync() if the source is an Iterator! + await Run( + source, + AsyncIterationHint.All, + (x) => + { + if (counter-- == 0) { item = x; return false; } + return true; + }, + ct + ).ConfigureAwait(false); + + if (counter >= 0) return default(T); + return item; + } + + #endregion + + #region Count/Sum... + + /// Returns the number of elements in an async sequence. 
+ public static async Task CountAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + int count = 0; + + await ForEachAsync(source, (_) => { ++count; }, ct).ConfigureAwait(false); + + return count; + } + + /// Returns a number that represents how many elements in the specified async sequence satisfy a condition. + public static async Task CountAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + ct.ThrowIfCancellationRequested(); + + int count = 0; + + await ForEachAsync(source, (x) => { if (predicate(x)) ++count; }, ct).ConfigureAwait(false); + + return count; + } + + /// Returns the sum of all elements in the specified async sequence. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + + return AggregateAsync(source, 0U, (sum, x) => checked(sum + x), ct); + } + + /// Returns the sum of all elements in the specified async sequence that satisfy a condition. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func selector, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(selector, nameof(selector)); + + return AggregateAsync(source, 0U, (sum, x) => checked(sum + selector(x)), ct); + } + + /// Returns the sum of all elements in the specified async sequence. 
+ public static Task SumAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + + return AggregateAsync(source, 0UL, (sum, x) => checked(sum + x), ct); + } + + /// Returns the sum of all elements in the specified async sequence that satisfy a condition. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func selector, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(selector, nameof(selector)); + + return AggregateAsync(source, 0UL, (sum, x) => checked(sum + selector(x)), ct); + } + + /// Returns the sum of all elements in the specified async sequence. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + + return AggregateAsync(source, 0, (sum, x) => checked(sum + x), ct); + } + + /// Returns the sum of all elements in the specified async sequence that satisfy a condition. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func selector, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(selector, nameof(selector)); + + return AggregateAsync(source, 0, (sum, x) => checked(sum + selector(x)), ct); + } + + /// Returns the sum of all elements in the specified async sequence. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + + return AggregateAsync(source, 0L, (sum, x) => checked(sum + x), ct); + } + + /// Returns the sum of all elements in the specified async sequence that satisfy a condition. 
+ public static Task SumAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func selector, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(selector, nameof(selector)); + + return AggregateAsync(source, 0L, (sum, x) => checked(sum + selector(x)), ct); + } + + /// Returns the sum of all elements in the specified async sequence. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + + return AggregateAsync(source, 0.0f, (sum, x) => sum + x, ct); + } + + /// Returns the sum of all elements in the specified async sequence that satisfy a condition. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func selector, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(selector, nameof(selector)); + + return AggregateAsync(source, 0.0f, (sum, x) => sum + selector(x), ct); + } + + /// Returns the sum of all elements in the specified async sequence. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + + return AggregateAsync(source, 0.0, (sum, x) => sum + x, ct); + } + + /// Returns the sum of all elements in the specified async sequence that satisfy a condition. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func selector, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(selector, nameof(selector)); + + return AggregateAsync(source, 0.0, (sum, x) => sum + selector(x), ct); + } + + /// Returns the sum of all elements in the specified async sequence. 
+ public static Task SumAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + + return AggregateAsync(source, 0m, (sum, x) => sum + x, ct); + } + + /// Returns the sum of all elements in the specified async sequence that satisfy a condition. + public static Task SumAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func selector, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(selector, nameof(selector)); + + return AggregateAsync(source, 0m, (sum, x) => sum + selector(x), ct); + } + + #endregion + + #region Min/Max... + + /// Returns the smallest value in the specified async sequence + public static async Task MinAsync([NotNull] this IAsyncEnumerable source, IComparer comparer = null, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + comparer = comparer ?? Comparer.Default; + + //REVIEW: use C#7 tuples + bool found = false; + T min = default(T); + + await ForEachAsync( + source, + (x) => + { + if (!found || comparer.Compare(x, min) < 0) + { + min = x; + found = true; + } + }, + ct + ).ConfigureAwait(false); + + if (!found) throw ThrowHelper.InvalidOperationException("The sequence was empty"); + return min; + } + + /// Returns the smallest value in the specified async sequence + public static async Task MinAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, IComparer comparer = null, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + comparer = comparer ?? 
Comparer.Default; + + //REVIEW: use C#7 tuples + bool found = false; + T min = default(T); + + await ForEachAsync( + source, + (x) => + { + if (predicate(x) && (!found || comparer.Compare(x, min) < 0)) + { + min = x; + found = true; + } + }, + ct + ).ConfigureAwait(false); + + if (!found) throw ThrowHelper.InvalidOperationException("The sequence was empty"); + return min; + } + + /// Returns the largest value in the specified async sequence + public static async Task MaxAsync([NotNull] this IAsyncEnumerable source, IComparer comparer = null, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + comparer = comparer ?? Comparer.Default; + + //REVIEW: use C#7 tuples + bool found = false; + T max = default(T); + + await ForEachAsync( + source, + (x) => + { + if (!found || comparer.Compare(x, max) > 0) + { + max = x; + found = true; + } + }, + ct + ).ConfigureAwait(false); + + if (!found) throw ThrowHelper.InvalidOperationException("The sequence was empty"); + return max; + } + + /// Returns the largest value in the specified async sequence + public static async Task MaxAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, IComparer comparer = null, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + comparer = comparer ?? Comparer.Default; + + //REVIEW: use C#7 tuples + bool found = false; + T max = default(T); + + await ForEachAsync( + source, + (x) => + { + if (predicate(x) && (!found || comparer.Compare(x, max) > 0)) + { + max = x; + found = true; + } + }, + ct + ).ConfigureAwait(false); + + if (!found) throw ThrowHelper.InvalidOperationException("The sequence was empty"); + return max; + } + + #endregion + + #region Any/None... + + /// Determines whether an async sequence contains any elements. 
+ /// This is the logical equivalent to "source.Count() > 0" but can be better optimized by some providers + public static async Task AnyAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + using (var iterator = source.GetEnumerator(ct, AsyncIterationHint.Head)) + { + return await iterator.MoveNextAsync().ConfigureAwait(false); + } + } + + /// Determines whether any element of an async sequence satisfies a condition. + public static async Task AnyAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + ct.ThrowIfCancellationRequested(); + + using (var iterator = source.GetEnumerator(ct, AsyncIterationHint.Head)) + { + while (await iterator.MoveNextAsync().ConfigureAwait(false)) + { + if (predicate(iterator.Current)) return true; + } + } + return false; + } + + /// Determines wether an async sequence contains no elements at all. + /// This is the logical equivalent to "source.Count() == 0" or "!source.Any()" but can be better optimized by some providers + public static async Task NoneAsync([NotNull] this IAsyncEnumerable source, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + ct.ThrowIfCancellationRequested(); + + using (var iterator = source.GetEnumerator(ct, AsyncIterationHint.Head)) + { + return !(await iterator.MoveNextAsync().ConfigureAwait(false)); + } + } + + /// Determines whether none of the elements of an async sequence satisfies a condition. 
+ public static async Task NoneAsync([NotNull] this IAsyncEnumerable source, [NotNull, InstantHandle] Func predicate, CancellationToken ct = default(CancellationToken)) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + ct.ThrowIfCancellationRequested(); + + using (var iterator = source.GetEnumerator(ct, AsyncIterationHint.Head)) + { + while (await iterator.MoveNextAsync().ConfigureAwait(false)) + { + if (predicate(iterator.Current)) return false; + } + } + return true; + } + + #endregion + + #endregion + + #region Query Statistics... + + //TODO: move this somewhere else? + + /// Measure the number of items that pass through this point of the query + /// The values returned in are only safe to read once the query has ended + [NotNull, LinqTunnel] + public static IAsyncEnumerable WithCountStatistics([NotNull] this IAsyncEnumerable source, out QueryStatistics counter) + { + Contract.NotNull(source, nameof(source)); + + var signal = new QueryStatistics(0); + counter = signal; + + // to count, we just increment the signal each type a value flows through here + return Select(source, (x) => + { + signal.Update(checked(signal.Value + 1)); + return x; + }); + } + + /// Measure the number and size of slices that pass through this point of the query + /// The values returned in are only safe to read once the query has ended + [NotNull, LinqTunnel] + public static IAsyncEnumerable> WithSizeStatistics([NotNull] this IAsyncEnumerable> source, out QueryStatistics statistics) + { + Contract.NotNull(source, nameof(source)); + + var data = new KeyValueSizeStatistics(); + statistics = new QueryStatistics(data); + + // to count, we just increment the signal each type a value flows through here + return Select(source,(kvp) => + { + data.Add(kvp.Key.Count, kvp.Value.Count); + return kvp; + }); + } + + /// Measure the number and sizes of the keys and values that pass through this point of the query + /// The values returned in are only 
safe to read once the query has ended + [NotNull, LinqTunnel] + public static IAsyncEnumerable WithSizeStatistics([NotNull] this IAsyncEnumerable source, out QueryStatistics statistics) + { + Contract.NotNull(source, nameof(source)); + + var data = new DataSizeStatistics(); + statistics = new QueryStatistics(data); + + // to count, we just increment the signal each type a value flows through here + return Select(source, (x) => + { + data.Add(x.Count); + return x; + }); + } + + /// Execute an action on each item passing through the sequence, without modifying the original sequence + /// The is execute inline before passing the item down the line, and should not block + [NotNull, LinqTunnel] + public static IAsyncEnumerable Observe([NotNull] this IAsyncEnumerable source, [NotNull] Action handler) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(handler, nameof(handler)); + + return new ObserverAsyncIterator(source, new AsyncObserverExpression(handler)); + } + + /// Execute an action on each item passing through the sequence, without modifying the original sequence + /// The is execute inline before passing the item down the line, and should not block + [NotNull, LinqTunnel] + public static IAsyncEnumerable Observe([NotNull] this IAsyncEnumerable source, [NotNull] Func asyncHandler) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(asyncHandler, nameof(asyncHandler)); + + return new ObserverAsyncIterator(source, new AsyncObserverExpression(asyncHandler)); + } + + #endregion + + } +} diff --git a/FoundationDB.Client/Linq/FdbAsyncMode.cs b/FoundationDB.Client/Shared/Linq/AsyncIterationHint.cs similarity index 96% rename from FoundationDB.Client/Linq/FdbAsyncMode.cs rename to FoundationDB.Client/Shared/Linq/AsyncIterationHint.cs index 69765f3b9..681cf95f4 100644 --- a/FoundationDB.Client/Linq/FdbAsyncMode.cs +++ b/FoundationDB.Client/Shared/Linq/AsyncIterationHint.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense 
SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,13 +26,13 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq { /// /// Defines the intent of a consumer of an async iterator /// - public enum FdbAsyncMode + public enum AsyncIterationHint { /// /// Use the default settings. The provider will make no attempt at optimizing the query. @@ -42,7 +42,6 @@ public enum FdbAsyncMode /// /// The query will be consumed by chunks and may be aborted at any point. The provider will produce small chunks of data for the first few reads but should still be efficient if the caller consume all the sequence. /// - /// Iterator, /// @@ -62,4 +61,4 @@ public enum FdbAsyncMode } -} \ No newline at end of file +} diff --git a/FoundationDB.Client/Shared/Linq/EnumerableExtensions.cs b/FoundationDB.Client/Shared/Linq/EnumerableExtensions.cs new file mode 100644 index 000000000..8828d1fd9 --- /dev/null +++ b/FoundationDB.Client/Shared/Linq/EnumerableExtensions.cs @@ -0,0 +1,140 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + + namespace Doxense.Linq +{ + using System; + using System.Collections.Generic; + using System.Linq; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; + + /// Provides a set of static methods for querying objects that implement . + public static class EnumerableExtensions + { + + //TODO: ajouter Batch(...) ? + //TODO: peut-être merger avec les CollectionExtensions.cs? + + /// Determines wether a sequence contains no elements at all. + /// This is the logical equivalent to "source.Count() == 0" or "!source.Any()" but can be better optimized by some providers + public static bool None([NotNull, InstantHandle] this IEnumerable source) + { + Contract.NotNull(source, nameof(source)); + + using (var iterator = source.GetEnumerator()) + { + return !iterator.MoveNext(); + } + } + + /// Determines whether none of the elements of a sequence satisfies a condition. 
+ public static bool None([NotNull, InstantHandle] this IEnumerable source, [NotNull, InstantHandle] Func predicate) + { + Contract.NotNull(source, nameof(source)); + Contract.NotNull(predicate, nameof(predicate)); + + using (var iterator = source.GetEnumerator()) + { + while (iterator.MoveNext()) + { + if (predicate(iterator.Current)) return false; + } + } + return true; + } + + #region Query Statistics... + + //TODO: move this somewhere else? + + /// Measure the number of items that pass through this point of the query + /// The values returned in are only safe to read once the query has ended + [NotNull, LinqTunnel] + public static IEnumerable WithCountStatistics([NotNull] this IEnumerable source, out QueryStatistics counter) + { + Contract.NotNull(source, nameof(source)); + + var signal = new QueryStatistics(0); + counter = signal; + + // to count, we just increment the signal each type a value flows through here + Func wrapped = (x) => + { + signal.Update(checked(signal.Value + 1)); + return x; + }; + + return source.Select(wrapped); + } + + /// Measure the number and size of slices that pass through this point of the query + /// The values returned in are only safe to read once the query has ended + [NotNull, LinqTunnel] + public static IEnumerable> WithSizeStatistics([NotNull] this IEnumerable> source, out QueryStatistics statistics) + { + Contract.NotNull(source, nameof(source)); + + var data = new KeyValueSizeStatistics(); + statistics = new QueryStatistics(data); + + // to count, we just increment the signal each type a value flows through here + Func, KeyValuePair> wrapped = (kvp) => + { + data.Add(kvp.Key.Count, kvp.Value.Count); + return kvp; + }; + + return source.Select(wrapped); + } + + /// Measure the number and sizes of the keys and values that pass through this point of the query + /// The values returned in are only safe to read once the query has ended + [NotNull, LinqTunnel] + public static IEnumerable WithSizeStatistics([NotNull] this 
IEnumerable source, out QueryStatistics statistics) + { + Contract.NotNull(source, nameof(source)); + + var data = new DataSizeStatistics(); + statistics = new QueryStatistics(data); + + // to count, we just increment the signal each type a value flows through here + Func wrapped = (x) => + { + data.Add(x.Count); + return x; + }; + + return source.Select(wrapped); + } + + #endregion + + } + +} diff --git a/FoundationDB.Client/Async/IAsyncEnumerable.cs b/FoundationDB.Client/Shared/Linq/IAsyncEnumerable.cs similarity index 84% rename from FoundationDB.Client/Async/IAsyncEnumerable.cs rename to FoundationDB.Client/Shared/Linq/IAsyncEnumerable.cs index 8564718ce..069cb1a86 100644 --- a/FoundationDB.Client/Async/IAsyncEnumerable.cs +++ b/FoundationDB.Client/Shared/Linq/IAsyncEnumerable.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,8 +26,9 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Linq { + using System.Threading; using JetBrains.Annotations; // note: these interfaces are modeled after the IAsyncEnumerable and IAsyncEnumerator found in Rx @@ -38,9 +39,11 @@ namespace FoundationDB.Async public interface IAsyncEnumerable { /// Gets an asynchronous enumerator over the sequence. + /// Token used to cancel the iterator from the outside + /// Defines how the enumerator will be used by the caller. The source provider can use the mode to optimize how the results are produced. /// Enumerator for asynchronous enumeration over the sequence. 
[NotNull] - IAsyncEnumerator GetEnumerator(); + IAsyncEnumerator GetEnumerator(CancellationToken ct, AsyncIterationHint hint); } -} \ No newline at end of file +} diff --git a/FoundationDB.Client/Async/IAsyncEnumerator.cs b/FoundationDB.Client/Shared/Linq/IAsyncEnumerator.cs similarity index 89% rename from FoundationDB.Client/Async/IAsyncEnumerator.cs rename to FoundationDB.Client/Shared/Linq/IAsyncEnumerator.cs index 0db0e3564..095b60640 100644 --- a/FoundationDB.Client/Async/IAsyncEnumerator.cs +++ b/FoundationDB.Client/Shared/Linq/IAsyncEnumerator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,10 +26,9 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Async +namespace Doxense.Linq { using System; - using System.Threading; using System.Threading.Tasks; // note: these interfaces are modeled after the IAsyncEnumerable and IAsyncEnumerator found in Rx @@ -40,12 +39,11 @@ namespace FoundationDB.Async public interface IAsyncEnumerator : IDisposable { /// Advances the enumerator to the next element in the sequence, returning the result asynchronously. - /// Cancellation token that can be used to cancel the operation. /// - /// Task containing the result of the operation: true if the enumerator was successfully advanced + /// Task containing the result of the operation: true if the enumerator was successfully advanced /// to the next element; false if the enumerator has passed the end of the sequence. /// - Task MoveNext(CancellationToken cancellationToken); + Task MoveNextAsync(); /// Gets the current element in the iteration. 
T Current { get; } diff --git a/FoundationDB.Client/Linq/IFdbAsyncOrderedEnumerable.cs b/FoundationDB.Client/Shared/Linq/IAsyncOrderedEnumerable.cs similarity index 67% rename from FoundationDB.Client/Linq/IFdbAsyncOrderedEnumerable.cs rename to FoundationDB.Client/Shared/Linq/IAsyncOrderedEnumerable.cs index 97644cfdb..029583a2c 100644 --- a/FoundationDB.Client/Linq/IFdbAsyncOrderedEnumerable.cs +++ b/FoundationDB.Client/Shared/Linq/IAsyncOrderedEnumerable.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,20 +26,21 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq { - using FoundationDB.Async; - using FoundationDB.Client.Utils; using JetBrains.Annotations; using System; using System.Collections.Generic; - using System.Threading; - using System.Threading.Tasks; - public interface IFdbAsyncOrderedEnumerable : IFdbAsyncEnumerable + // note: these interfaces are modeled after the IAsyncEnumerable and IAsyncEnumerator found in Rx + //TODO: if/when async enumerables are avail in C#, we would just need to either remove these interfaces, or make them implement the real stuff + + /// Asynchronous version of the interface, allowing elements of the enumerable sequence to be retrieved asynchronously. 
+ /// + public interface IAsyncOrderedEnumerable : IAsyncEnumerable { [NotNull, LinqTunnel] - IFdbAsyncOrderedEnumerable CreateOrderedEnumerable([NotNull] Func keySelector, IComparer comparer, bool descending); + IAsyncOrderedEnumerable CreateOrderedEnumerable([NotNull] Func keySelector, IComparer comparer, bool descending); } } diff --git a/FoundationDB.Client/Linq/FdbParallelQueryOptions.cs b/FoundationDB.Client/Shared/Linq/ParallelAsyncQueryOptions.cs similarity index 94% rename from FoundationDB.Client/Linq/FdbParallelQueryOptions.cs rename to FoundationDB.Client/Shared/Linq/ParallelAsyncQueryOptions.cs index fea5f3126..72b093088 100644 --- a/FoundationDB.Client/Linq/FdbParallelQueryOptions.cs +++ b/FoundationDB.Client/Shared/Linq/ParallelAsyncQueryOptions.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,13 +26,13 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Linq { using System; using System.Threading.Tasks; /// Container class for all settings relative to parallel operations - public sealed class FdbParallelQueryOptions + public sealed class ParallelAsyncQueryOptions { /// Maximum number of concurrent async tasks that can run in parallel diff --git a/FoundationDB.Client/FdbKeyRangeComparer.cs b/FoundationDB.Client/Shared/Linq/QueryStatistics.cs similarity index 52% rename from FoundationDB.Client/FdbKeyRangeComparer.cs rename to FoundationDB.Client/Shared/Linq/QueryStatistics.cs index 24d1f2c07..fcf73728f 100644 --- a/FoundationDB.Client/FdbKeyRangeComparer.cs +++ b/FoundationDB.Client/Shared/Linq/QueryStatistics.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -26,60 +26,63 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Linq { - using FoundationDB.Client.Utils; using System; - using System.Collections.Generic; - using System.Diagnostics; - [DebuggerDisplay("Mode={m_mode}")] - public sealed class FdbKeyRangeComparer : IComparer, IEqualityComparer + public class QueryStatistics { - private const int BOTH = 0; - private const int BEGIN = 1; - private const int END = 2; + public QueryStatistics() + { } - public static readonly FdbKeyRangeComparer Default = new FdbKeyRangeComparer(BOTH); - public static readonly FdbKeyRangeComparer Begin = new FdbKeyRangeComparer(BEGIN); - public static readonly FdbKeyRangeComparer End = new FdbKeyRangeComparer(END); - - private readonly int m_mode; - - private FdbKeyRangeComparer(int mode) + public QueryStatistics(TData value) { - Contract.Requires(mode >= BOTH && mode <= END); - m_mode = mode; + this.Value = value; } - public int Compare(FdbKeyRange x, FdbKeyRange y) + public TData Value { get; protected set; } + + public void Update(TData newValue) { - switch (m_mode) - { - case BEGIN: return x.Begin.CompareTo(y.Begin); - case END: return x.End.CompareTo(y.End); - default: return x.CompareTo(y); - } + this.Value = newValue; } + } + + + public sealed class KeyValueSizeStatistics + { + /// Total number of pairs of keys and values that have flowed through this point + public long Count { get; private set; } - public bool Equals(FdbKeyRange x, FdbKeyRange y) + /// Total size of all keys and values combined + public long Size => checked(this.KeySize + this.ValueSize); + + /// Total size of all keys combined + public long KeySize { get; private set; } + + /// Total size of all values combined + public long ValueSize { get; private set; } + + public void Add(int keySize, int valueSize) { - switch(m_mode) - { - case BEGIN: return x.Begin.Equals(y.Begin); - case 
END: return x.End.Equals(y.End); - default: return x.Equals(y); - } + this.Count++; + this.KeySize = checked(keySize + this.KeySize); + this.ValueSize = checked(valueSize + this.ValueSize); } + } + + public sealed class DataSizeStatistics + { + /// Total number of items that have flowed through this point + public long Count { get; private set; } + + /// Total size of all items that have flowed through this point + public long Size { get; private set; } - public int GetHashCode(FdbKeyRange obj) + public void Add(int size) { - switch(m_mode) - { - case BEGIN: return obj.Begin.GetHashCode(); - case END: return obj.End.GetHashCode(); - default: return obj.GetHashCode(); - } + this.Count++; + this.Size = checked(size + this.Size); } } diff --git a/FoundationDB.Client/Shared/Memory/BitHelpers.cs b/FoundationDB.Client/Shared/Memory/BitHelpers.cs new file mode 100644 index 000000000..1f8505a98 --- /dev/null +++ b/FoundationDB.Client/Shared/Memory/BitHelpers.cs @@ -0,0 +1,777 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Memory +{ + using System; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using JetBrains.Annotations; + + /// Helper methods to work with bits + [PublicAPI] + [DebuggerNonUserCode] + public static class BitHelpers + { + + #region Power of Twos + + /// Round a number to the next power of 2 + /// Positive integer that will be rounded up (if not already a power of 2) + /// Smallest power of 2 that is greater than or equal to + /// Will return 1 for = 0 (because 0 is not a power of 2 !), and will throw for < 0 + /// If is greater than 2^31 and would overflow + [Pure] + public static uint NextPowerOfTwo(uint x) + { + // cf http://en.wikipedia.org/wiki/Power_of_two#Algorithm_to_round_up_to_power_of_two + + // special cases + if (x == 0) return 1; + if (x > (1U << 31)) throw UnsafeHelpers.Errors.PowerOfTwoOverflow(); + + --x; + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + x |= (x >> 8); + x |= (x >> 16); + return x + 1; + } + + /// Round a number to the next power of 2 + /// Positive integer that will be rounded up (if not already a power of 2) + /// Smallest power of 2 that is greater then or equal to + /// Will return 1 for = 0 (because 0 is not a power 2 !), and will throws for < 0 + /// If is negative, or it is greater than 2^30 and would overflow. 
+ [Pure] + public static int NextPowerOfTwo(int x) + { + // cf http://en.wikipedia.org/wiki/Power_of_two#Algorithm_to_round_up_to_power_of_two + + // special cases + if (x == 0) return 1; + if ((uint)x > (1U << 30)) throw UnsafeHelpers.Errors.PowerOfTwoNegative(); + + --x; + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + x |= (x >> 8); + x |= (x >> 16); + return x + 1; + } + + /// Round a number to the next power of 2 + /// Positive integer that will be rounded up (if not already a power of 2) + /// Smallest power of 2 that is greater than or equal to + /// Will return 1 for = 0 (because 0 is not a power of 2 !), and will throw for < 0 + /// If is greater than 2^63 and would overflow + [Pure] + public static ulong NextPowerOfTwo(ulong x) + { + // cf http://en.wikipedia.org/wiki/Power_of_two#Algorithm_to_round_up_to_power_of_two + + // special cases + if (x == 0) return 1; + if (x > (1UL << 63)) throw UnsafeHelpers.Errors.PowerOfTwoOverflow(); + + --x; + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + x |= (x >> 8); + x |= (x >> 16); + x |= (x >> 32); + return x + 1; + } + + /// Round a number to the next power of 2 + /// Positive integer that will be rounded up (if not already a power of 2) + /// Smallest power of 2 that is greater then or equal to + /// Will return 1 for = 0 (because 0 is not a power 2 !), and will throws for < 0 + /// If is negative, or it is greater than 2^62 and would overflow. 
+ [Pure] + public static long NextPowerOfTwo(long x) + { + // cf http://en.wikipedia.org/wiki/Power_of_two#Algorithm_to_round_up_to_power_of_two + + // special cases + if (x == 0) return 1; + if ((ulong) x > (1UL << 62)) throw UnsafeHelpers.Errors.PowerOfTwoNegative(); + + --x; + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + x |= (x >> 8); + x |= (x >> 16); + x |= (x >> 32); + return x + 1; + } + + /// Test if a number is a power of 2 + /// True if is expressible as 2^i (i>=0) + /// 0 is NOT considered to be a power of 2 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool IsPowerOfTwo(int x) + { + return x > 0 & unchecked((x & (x - 1)) == 0); + } + + /// Test if a number is a power of 2 + /// True if is expressible as 2^i (i>=0) + /// 0 is NOT considered to be a power of 2 + /// This methods guarantees that IsPowerOfTwo(x) == (NextPowerOfTwo(x) == x) + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool IsPowerOfTwo(uint x) + { + return x != 0 & unchecked((x & (x - 1)) == 0); + } + + /// Test if a number is a power of 2 + /// True if is expressible as 2^i (i>=0) + /// 0 is NOT considered to be a power of 2 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool IsPowerOfTwo(long x) + { + return x > 0 & unchecked((x & (x - 1)) == 0); + } + + /// Test if a number is a power of 2 + /// True if is expressible as 2^i (i>=0) + /// 0 is NOT considered to be a power of 2 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool IsPowerOfTwo(ulong x) + { + return x != 0 & unchecked((x & (x - 1)) == 0); + } + + #endregion + + #region Alignment / Padding... + + //REVIEW: align/padding should probably be moved somewhere else because it does not really have anything to do bith bit twiddling... 
+ + /// Round a size to a multiple of a specific value + /// Minimum size required + /// Final size must be a multiple of this number + /// Result cannot be less than this value + /// Size rounded up to the next multiple of , or 0 if is negative + /// For aligments that are powers of two, will be faster + /// If the rounded size overflows over 2 GB + [Pure] + public static int Align(int size, [Positive] int alignment, int minimum = 0) + { + //Contract.Requires(alignment > 0); + long x = Math.Max(size, minimum); + x += alignment - 1; + x /= alignment; + x *= alignment; + return checked((int) x); + } + + /// Round a size to a multiple of power of two + /// Minimum size required + /// Must be a power two + /// Size rounded up to the next multiple of + /// If the rounded size overflows over 2 GB + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int AlignPowerOfTwo(int size, [PowerOfTwo] int powerOfTwo = 16) + { + //Contract.Requires(BitHelpers.IsPowerOfTwo(powerOfTwo)); + if (size <= 0) + { + return size < 0 ? 0 : powerOfTwo; + } + int mask = powerOfTwo - 1; + // force an exception if we overflow above 2GB + return checked(size + mask) & ~mask; + } + + /// Round a size to a multiple of a specific value + /// Minimum size required + /// Final size must be a multiple of this number + /// Result cannot be less than this value + /// Size rounded up to the next multiple of . + /// + /// For aligments that are powers of two, will be faster. 
+ /// + /// If the rounded size overflows over 2 GB + [Pure] + public static uint Align(uint size, uint alignment, uint minimum = 0) + { + //Contract.Requires(alignment > 0); + ulong x = Math.Max(size, minimum); + x += alignment - 1; + x /= alignment; + x *= alignment; + return checked((uint) x); + } + + /// Round a size to a multiple of power of two + /// Minimum size required + /// Must be a power two + /// Size rounded up to the next multiple of + /// If the rounded size overflows over 4 GB + [Pure] + public static uint AlignPowerOfTwo(uint size, [PowerOfTwo] uint powerOfTwo = 16U) + { + //Contract.Requires(BitHelpers.IsPowerOfTwo(powerOfTwo)); + if (size == 0) return powerOfTwo; + uint mask = powerOfTwo - 1; + // force an exception if we overflow above 4GB + return checked(size + mask) & ~mask; + } + + /// Round a size to a multiple of a specific value + /// Minimum size required + /// Final size must be a multiple of this number + /// Result cannot be less than this value + /// Size rounded up to the next multiple of , or 0 if is negative + /// For aligments that are powers of two, will be faster + /// If the rounded size overflows over 2^63 + [Pure] + public static long Align(long size, [Positive] long alignment, long minimum = 0) + { + //Contract.Requires(alignment > 0); + long x = Math.Max(size, minimum); + // we have to divide first and check the modulo, because adding (aligment+1) before could overflow at the wrong time + long y = x /alignment; + if (x % alignment != 0) ++y; + return checked(y * alignment); + } + + /// Round a size to a multiple of power of two + /// Minimum size required + /// Must be a power two + /// Size rounded up to the next multiple of + /// If the rounded size overflows over long.MaxValue + [Pure] + public static long AlignPowerOfTwo(long size, [PowerOfTwo] long powerOfTwo = 16L) + { + //Contract.Requires(BitHelpers.IsPowerOfTwo(powerOfTwo)); + if (size <= 0) + { + return size < 0 ? 
0 : powerOfTwo; + } + // force an exception if we overflow above ulong.MaxValue + long mask = powerOfTwo - 1; + return checked(size + mask) & ~mask; + } + + /// Round a size to a multiple of a specific value + /// Minimum size required + /// Final size must be a multiple of this number + /// Result cannot be less than this value + /// Size rounded up to the next multiple of . + /// + /// For aligments that are powers of two, will be faster. + /// + /// If the rounded size overflows over 2^63 + [Pure] + public static ulong Align(ulong size, ulong alignment, ulong minimum = 0) + { + //Contract.Requires(alignment > 0); + ulong x = Math.Max(size, minimum); + // we have to divide first and check the modulo, because adding (aligment+1) before could overflow at the wrong time + ulong y = x / alignment; + if (x % alignment != 0) ++y; + return checked(y * alignment); + } + + /// Round a size to a multiple of power of two + /// Minimum size required + /// Must be a power two + /// Size rounded up to the next multiple of + /// If the rounded size overflows over ulong.MaxValue + [Pure] + public static ulong AlignPowerOfTwo(ulong size, [PowerOfTwo] ulong powerOfTwo = 16UL) + { + //Contract.Requires(BitHelpers.IsPowerOfTwo(powerOfTwo)); + + if (size == 0) + { + return powerOfTwo; + } + // force an exception if we overflow above ulong.MaxValue + ulong mask = powerOfTwo - 1; + return checked(size + mask) & ~mask; + } + + /// Computes the number of padding bytes needed to align a buffer to a specific alignment + /// Size of the buffer + /// Alignement required (must be a power of two) + /// Number of padding bytes required to end up with a buffer size multiple of . 
Returns 0 if the buffer is already aligned + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int PaddingPowerOfTwo(int size, [PowerOfTwo] int powerOfTwo = 16) + { + //Contract.Requires(BitHelpers.IsPowerOfTwo(powerOfTwo)); + return (~size + 1) & (powerOfTwo - 1); + + } + + /// Computes the number of padding bytes needed to align a buffer to a specific alignment + /// Size of the buffer + /// Alignement required (must be a power of two) + /// Number of padding bytes required to end up with a buffer size multiple of . Returns 0 if the buffer is already aligned + /// Result is unspecified if is 0 or not a power of 2 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint PaddingPowerOfTwo(uint size, [PowerOfTwo] uint powerOfTwo = 16) + { + //Contract.Requires(BitHelpers.IsPowerOfTwo(powerOfTwo)); + return (~size + 1) & (powerOfTwo - 1); + } + + /// Computes the number of padding bytes needed to align a buffer to a specific alignment + /// Size of the buffer + /// Alignement required (must be a power of two) + /// Number of padding bytes required to end up with a buffer size multiple of . Returns 0 if the buffer is already aligned + /// Result is unspecified if is 0 or not a power of 2 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static long PaddingPowerOfTwo(long size, [PowerOfTwo] long powerOfTwo = 16) + { + //Contract.Requires(BitHelpers.IsPowerOfTwo(powerOfTwo)); + return (~size + 1) & (powerOfTwo - 1); + + } + + /// Computes the number of padding bytes needed to align a buffer to a specific alignment + /// Size of the buffer + /// Alignement required (must be a power of two) + /// Number of padding bytes required to end up with a buffer size multiple of . 
Returns 0 if the buffer is already aligned + /// Result is unspecified if is 0 or not a power of 2 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ulong PaddingPowerOfTwo(ulong size, [PowerOfTwo] ulong powerOfTwo = 16) + { + //Contract.Requires(BitHelpers.IsPowerOfTwo(powerOfTwo)); + return (~size + 1) & (powerOfTwo - 1); + } + + #endregion + + #region CountBits... + + // CountBits(x) == POPCNT == number of bits that are set to 1 in a word + // - CountBits(0) == 0 + // - CountBits(8) == 1 + // - CountBits(42) == 3 + // - CountBits(uint.MaxValue) == 32 + + /// Count the number of bits set to 1 in a 32-bit signed integer + /// Value between 0 and 32 + [Pure] //REVIEW: force inline or not? + public static int CountBits(int value) + { + // cf https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSet64 + // PERF: this averages ~2ns/op OnMyMachine(tm) + value = value - ((value >> 1) & 0x55555555); + value = (value & 0x33333333) + ((value >> 2) & 0x33333333); + value = ((value + (value >> 4) & 0xF0F0F0F) * 0x1010101) >> (32 - 8); + return value; + } + + /// Count the number of bits set to 1 in a 32-bit unsigned integer + /// Value between 0 and 32 + [Pure] //REVIEW: force inline or not? + public static int CountBits(uint value) + { + // cf https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSet64 + // PERF: this averages ~2ns/op OnMyMachine(tm) + value = value - ((value >> 1) & 0x55555555); + value = (value & 0x33333333) + ((value >> 2) & 0x33333333); + value = ((value + (value >> 4) & 0xF0F0F0F) * 0x1010101) >> (32 - 8); + return (int) value; + } + + /// Count the number of bits set to 1 in a 64-bit signed integer + /// Value between 0 and 64 + [Pure] //REVIEW: force inline or not? 
+ public static int CountBits(long value) + { + // cf https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSet64 + // PERF: this averages ~2.5ns/op OnMyMachine(tm) + value = value - ((value >> 1) & 0x5555555555555555); + value = (value & 0x3333333333333333) + ((value >> 2) & 0x3333333333333333); + value = ((value + (value >> 4) & 0x0F0F0F0F0F0F0F0F) * 0x0101010101010101) >> (64 - 8); + return (int) value; + } + + /// Count the number of bits set to 1 in a 32-bit unsigned integer + /// Value between 0 and 64 + [Pure] //REVIEW: force inline or not? + public static int CountBits(ulong value) + { + // cf https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSet64 + // PERF: this averages ~2.5ns/op OnMyMachine(tm) + value = value - ((value >> 1) & 0x5555555555555555); + value = (value & 0x3333333333333333) + ((value >> 2) & 0x3333333333333333); + value = ((value + (value >> 4) & 0x0F0F0F0F0F0F0F0F) * 0x0101010101010101) >> (64 - 8); + return (int) value; + } + + #endregion + + #region MostSignificantBit... 
+ + // MostSignificantBit(x) == Highest bit index (0..63) of the first bit set to 1 + // - MostSignificantBit(1) == 0 + // - MostSignificantBit(8) == 3 + // - MostSignificantBit(42) == 5 + // - MostSignificantBit(uint.MaxValue) == 31 + // Remark: if the value can be 0, the convention is to return to the word size (32 or 64) + // - MostSignificantBit(default(uint)) == 32 + // - MostSignificantBit(default(ulong)) == 64 + // MostSignificantBitNonZeroXX(x) is a no-branch variant which is undefined for x == 0 + + private static readonly int[] MultiplyDeBruijnBitPosition32 = new int[32] + { + 0, 9, 1, 10, 13, 21, 2, 29, 11, 14, 16, 18, 22, 25, 3, 30, + 8, 12, 20, 28, 15, 17, 24, 7, 19, 27, 23, 6, 26, 5, 4, 31 + }; + + private static readonly int[] MultiplyDeBruijnBitPosition64 = new int[64] + { + 63, 0, 58, 1, 59, 47, 53, 2, + 60, 39, 48, 27, 54, 33, 42, 3, + 61, 51, 37, 40, 49, 18, 28, 20, + 55, 30, 34, 11, 43, 14, 22, 4, + 62, 57, 46, 52, 38, 26, 32, 41, + 50, 36, 17, 19, 29, 10, 13, 21, + 56, 45, 25, 31, 35, 16, 9, 12, + 44, 24, 15, 8, 23, 7, 6, 5 + }; + + /// Return the position of the highest bit that is set + /// Value between 0 and 32 + /// + /// Result is 32 if is 0. + /// If the value of is known to be non-zero, then you can call directly. + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int MostSignificantBit(int v) + { + return v == 0 ? 32 : MostSignificantBitNonZero32((uint) v); + } + + /// Return the position of the highest bit that is set + /// Value between 0 and 32 + /// + /// Result is 32 if is 0. + /// If the value of is known to be non-zero, then you can call directly. + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int MostSignificantBit(uint v) + { + return v == 0 ? 32 : MostSignificantBitNonZero32(v); + } + + /// Return the position of the highest bit that is set + /// Result is unspecified if is 0. + [Pure] //REVIEW: force inline or not? 
+ public static int MostSignificantBitNonZero32(uint v) + { + // from: http://graphics.stanford.edu/~seander/bithacks.html#IntegerLogDeBruijn + v |= v >> 1; // first round down to one less than a power of 2 + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + + var r = (v * 0x07C4ACDDU) >> 27; + return MultiplyDeBruijnBitPosition32[r & 31]; + } + + /// Return the position of the highest bit that is set + /// Value between 0 and 64 + /// + /// Result is 64 if is 0. + /// If the value of is known to be non-zero, then you can call directly. + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int MostSignificantBit(long v) + { + return v == 0 ? 64 : MostSignificantBitNonZero64((ulong) v); + } + + /// Return the position of the highest bit that is set + /// Value between 0 and 64 + /// + /// Result is 64 if is zero. + /// If the value of is known to be non-zero, then you can call directly. + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int MostSignificantBit(ulong v) + { + return v == 0 ? 64 : MostSignificantBitNonZero64(v); + } + + /// Return the position of the highest bit that is set + /// Result is unspecified if is 0. + [Pure] //REVIEW: force inline or not? + public static int MostSignificantBitNonZero64(ulong nonZero) + { + ulong v = nonZero; + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v |= v >> 32; + + var r = ((v - (v >> 1)) * 0x07EDD5E59A4E28C2UL) >> 58; + return MultiplyDeBruijnBitPosition64[r & 63]; + } + + #endregion + + #region LeastSignificantBit... 
+ + // LeastSignificantBit(x) == Smallest bit index (0..63) of the first bit set to 1 + // - LeastSignificantBit(1) == 0 + // - LeastSignificantBit(8) == 3 + // - LeastSignificantBit(42) == 2 + // - LeastSignificantBit(uint.MaxValue) = 0 + // Remark: if the value is 0, the convention is to return to the word size (32 or 64) + // - LeastSignificantBit(default(uint)) == 32 + // - LeastSignificantBit(default(ulong)) == 64 + // LeastSignificantBitNonZeroXX(x) is a no-branch variant which is undefined for x == 0 + + /// Return the position of the lowest bit that is set + /// Value between 0 and 32 + /// Result is 32 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LeastSignificantBit(int v) + { + return v == 0 ? 32 : LeastSignificantBitNonZero32(v); + } + + /// Return the position of the lowest bit that is set + /// Value between 0 and 32 + /// Result is 32 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LeastSignificantBit(uint v) + { + return v == 0 ? 32 : LeastSignificantBitNonZero32(v); + } + + /// Return the position of the lowest bit that is set + /// Result is unspecified if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LeastSignificantBitNonZero32(long nonZero) + { + // This solution does not have any branch, but conversion to float may not be fast enough on some architecture... + //PERF: this averages 2.5ns/op OnMyMachine() + unsafe + { + //note: nonZero must be a long, because -int.MaxValue would overflow on 32-bit + var d = (float) (nonZero & -nonZero); + return (int) (((*(uint*) &d) >> 23) - 0x7f); + //note: this returns -127 if w == 0, which is "negative" + } + } + + /// Return the position of the lowest bit that is set + /// Value between 0 and 64 + /// Result is 64 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LeastSignificantBit(ulong v) + { + return v == 0 ? 
64 : LeastSignificantBitNonZero64((long) v); + } + + /// Return the position of the lowest bit that is set + /// Value between 0 and 64 + /// Result is 64 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LeastSignificantBit(long v) + { + return v == 0 ? 64 : LeastSignificantBitNonZero64(v); + } + + /// Return the position of the lowest bit that is set + /// Result is unspecified if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LeastSignificantBitNonZero64(long nonZero) + { + // This solution does not have any branch, but conversion to double may not be fast enough on some architecture... + //PERF: this averages 2.5ns/op OnMyMachine() + unsafe + { + // isolated LS1B to double + var d = (double)(nonZero & -nonZero); + // exponent is in bits 52 to 62 (11 bits) + ulong l = *((ulong*)&d); + ulong exp = (l >> 52) & ((1 << 11) - 1); + return (int)(exp - 1023); + //note: this returns -1023 if w == 0, which is "negative" + } + } + + #endregion + + #region FirstNonZeroByte... + + // FirstNonZeroByte(x) == offset of the first byte in a multi-byte word, that has at least one bit set to 1 + // - FirstNonZeroByte(0x000042) == 0 + // - FirstNonZeroByte(0x004200) == 1 + // - FirstNonZeroByte(0x004201) == 0 + // - FirstNonZeroByte(0x420000) == 2 + // - FirstNonZeroByte(0x420001) == 0 + // Remark: if the value is 0, the convention is to return to the word size in bytes (4 or 8) + // - FirstNonZeroByte(default(uint)) == 4 + // - FirstNonZeroByte(default(ulong)) == 8 + + /// Return the offset of the first non-zero byte + /// Value between 0 and 4 + /// Returns 4 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int FirstNonZeroByte(int v) + { + return v == 0 ? 
4 : (LeastSignificantBitNonZero32(v) >> 3); + } + + /// Return the offset of the first non-zero byte + /// Value between 0 and 4 + /// Returns 4 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int FirstNonZeroByte(uint v) + { + return v == 0 ? 4 : (LeastSignificantBitNonZero32((int) v) >> 3); + } + + /// Return the offset of the first non-zero byte + /// Value between 0 and 8 + /// Returns 8 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int FirstNonZeroByte(long v) + { + return v == 0 ? 8 : (LeastSignificantBitNonZero64(v) >> 3); + } + + /// Return the offset of the first non-zero byte + /// Value between 0 and 8 + /// Returns 8 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int FirstNonZeroByte(ulong v) + { + return v == 0 ? 8 : (LeastSignificantBitNonZero64((long) v) >> 3); + } + + #endregion + + #region LastNonZeroByte... + + // LastNonZeroByte(x) == offset of the first byte in a multi-byte word, that has at least one bit set to 1 + // - LastNonZeroByte(0x000042) == 0 + // - LastNonZeroByte(0x004200) == 1 + // - LastNonZeroByte(0x004201) == 1 + // - LastNonZeroByte(0x420000) == 2 + // - LastNonZeroByte(0x420001) == 2 + // Remark: if the value is 0, the convention is to return to the word size in bytes (4 or 8) + // - LastNonZeroByte(default(uint)) == 4 + // - LastNonZeroByte(default(ulong)) == 8 + + /// Return the offset of the last non-zero byte + /// Returns 4 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LastNonZeroByte(int v) + { + return v == 0 ? 4 : (MostSignificantBitNonZero32((uint) v) >> 3); + } + + /// Return the offset of the last non-zero byte + /// Returns 4 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LastNonZeroByte(uint v) + { + return v == 0 ? 
4 : (MostSignificantBitNonZero32(v) >> 3); + } + + /// Return the offset of the last non-zero byte + /// Returns 8 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LastNonZeroByte(long v) + { + return v == 0 ? 8 : (MostSignificantBitNonZero64((ulong) v) >> 3); + } + + /// Return the offset of the last non-zero byte + /// Returns 8 if is 0 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LastNonZeroByte(ulong v) + { + return v == 0 ? 8 : (MostSignificantBitNonZero64(v) >> 3); + } + + #endregion + + #region RotL/RotR... + + /// Rotate bits to the left (ROTL) + /// RotL32(0x12345678, 4) = 0x23456781 + /// Equivalent of the 'rotl' CRT function, or the 'ROL' x86 instruction + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint RotL32(uint x, int n) + { + return (x << n) | (x >> (32 - n)); + } + + /// Rotate bits to the right (ROTR) + /// RotR32(0x12345678, 4) = 0x81234567 + /// Equivalent of the 'rotr' CRT function, or the 'ROR' x86 instruction + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint RotR32(uint x, int n) + { + return (x >> n) | (x << (32 - n)); + } + + /// Rotate bits to the left (ROTL64) + /// RotL64(0x0123456789ABCDEF, 4) = 0x123456789ABCDEF0 + /// Equivalent of the '_rotl64' CRT function, or the 'ROL' x64 instruction + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ulong RotL64(ulong x, int n) + { + return (x << n) | (x >> (64 - n)); + } + + /// Rotate bits to the right (ROTR64) + /// RotR64(0x0123456789ABCDEF, 4) = 0xF0123456789ABCDE + /// Equivalent of the '_rotr64' CRT function, or the 'ROR' x64 instruction + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ulong RotR64(ulong x, int n) + { + return (x >> n) | (x << (64 - n)); + } + + #endregion + + } + +} diff --git a/FoundationDB.Client/Shared/Memory/Slice.Comparer.cs b/FoundationDB.Client/Shared/Memory/Slice.Comparer.cs new file 
mode 100644 index 000000000..f5f367792 --- /dev/null +++ b/FoundationDB.Client/Shared/Memory/Slice.Comparer.cs @@ -0,0 +1,89 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace System +{ + using System; + using System.Collections.Generic; + using Doxense.Memory; + + public partial struct Slice + { + + /// Performs optimized equality and comparison checks on Slices + public sealed class Comparer : IComparer, IEqualityComparer, IComparer> + { + /// Default instance of the slice comparator + public static readonly Comparer Default = new Comparer(); + + private Comparer() + { } + + /// Lexicographically compare two slices and returns an indication of their relative sort order. + /// Slice compared with + /// Slice compared with + /// Returns a NEGATIVE value if is LESS THAN , ZERO if is EQUAL TO , and a POSITIVE value if is GREATER THAN . + /// + /// If both and are nil or empty, the comparison will return ZERO. If only is nil or empty, it will return a NEGATIVE value. If only is nil or empty, it will return a POSITIVE value. + /// There are no guarantees that non-zero results will be exactly -1 or +1. You should always use comparison operators or the sign of the returned value, instead of testing for equality with -1 or +1. + /// + public int Compare(Slice x, Slice y) + { + //REVIEW: cmp(Nil, Empty) returns 0 but Nil != Empty ? + if (x.Count == 0) return y.Count == 0 ? 0 : -1; + if (y.Count == 0) return +1; + return UnsafeHelpers.Compare(x.Array, x.Offset, x.Count, y.Array, y.Offset, y.Count); + } + + /// Checks if two slices are equal. + /// Slice compared with + /// Slice compared with + /// true if and have the same size and contain the same sequence of bytes; otherwise, false. + public bool Equals(Slice x, Slice y) + { + return x.Count == y.Count && UnsafeHelpers.SameBytes(x.Array, x.Offset, y.Array, y.Offset, y.Count); + } + + /// Computes the hash code of a slice + /// A slice + /// A 32-bit signed hash coded calculated from all the bytes in the slice + public int GetHashCode(Slice obj) + { + return obj.Array == null ? 
0 : UnsafeHelpers.ComputeHashCode(obj.Array, obj.Offset, obj.Count); + } + + int IComparer>.Compare(KeyValuePair x, KeyValuePair y) + { + // only compare the keys + return Compare(x.Key, y.Key); + } + + } + } + +} diff --git a/FoundationDB.Client/Shared/Memory/Slice.Encoding.cs b/FoundationDB.Client/Shared/Memory/Slice.Encoding.cs new file mode 100644 index 000000000..64057c81b --- /dev/null +++ b/FoundationDB.Client/Shared/Memory/Slice.Encoding.cs @@ -0,0 +1,2506 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +//#define ENABLE_SPAN + +namespace System +{ + using System; + using System.Globalization; + using System.Runtime.CompilerServices; + using System.Text; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using JetBrains.Annotations; + + public partial struct Slice + { + + #region FromXXX... + + /// Decode a Base64 encoded string into a slice + [Pure] + public static Slice FromBase64(string base64String) + { + return base64String == null ? Slice.Nil : base64String.Length == 0 ? Slice.Empty : Convert.FromBase64String(base64String).AsSlice(); + } + + #region 8-bit integers... + + /// Encode an unsigned 8-bit integer into a slice + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] //used as a shortcut by a lot of other methods + public static Slice FromByte(byte value) + { + return new Slice(ByteSprite, value, 1); + } + + /// Encode an unsigned 8-bit integer into a slice + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] //used as a shortcut by a lot of other methods + public static Slice FromByte(int value) + { + if ((uint) value > 255) ThrowHelper.ThrowArgumentOutOfRangeException(nameof(value)); + return new Slice(ByteSprite, value, 1); + } + + #endregion + + #region 16-bit integers + + /// Encode a signed 16-bit integer into a variable size slice (1 or 2 bytes) in little-endian + [Pure] + public static Slice FromInt16(short value) + { + if (value >= 0) + { + if (value <= 255) + { + return Slice.FromByte((byte)value); + } + return new Slice(new byte[] { (byte)value, (byte)(value >> 8) }, 0, 2); + } + + return FromFixed16(value); + } + + /// Encode a signed 16-bit integer into a 2-byte slice in little-endian + [Pure] + public static Slice FromFixed16(short value) + { + return new Slice(new byte[2] { (byte) value, (byte) (value >> 8) }, 0, 2); + } + + /// Encode an unsigned 16-bit integer into a variable size slice (1 or 2 bytes) in little-endian + [Pure] + public static Slice FromUInt16(ushort value) + { + if 
(value <= 255) + { + return Slice.FromByte((byte)value); + } + else + { + return FromFixedU16(value); + } + } + + /// Encode an unsigned 16-bit integer into a 2-byte slice in little-endian + /// 0x1122 => 11 22 + [Pure] + public static Slice FromFixedU16(ushort value) //REVIEW: we could drop the 'U' here + { + return new Slice(new byte[2] { (byte) value, (byte) (value >> 8) }, 0, 2); + } + + /// Encode an unsigned 16-bit integer into a 2-byte slice in big-endian + /// 0x1122 => 22 11 + [Pure] + public static Slice FromFixedU16BE(ushort value) //REVIEW: we could drop the 'U' here + { + return new Slice(new byte[2] { (byte) (value >> 8), (byte) value }, 0, 4); + } + + /// Encode an unsigned 16-bit integer into 7-bit encoded unsigned int (aka 'Varint16') + [Pure] + public static Slice FromVarint16(ushort value) + { + if (value < 128) + { + return FromByte((byte)value); + } + else + { + var writer = new SliceWriter(3); + writer.WriteVarInt16(value); + return writer.ToSlice(); + } + } + + #endregion + + #region 32-bit integers + + /// Encode a signed 32-bit integer into a variable size slice (1 to 4 bytes) in little-endian + [Pure] + public static Slice FromInt32(int value) + { + if (value >= 0) + { + if (value <= (1 << 8) - 1) + { + return Slice.FromByte((byte)value); + } + if (value <= (1 << 16) - 1) + { + //TODO: possible micro optimization is for values like 0x100, 0x201, 0x1413 or 0x4342, where we could use 2 consecutive bytes in the ByteSprite, + return new Slice(new byte[2] { (byte)value, (byte)(value >> 8) }, 0, 2); + } + if (value <= (1 << 24) - 1) + { + return new Slice(new byte[3] { (byte)value, (byte)(value >> 8), (byte)(value >> 16) }, 0, 3); + } + } + + return FromFixed32(value); + } + + /// Encode a signed 32-bit integer into a 4-byte slice in little-endian + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice FromFixed32(int value) + { + return new Slice(new byte[4] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), 
(byte) (value >> 24) }, 0, 4); + } + + /// Encode a signed 32-bit integer into a variable size slice (1 to 4 bytes) in big-endian + [Pure] + public static Slice FromInt32BE(int value) + { + if (value >= 0) + { + if (value <= (1 << 8) - 1) + { + return Slice.FromByte((byte)value); + } + if (value <= (1 << 16) - 1) + { + //TODO: possible micro optimization is for values like 0x100, 0x201, 0x1413 or 0x4342, where we could use 2 consecutive bytes in the ByteSprite, + return new Slice(new byte[2] { (byte) (value >> 8), (byte) value }, 0, 2); + } + if (value <= (1 << 24) - 1) + { + return new Slice(new byte[3] { (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 3); + } + } + return FromFixed32BE(value); + } + + /// Encode a signed 32-bit integer into a 4-byte slice in big-endian + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice FromFixed32BE(int value) + { + return new Slice(new byte[4] { (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value, }, 0, 4); + } + + /// Encode an unsigned 32-bit integer into a variable size slice (1 to 4 bytes) in little-endian + [Pure] + public static Slice FromUInt32(uint value) + { + if (value <= (1 << 8) - 1) + { + return FromByte((byte) value); + } + if (value <= (1 << 16) - 1) + { + return new Slice(new byte[2] { (byte) value, (byte) (value >> 8) }, 0, 2); + } + if (value <= (1 << 24) - 1) + { + return new Slice(new byte[3] { (byte) value, (byte) (value >> 8), (byte) (value >> 16) }, 0, 3); + } + return FromFixedU32(value); + } + + /// Encode an unsigned 32-bit integer into a 4-byte slice in little-endian + /// 0x11223344 => 11 22 33 44 + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice FromFixedU32(uint value) //REVIEW: we could drop the 'U' here + { + return new Slice(new byte[4] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24) }, 0, 4); + } + + /// Encode an unsigned 32-bit integer into a variable size 
slice (1 to 4 bytes) in big-endian + [Pure] + public static Slice FromUInt32BE(uint value) + { + if (value <= (1 << 8) - 1) + { + return FromByte((byte)value); + } + if (value <= (1 << 16) - 1) + { + return new Slice(new byte[2] { (byte) (value >> 8), (byte) value }, 0, 2); + } + if (value <= (1 << 24) - 1) + { + return new Slice(new byte[3] { (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 3); + } + return FromFixedU32BE(value); + } + + /// Encode an unsigned 32-bit integer into a 4-byte slice in big-endian + /// 0x11223344 => 44 33 22 11 + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice FromFixedU32BE(uint value) //REVIEW: we could drop the 'U' here + { + return new Slice(new byte[4] { (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 4); + } + + /// Encode an unsigned 32-bit integer into 7-bit encoded unsigned int (aka 'Varint32') + [Pure] + public static Slice FromVarint32(uint value) + { + if (value <= 127) + { // single byte slices are cached + return FromByte((byte)value); + } + + var writer = new SliceWriter(value <= (1 << 14) - 1 ? 
2 : 5); + writer.WriteVarInt32(value); + return writer.ToSlice(); + } + + #endregion + + #region 64-bit integers + + /// Encode a signed 64-bit integer into a variable size slice (1 to 8 bytes) in little-endian + [Pure] + public static Slice FromInt64(long value) + { + if (value >= 0) + { + if (value <= (1L << 32) - 1) + { + return FromInt32((int) value); + } + if (value <= (1L << 40) - 1) + { + return new Slice(new byte[5] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24), (byte) (value >> 32) }, 0, 5); + } + if (value <= (1L << 48) - 1) + { + return new Slice(new byte[6] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24), (byte) (value >> 32), (byte) (value >> 40) }, 0, 6); + } + if (value <= (1L << 56) - 1) + { + return new Slice(new byte[7] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24), (byte) (value >> 32), (byte) (value >> 40), (byte) (value >> 48) }, 0, 7); + } + } + + return FromFixed64(value); + } + + /// Encode a signed 64-bit integer into a 8-byte slice in little-endian + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice FromFixed64(long value) + { + return new Slice(new byte[8] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24), (byte) (value >> 32), (byte) (value >> 40), (byte) (value >> 48), (byte) (value >> 56) }, 0, 8); + } + + /// Encode a signed 64-bit integer into a variable size slice (1 to 8 bytes) in big-endian + [Pure] + public static Slice FromInt64BE(long value) + { + if (value >= 0) + { + if (value <= (1L << 32) - 1) + { + return FromInt32BE((int) value); + } + if (value <= (1L << 40) - 1) + { + return new Slice(new byte[5] { (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 5); + } + if (value <= (1L << 48) - 1) + { + return new Slice(new byte[6] { (byte) (value >> 40), (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 
16), (byte) (value >> 8), (byte) value }, 0, 6);
			}
			if (value <= (1L << 56) - 1)
			{
				return new Slice(new byte[7] { (byte) (value >> 48), (byte) (value >> 40), (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 7);
			}
		}

		return FromFixed64BE(value);
	}

		/// <summary>Encode a signed 64-bit integer into an 8-byte slice in big-endian</summary>
		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromFixed64BE(long value)
		{
			return new Slice(new byte[8] { (byte) (value >> 56), (byte) (value >> 48), (byte) (value >> 40), (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 8);
		}

		/// <summary>Encode an unsigned 64-bit integer into a variable size slice (1 to 8 bytes) in little-endian</summary>
		[Pure]
		public static Slice FromUInt64(ulong value)
		{
			// delegate to the 32-bit version when the value fits, then add one byte per extra octet
			if (value <= (1UL << 32) - 1)
			{
				return FromUInt32((uint) value);
			}
			if (value <= (1UL << 40) - 1)
			{
				return new Slice(new byte[5] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24), (byte) (value >> 32) }, 0, 5);
			}
			if (value <= (1UL << 48) - 1)
			{
				return new Slice(new byte[6] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24), (byte) (value >> 32), (byte) (value >> 40) }, 0, 6);
			}
			if (value <= (1UL << 56) - 1)
			{
				return new Slice(new byte[7] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24), (byte) (value >> 32), (byte) (value >> 40), (byte) (value >> 48) }, 0, 7);
			}
			return FromFixedU64(value);
		}

		/// <summary>Encode an unsigned 64-bit integer into an 8-byte slice in little-endian</summary>
		/// <remarks>0x1122334455667788 => 88 77 66 55 44 33 22 11</remarks>
		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromFixedU64(ulong value) //REVIEW: we could drop the 'U' here
		{
			return new Slice(new byte[8] { (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24), (byte) (value >> 32), (byte) (value >> 40), (byte) (value >> 48), (byte) (value >> 56) }, 0, 8);
		}

		/// <summary>Encode an unsigned 64-bit integer into a variable size slice (1 to 8 bytes) in big-endian</summary>
		[Pure]
		public static Slice FromUInt64BE(ulong value)
		{
			if (value <= (1UL << 32) - 1)
			{
				// FIX: was FromInt32BE((int) value), which relied on the signed cast producing the
				// same bytes for values >= 2^31; use the unsigned variant for symmetry with FromUInt64.
				return FromUInt32BE((uint) value);
			}
			if (value <= (1UL << 40) - 1)
			{
				return new Slice(new byte[5] { (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 5);
			}
			if (value <= (1UL << 48) - 1)
			{
				return new Slice(new byte[6] { (byte) (value >> 40), (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 6);
			}
			if (value <= (1UL << 56) - 1)
			{
				return new Slice(new byte[7] { (byte) (value >> 48), (byte) (value >> 40), (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 7);
			}
			return FromFixedU64BE(value);
		}

		/// <summary>Encode an unsigned 64-bit integer into an 8-byte slice in big-endian</summary>
		/// <remarks>0x1122334455667788 => 11 22 33 44 55 66 77 88</remarks>
		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromFixedU64BE(ulong value) //REVIEW: we could drop the 'U' here
		{
			return new Slice(new byte[8] { (byte) (value >> 56), (byte) (value >> 48), (byte) (value >> 40), (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value }, 0, 8);
		}

		/// <summary>Encode an unsigned 64-bit integer into 7-bit encoded unsigned int (aka 'Varint64')</summary>
		[Pure]
		public static Slice FromVarint64(ulong value)
		{
			if (value <= 127)
			{ // single byte slices are cached
				return FromByte((byte) value);
			}

			SliceWriter writer;
			if (value <= uint.MaxValue)
			{
				// 14 bits or less fit in 2 bytes; anything else up to 32 bits fits in 5
				writer = new SliceWriter(value <= (1 << 14) - 1 ? 2 : 5);
				writer.WriteVarInt32((uint) value);
			}
			else
			{
				writer = new SliceWriter(10);
				writer.WriteVarInt64(value);
			}
			return writer.ToSlice();
		}

		#endregion

		#region 128-bit integers

		// we model 128-bit integers as two 64-bit integers (low and high)

		/// <summary>Encode a signed 128-bit integer (two 64-bit halves) into a 16-byte slice in little-endian</summary>
		/// <param name="lo">Low 64 bits (written first)</param>
		/// <param name="hi">High 64 bits (written last)</param>
		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromFixed128(long lo, long hi)
		{
			return new Slice(
				new byte[16]
				{
					(byte) (lo), (byte) (lo >> 8), (byte) (lo >> 16), (byte) (lo >> 24), (byte) (lo >> 32), (byte) (lo >> 40), (byte) (lo >> 48), (byte) (lo >> 56),
					(byte) (hi), (byte) (hi >> 8), (byte) (hi >> 16), (byte) (hi >> 24), (byte) (hi >> 32), (byte) (hi >> 40), (byte) (hi >> 48), (byte) (hi >> 56),
				},
				0,
				16
			);
		}

		/// <summary>Encode a signed 128-bit integer (two 64-bit halves) into a 16-byte slice in big-endian</summary>
		/// <param name="lo">Low 64 bits (written last)</param>
		/// <param name="hi">High 64 bits (written first)</param>
		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromFixed128BE(long lo, long hi)
		{
			return new Slice(
				new byte[16]
				{
					(byte) (hi >> 56), (byte) (hi >> 48), (byte) (hi >> 40), (byte) (hi >> 32), (byte) (hi >> 24), (byte) (hi >> 16), (byte) (hi >> 8), (byte) (hi),
					(byte) (lo >> 56), (byte) (lo >> 48), (byte) (lo >> 40), (byte) (lo >> 32), (byte) (lo >> 24), (byte) (lo >> 16), (byte) (lo >> 8), (byte) (lo),
				},
				0,
				16
			);
		}

		#endregion

		#region decimals

		/// <summary>Encode a 32-bit IEEE floating point number into a 4-byte slice (native byte order)</summary>
		[Pure]
		public static Slice FromSingle(float value)
		{
			//TODO: may not work on BE platforms?
			byte[] tmp = new byte[4];
			unsafe
			{
				fixed (byte* ptr = &tmp[0])
				{
					*((float*) ptr) = value;
				}
			}
			return new Slice(tmp, 0, 4);
		}

		/// <summary>Encode a 32-bit IEEE floating point number into a 4-byte slice (in network order)</summary>
		[Pure]
		public static Slice FromSingleBE(float value)
		{
			//TODO: may not work on BE platforms?
			byte[] tmp = new byte[4];
			unsafe
			{
				fixed (byte* ptr = &tmp[0])
				{
					// reinterpret the float bits as an uint, then swap to big-endian
					*((uint*) ptr) = UnsafeHelpers.ByteSwap32(*(uint*) &value);
				}
			}
			return new Slice(tmp, 0, 4);
		}

		/// <summary>Encode a 64-bit IEEE floating point number into an 8-byte slice (native byte order)</summary>
		[Pure]
		public static Slice FromDouble(double value)
		{
			//TODO: may not work on BE platforms?
			byte[] tmp = new byte[8];
			unsafe
			{
				fixed (byte* ptr = &tmp[0])
				{
					*((double*) ptr) = value;
				}
			}
			return new Slice(tmp, 0, 8);
		}

		/// <summary>Encode a 64-bit IEEE floating point number into an 8-byte slice (in network order)</summary>
		[Pure]
		public static Slice FromDoubleBE(double value)
		{
			//TODO: may not work on BE platforms?
			byte[] tmp = new byte[8];
			unsafe
			{
				fixed (byte* ptr = &tmp[0])
				{
					// reinterpret the double bits as an ulong, then swap to big-endian
					*((ulong*) ptr) = UnsafeHelpers.ByteSwap64(*(ulong*) &value);
				}
			}
			return new Slice(tmp, 0, 8);
		}

		/// <summary>Encode a 128-bit decimal into a 16-byte slice (native layout of System.Decimal)</summary>
		public static Slice FromDecimal(decimal value)
		{
			//TODO: may not work on BE platforms?
			byte[] tmp = new byte[16];
			unsafe
			{
				fixed (byte* ptr = &tmp[0])
				{
					*((decimal*) ptr) = value;
				}
			}
			return new Slice(tmp, 0, 16);
		}

		#endregion

		/// <summary>Create a 16-byte slice containing a System.Guid encoded according to RFC 4122 (Big Endian)</summary>
		/// <remarks>WARNING: Slice.FromGuid(guid).GetBytes() will not produce the same result as guid.ToByteArray()!
		/// If you need to produce Microsoft compatible byte arrays, use Slice.Create(guid.ToByteArray()) but then you should NEVER use Slice.ToGuid() to decode such a value!</remarks>
		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromGuid(Guid value)
		{
			// UUIDs are stored using the RFC 4122 format (Big Endian), while .NET's System.Guid uses Little Endian
			// => we will convert the GUID into a UUID under the hood, and hope that it gets converted back when read from the db
			return new Uuid128(value).ToSlice();
		}

		/// <summary>Create a 16-byte slice containing an RFC 4122 compliant 128-bit UUID</summary>
		/// <remarks>You should never call this method on a slice created from the result of calling System.Guid.ToByteArray()!</remarks>
		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromUuid128(Uuid128 value)
		{
			// UUID should already be in the RFC 4122 ordering
			return value.ToSlice();
		}

		/// <summary>Create an 8-byte slice containing a 64-bit UUID</summary>
		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromUuid64(Uuid64 value)
		{
			return value.ToSlice();
		}

		/// <summary>Encoding used to produce UTF-8 slices (no BOM, throws on invalid bytes)</summary>
		[NotNull]
		internal static readonly UTF8Encoding Utf8NoBomEncoding = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true);

		/// <summary>Dangerously create a slice containing a string converted to the local ANSI code page. All non-ANSI characters may be corrupted or converted to '?', and this slice may not decode properly on a different system.</summary>
		/// <remarks>
		/// WARNING: if you put a string that contains non-ANSI chars, it will be silently corrupted! This should only be used to store keywords or 'safe' strings, and when the decoding will only happen on the same system, or systems using the same codepage.
		/// Slices encoded by this method are not guaranteed to be decoded without loss. YOU'VE BEEN WARNED!
		/// </remarks>
		[Pure]
		public static Slice FromStringAnsi([CanBeNull] string text)
		{
			return text == null ? Slice.Nil
				: text.Length == 0 ? Slice.Empty
				: new Slice(Encoding.Default.GetBytes(text));
		}

		/// <summary>Create a slice from an ASCII string, where all the characters map directly into bytes (0..255). The string will be checked before being encoded.</summary>
		/// <remarks>
		/// This method will check each character and fail if at least one is greater than 255.
		/// Slices encoded by this method are only guaranteed to roundtrip if decoded with the matching byte-string decoder. If the original string only contained ASCII characters (0..127) then it can also be decoded as UTF-8.
		/// The only difference between this method and FromByteString is that the latter will truncate non-ASCII characters to their lowest 8 bits, while the former will throw an exception.
		/// </remarks>
		/// <exception cref="FormatException">If at least one character is greater than 255.</exception>
		[Pure]
		public static Slice FromStringAscii([CanBeNull] string value)
		{
			if (value == null) return Slice.Nil;
			if (value.Length == 0) return Slice.Empty;
			byte[] _ = null;
			unsafe
			{
				fixed (char* chars = value)
				{
					return ConvertByteStringChecked(chars, value.Length, ref _);
				}
			}
		}

#if ENABLE_SPAN
		/// <summary>Create a slice from an ASCII string, where all the characters map directly into bytes (0..255). The string will be checked before being encoded.</summary>
		/// <remarks>
		/// This method will check each character and fail if at least one is greater than 255.
		/// Slices encoded by this method are only guaranteed to roundtrip if decoded with the matching byte-string decoder. If the original string only contained ASCII characters (0..127) then it can also be decoded as UTF-8.
		/// The only difference between this method and FromByteString is that the latter will truncate non-ASCII characters to their lowest 8 bits, while the former will throw an exception.
		/// </remarks>
		/// <exception cref="FormatException">If at least one character is greater than 255.</exception>
		[Pure]
		public static Slice FromStringAscii(ReadOnlySpan<char> value)
		{
			if (value.Length == 0) return Slice.Empty;
			byte[] _ = null;
			return ConvertByteStringChecked(value, ref _);
		}

		/// <summary>Create a slice from an ASCII string, where all the characters map directly into bytes (0..255). The string will be checked before being encoded.</summary>
		/// <remarks>
		/// This method will check each character and fail if at least one is greater than 255.
		/// The caller-supplied <paramref name="buffer"/> is reused (and grown) as the backing store when large enough.
		/// </remarks>
		/// <exception cref="FormatException">If at least one character is greater than 255.</exception>
		[Pure]
		public static Slice FromStringAscii(ReadOnlySpan<char> value, ref byte[] buffer)
		{
			if (value.Length == 0) return Slice.Empty;
			return ConvertByteStringChecked(value, ref buffer);
		}

		/// <summary>Create a slice from a byte string, where all the characters map directly into bytes (0..255), without performing any validation</summary>
		/// <remarks>
		/// This method does not make any effort to detect characters above 255, which will be truncated to their lower 8 bits, introducing corruption when the string will be decoded. Please MAKE SURE to not call this with untrusted data.
		/// Slices encoded by this method are ONLY compatible with UTF-8 encoding if all characters are between 0 and 127. If this is not the case, then decoding it as an UTF-8 sequence may introduce corruption.
		/// </remarks>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromByteString([CanBeNull] string value)
		{
			if (value == null) return Slice.Nil;
			byte[] _ = null;
			return FromByteString(value.AsSpan(), ref _);
		}

		/// <summary>Create a slice from a byte string, where all the characters map directly into bytes (0..255), without performing any validation</summary>
		/// <remarks>
		/// This method does not make any effort to detect characters above 255, which will be truncated to their lower 8 bits, introducing corruption when the string will be decoded. Please MAKE SURE to not call this with untrusted data.
		/// Slices encoded by this method are ONLY compatible with UTF-8 encoding if all characters are between 0 and 127. If this is not the case, then decoding it as an UTF-8 sequence may introduce corruption.
		/// </remarks>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromByteString(ReadOnlySpan<char> value)
		{
			byte[] _ = default;
			return FromByteString(value, ref _);
		}

		/// <summary>Create a slice from a byte string, where all the characters map directly into bytes (0..255), without performing any validation</summary>
		/// <remarks>
		/// This method does not make any effort to detect characters above 255, which will be truncated to their lower 8 bits, introducing corruption when the string will be decoded. Please MAKE SURE to not call this with untrusted data.
		/// The caller-supplied <paramref name="buffer"/> is reused (and grown) as the backing store when large enough.
		/// </remarks>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromByteString(ReadOnlySpan<char> value, ref byte[] buffer)
		{
			return value.Length != 0 ? ConvertByteStringNoCheck(value, ref buffer) : Slice.Empty;
		}

		// Converts chars to bytes 1:1, throwing if any char does not fit in 8 bits.
		[Pure]
		internal static Slice ConvertByteStringChecked(ReadOnlySpan<char> value, ref byte[] buffer)
		{
			int n = value.Length;
			if (n == 1)
			{ // single-char fast path: reuse the caller's buffer, or the cached single-byte slices
				char c = value[0];
				if (c > 0xFF) goto InvalidChar;
				if (buffer?.Length > 0)
				{
					buffer[0] = (byte) c;
					return new Slice(buffer, 0, 1);
				}
				return FromByte((byte) c);
			}

			var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, n);
			if (!TryConvertBytesStringChecked(new Span<byte>(tmp, 0, n), value)) goto InvalidChar;
			return new Slice(tmp, 0, n);
		InvalidChar:
			throw ThrowHelper.FormatException("The specified string contains characters that cannot be safely truncated to 8 bits. If you are encoding natural text, you should use UTF-8 encoding.");
		}

		// Copies chars into buffer as bytes; returns false if the buffer is too small or any char is > 0xFF.
		[Pure]
		private static bool TryConvertBytesStringChecked(Span<byte> buffer, ReadOnlySpan<char> value)
		{
			int n = value.Length;
			if ((uint) buffer.Length < (uint) n) return false;
			unsafe
			{
				fixed (byte* pBytes = &MemoryMarshal.GetReference(buffer))
				fixed (char* pChars = &MemoryMarshal.GetReference(value))
				{
					char* inp = pChars;
					byte* outp = pBytes;

					while (n > 0)
					{
						char c = *inp;
						if (c > 0xFF) return false;
						*outp++ = (byte) (*inp++);
						--n;
					}
				}
			}
			return true;
		}
#else

		/// <summary>Create a slice from a byte string, where all the characters map directly into bytes (0..255), without performing any validation</summary>
		/// <remarks>
		/// This method does not make any effort to detect characters above 255, which will be truncated to their lower 8 bits, introducing corruption when the string will be decoded. Please MAKE SURE to not call this with untrusted data.
		/// Slices encoded by this method are ONLY compatible with UTF-8 encoding if all characters are between 0 and 127. If this is not the case, then decoding it as an UTF-8 sequence may introduce corruption.
		/// </remarks>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromByteString([CanBeNull] string value)
		{
			if (value == null) return Slice.Nil;
			byte[] _ = null;
			unsafe
			{
				fixed (char* chars = value)
				{
					return FromByteString(chars, value.Length, ref _);
				}
			}
		}

		/// <summary>Create a slice from a byte string, where all the characters map directly into bytes (0..255), without performing any validation</summary>
		/// <remarks>
		/// This method does not make any effort to detect characters above 255, which will be truncated to their lower 8 bits, introducing corruption when the string will be decoded. Please MAKE SURE to not call this with untrusted data.
		/// Slices encoded by this method are ONLY compatible with UTF-8 encoding if all characters are between 0 and 127. If this is not the case, then decoding it as an UTF-8 sequence may introduce corruption.
		/// </remarks>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static unsafe Slice FromByteString(char* chars, int numChars, ref byte[] buffer)
		{
			return numChars != 0 ? ConvertByteStringNoCheck(chars, numChars, ref buffer) : Slice.Empty;
		}

		// Converts chars to bytes 1:1, throwing if any char does not fit in 8 bits.
		[Pure]
		internal static unsafe Slice ConvertByteStringChecked(char* value, int n, ref byte[] buffer)
		{
			if (n == 1)
			{ // single-char fast path: reuse the caller's buffer, or the cached single-byte slices
				char c = value[0];
				if (c > 0xFF) goto InvalidChar;
				if (buffer?.Length > 0)
				{
					buffer[0] = (byte) c;
					return new Slice(buffer, 0, 1);
				}
				return FromByte((byte) c);
			}

			var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, n);
			if (!TryConvertBytesStringChecked(new Slice(tmp, 0, n), value, n)) goto InvalidChar;
			return new Slice(tmp, 0, n);
		InvalidChar:
			throw ThrowHelper.FormatException("The specified string contains characters that cannot be safely truncated to 8 bits. If you are encoding natural text, you should use UTF-8 encoding.");
		}

		// Copies chars into buffer as bytes; returns false if the buffer is too small or any char is > 0xFF.
		[Pure]
		private static unsafe bool TryConvertBytesStringChecked(Slice buffer, char* value, int n)
		{
			if ((uint) buffer.Count < (uint) n) return false;
			unsafe
			{
				fixed (byte* pBytes = &buffer.DangerousGetPinnableReference())
				{
					char* inp = value;
					byte* outp = pBytes;

					while (n > 0)
					{
						char c = *inp;
						if (c > 0xFF) return false;
						*outp++ = (byte) (*inp++);
						--n;
					}
				}
			}
			return true;
		}
#endif

#if ENABLE_SPAN
		/// <summary>Create a slice containing the UTF-8 bytes of the string <paramref name="value"/>.</summary>
		/// <remarks>
		/// This method is optimized for strings that usually contain only ASCII characters.
		/// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly.
		/// For these cases, or when you know that the string contains ASCII only (with 100% certainty), you should use FromByteString.
		/// </remarks>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromString([CanBeNull] string value)
		{
			//REVIEW: what if people call FromString("\xFF/some/system/path") by mistake?
			// Should we special case when the string starts with \xFF (or \xFF\xFF)? What about \xFE ?
			if (value == null) return default(Slice);
			byte[] _ = null;
			return FromString(value.AsSpan(), ref _);
		}

		/// <summary>Create a slice containing the UTF-8 bytes of the string <paramref name="value"/>.</summary>
		/// <remarks>
		/// This method is optimized for strings that usually contain only ASCII characters.
		/// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly.
		/// For these cases, or when you know that the string contains ASCII only (with 100% certainty), you should use FromByteString.
		/// </remarks>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromString(ReadOnlySpan<char> value)
		{
			byte[] _ = null;
			return FromString(value, ref _);
		}

		/// <summary>Create a slice containing the UTF-8 bytes of the string <paramref name="value"/>.</summary>
		/// <remarks>
		/// This method is optimized for strings that usually contain only ASCII characters.
		/// The caller-supplied <paramref name="buffer"/> is reused (and grown) as the backing store when large enough.
		/// </remarks>
		[Pure]
		public static Slice FromString(ReadOnlySpan<char> value, ref byte[] buffer)
		{
			if (value.Length == 0) return Empty;
			if (UnsafeHelpers.IsAsciiString(value))
			{ // fast path: ASCII-only strings map 1:1 to bytes
				return ConvertByteStringNoCheck(value, ref buffer);
			}

			unsafe
			{
				fixed (char* chars = &MemoryMarshal.GetReference(value))
				{
					int capa = Utf8Encoder.GetByteCount(chars, value.Length);
					var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, capa);
					fixed (byte* ptr = &tmp[0])
					{
						if (Utf8NoBomEncoding.GetBytes(chars, value.Length, ptr, capa) != capa)
						{
#if DEBUG
							// uh oh, mismatch between GetByteCount() and what the encoding actually produced??
							if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break();
#endif
							throw new InvalidOperationException("UTF-8 byte capacity estimation failed.");
						}
						return new Slice(tmp, 0, capa);
					}
				}
			}
		}

#else

		/// <summary>Create a slice containing the UTF-8 bytes of the string <paramref name="value"/>.</summary>
		/// <remarks>
		/// This method is optimized for strings that usually contain only ASCII characters.
		/// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly.
		/// For these cases, or when you know that the string contains ASCII only (with 100% certainty), you should use FromByteString.
		/// </remarks>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromString([CanBeNull] string value)
		{
			//REVIEW: what if people call FromString("\xFF/some/system/path") by mistake?
			// Should we special case when the string starts with \xFF (or \xFF\xFF)? What about \xFE ?
			if (value == null) return default(Slice);
			byte[] _ = null;
			unsafe
			{
				fixed (char* chars = value)
				{
					return FromString(chars, value.Length, ref _);
				}
			}
		}

		/// <summary>Create a slice containing the UTF-8 bytes of the characters at <paramref name="chars"/>.</summary>
		/// <remarks>
		/// This method is optimized for strings that usually contain only ASCII characters.
		/// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly.
		/// For these cases, or when you know that the string contains ASCII only (with 100% certainty), you should use FromByteString.
		/// </remarks>
		[Pure]
		public static unsafe Slice FromString(char* chars, int numChars, ref byte[] buffer)
		{
			if (numChars == 0) return Empty;
			if (UnsafeHelpers.IsAsciiString(chars, numChars))
			{ // fast path: ASCII-only strings map 1:1 to bytes
				return ConvertByteStringNoCheck(chars, numChars, ref buffer);
			}

			int capa = Encoding.UTF8.GetByteCount(chars, numChars);
			var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, capa);
			fixed (byte* ptr = &tmp[0])
			{
				if (Utf8NoBomEncoding.GetBytes(chars, numChars, ptr, capa) != capa)
				{
#if DEBUG
					// uh oh, mismatch between GetByteCount() and what the encoding actually produced??
					if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break();
#endif
					throw new InvalidOperationException("UTF-8 byte capacity estimation failed.");
				}
				return new Slice(tmp, 0, capa);
			}
		}

#endif

		/// <summary>Create a slice containing the UTF-8 bytes of the string <paramref name="value"/>.</summary>
		/// <remarks>
		/// The slice will NOT include the UTF-8 BOM.
		/// This method will not try to identify ASCII-only strings:
		/// - If the string provided can ONLY contain ASCII, you should use FromStringAscii.
		/// - If it is more frequent for the string to be ASCII-only than having UNICODE characters, consider using FromString.
		/// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly.
		/// For these cases, or when you know that the string contains ASCII only (with 100% certainty), you should use FromByteString.
		/// </remarks>
		[Pure]
		public static Slice FromStringUtf8([CanBeNull] string value)
		{
			//REVIEW: what if people call FromString("\xFF/some/system/path") by mistake?
			// Should we special case when the string starts with \xFF (or \xFF\xFF)? What about \xFE ?
			return value == null ? Slice.Nil
				: value.Length == 0 ? Slice.Empty
				: new Slice(Utf8NoBomEncoding.GetBytes(value));
		}

#if ENABLE_SPAN
		/// <summary>Create a slice containing the UTF-8 bytes of a subsection of the string <paramref name="value"/>.</summary>
		/// <remarks>
		/// The slice will NOT include the UTF-8 BOM.
		/// This method will not try to identify ASCII-only strings; see FromStringAscii / FromString for those cases.
		/// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly.
		/// </remarks>
		[Pure, ContractAnnotation("=> buffer:notnull")]
		[Obsolete("Use FromStringUtf8(ReadOnlySpan, ...) instead")]
		public static Slice FromStringUtf8([NotNull] string value, [Positive] int offset, [Positive] int count, ref byte[] buffer, out bool asciiOnly)
		{
			if (count == 0)
			{
				asciiOnly = true;
				return Empty;
			}
			return FromStringUtf8(value.AsSpan(offset, count), ref buffer, out asciiOnly);
		}

		/// <summary>Create a slice containing the UTF-8 bytes of a subsection of the string <paramref name="value"/>.</summary>
		/// <remarks>
		/// The slice will NOT include the UTF-8 BOM.
		/// This method will not try to identify ASCII-only strings; see FromStringAscii / FromString for those cases.
		/// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly.
		/// </remarks>
		public static Slice FromStringUtf8(ReadOnlySpan<char> value)
		{
			if (value.Length == 0) return Empty;
			byte[] __ = null;
			return FromStringUtf8(value, ref __, out _);
		}

		/// <summary>Create a slice containing the UTF-8 bytes of a subsection of the string <paramref name="value"/>.</summary>
		/// <remarks>
		/// The slice will NOT include the UTF-8 BOM.
		/// <paramref name="asciiOnly"/> is set to true when the encoded length equals the char count (i.e. all chars were ASCII).
		/// The caller-supplied <paramref name="buffer"/> is reused (and grown) as the backing store when large enough.
		/// </remarks>
		public static Slice FromStringUtf8(ReadOnlySpan<char> value, ref byte[] buffer, out bool asciiOnly)
		{
			if (value.Length == 0)
			{
				asciiOnly = true;
				return Empty;
			}

			unsafe
			{
				//note: there is no direct way to GetBytes(..) from a segment of a string, without going to char pointers :(
				fixed (char* inp = &MemoryMarshal.GetReference(value))
				{
					int len = Utf8NoBomEncoding.GetByteCount(inp, value.Length);
					Contract.Assert(len > 0);

					//TODO: we could optimize conversion if we know it is only ascii!
					asciiOnly = len == value.Length;

					// write UTF-8 bytes to buffer
					var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, len);
					fixed (byte* outp = &tmp[0])
					{
						//TODO: PERF: if len == count, we know it is ASCII only and could optimize for that case?
						if (len != Utf8NoBomEncoding.GetBytes(inp, value.Length, outp, len))
						{
#if DEBUG
							// uh oh, mismatch between GetByteCount() and the UTF-8 encoding!
							if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break();
#endif
							throw new InvalidOperationException("UTF-8 string size estimation failed.");
						}
						return new Slice(tmp, 0, len);
					}
				}
			}
		}
#else
		/// <summary>Create a slice containing the UTF-8 bytes of a subsection of the string <paramref name="value"/>.</summary>
		/// <remarks>
		/// The slice will NOT include the UTF-8 BOM.
		/// This method will not try to identify ASCII-only strings; see FromStringAscii / FromString for those cases.
		/// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly.
		/// </remarks>
		[Pure, ContractAnnotation("=> buffer:notnull")]
		[Obsolete("Use FromStringUtf8(ReadOnlySpan, ...) instead")]
		public static Slice FromStringUtf8([NotNull] string value, [Positive] int offset, [Positive] int count, ref byte[] buffer, out bool asciiOnly)
		{
			Contract.DoesNotOverflow(value, offset, count);
			if (count == 0)
			{
				asciiOnly = true;
				return Empty;
			}

			unsafe
			{
				fixed (char* chars = value)
				{
					return FromStringUtf8(chars + offset, count, ref buffer, out asciiOnly);
				}
			}
		}

		/// <summary>Create a slice containing the UTF-8 bytes of the characters at <paramref name="chars"/>.</summary>
		/// <remarks>
		/// The slice will NOT include the UTF-8 BOM.
		/// <paramref name="asciiOnly"/> is set to true when the encoded length equals the char count (i.e. all chars were ASCII).
		/// The caller-supplied <paramref name="buffer"/> is reused (and grown) as the backing store when large enough.
		/// </remarks>
		public static unsafe Slice FromStringUtf8(char* chars, int numChars, ref byte[] buffer, out bool asciiOnly)
		{
			if (numChars == 0)
			{
				asciiOnly = true;
				return Empty;
			}

			//note: there is no direct way to GetBytes(..) from a segment of a string, without going to char pointers :(
			int len = Utf8NoBomEncoding.GetByteCount(chars, numChars);
			Contract.Assert(len > 0);

			//TODO: we could optimize conversion if we know it is only ascii!
			asciiOnly = len == numChars;

			// write UTF-8 bytes to buffer
			var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, len);
			fixed (byte* outp = &tmp[0])
			{
				//TODO: PERF: if len == count, we know it is ASCII only and could optimize for that case?
				if (len != Utf8NoBomEncoding.GetBytes(chars, numChars, outp, len))
				{
#if DEBUG
					// uh oh, mismatch between GetByteCount() and the UTF-8 encoding!
					if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break();
#endif
					throw new InvalidOperationException("UTF-8 string size estimation failed.");
				}
				return new Slice(tmp, 0, len);
			}
		}
#endif

#if ENABLE_SPAN
		/// <summary>Create a slice containing the UTF-8 bytes of the string <paramref name="value"/>, prefixed by the UTF-8 BOM.</summary>
		/// <remarks>
		/// If the string is null, an empty slice is returned.
		/// If the string is empty, the UTF-8 BOM is returned.
		/// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly.
		/// </remarks>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static Slice FromStringUtf8WithBom([CanBeNull] string value)
		{
			//REVIEW: what if people call FromString("\xFF/some/system/path") by mistake?
+ // Should be special case when the string starts with \xFF (or \xFF\xFF)? What about \xFE ? + if (value == null) return default; + byte[] _ = null; + return FromStringUtf8WithBom(value.AsSpan(), ref _); + } + + /// Create a slice containing the UTF-8 bytes of the string , prefixed by the UTF-8 BOM. + /// + /// If the string is null, an empty slice is returned. + /// If the string is empty, the UTF-8 BOM is returned. + /// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly. + /// For these case, or when you known that the string only contains ASCII only (with 100% certainty), you should use . + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice FromStringUtf8WithBom(ReadOnlySpan value) + { + byte[] _ = null; + return FromStringUtf8WithBom(value, ref _); + } + + /// Create a slice containing the UTF-8 bytes of the string , prefixed by the UTF-8 BOM. + /// + /// If the string is null, an empty slice is returned. + /// If the string is empty, the UTF-8 BOM is returned. + /// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly. + /// For these case, or when you known that the string only contains ASCII only (with 100% certainty), you should use . + /// + [Pure] + public static Slice FromStringUtf8WithBom(ReadOnlySpan value, ref byte[] buffer) + { + if (value.Length == 0) + { + //note: cannot use a singleton buffer because it could be mutated by the caller! 
+ var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, 8); + tmp[0] = 0xEF; + tmp[1] = 0xBB; + tmp[2] = 0xBF; + return new Slice(tmp, 0, 3); + } + unsafe + { + fixed (char* pchars = &MemoryMarshal.GetReference(value)) + { + int capa = checked(3 + Utf8NoBomEncoding.GetByteCount(pchars, value.Length)); + var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, capa); + fixed (byte* outp = &tmp[0]) + { + outp[0] = 0xEF; + outp[1] = 0xBB; + outp[2] = 0xBF; + Utf8NoBomEncoding.GetBytes(pchars, value.Length, outp + 3, tmp.Length - 3); + } + return new Slice(tmp, 0, capa); + } + } + } + + /// Create a slice containing the UTF-8 bytes of the string , prefixed by the UTF-8 BOM. + /// + /// If the string is null, an empty slice is returned. + /// If the string is empty, the UTF-8 BOM is returned. + /// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly. + /// For these case, or when you known that the string only contains ASCII only (with 100% certainty), you should use . + /// + [Pure] + private static Slice ConvertByteStringNoCheck(ReadOnlySpan value, ref byte[] buffer) + { + int len = value.Length; + if (len == 0) return Empty; + if (len == 1) return FromByte((byte) value[0]); + + var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, len); + unsafe + { + fixed (byte* pBytes = &tmp[0]) + fixed (char* pChars = &MemoryMarshal.GetReference(value)) + { + byte* outp = pBytes; + byte* stop = pBytes + len; + char* inp = pChars; + while (outp < stop) + { + *outp++ = (byte) *inp++; + } + } + } + return new Slice(tmp, 0, len); + } +#else + + /// Create a slice containing the UTF-8 bytes of the string , prefixed by the UTF-8 BOM. + /// + /// If the string is null, an empty slice is returned. + /// If the string is empty, the UTF-8 BOM is returned. 
+ /// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly. + /// For these case, or when you known that the string only contains ASCII only (with 100% certainty), you should use . + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice FromStringUtf8WithBom([CanBeNull] string value) + { + //REVIEW: what if people call FromString"\xFF/some/system/path") by mistake? + // Should be special case when the string starts with \xFF (or \xFF\xFF)? What about \xFE ? + if (value == null) return default(Slice); + byte[] _ = null; + unsafe + { + fixed(char* chars = value) + { + return FromStringUtf8WithBom(chars, value.Length, ref _); + } + } + } + + /// Create a slice containing the UTF-8 bytes of the string , prefixed by the UTF-8 BOM. + /// + /// If the string is null, an empty slice is returned. + /// If the string is empty, the UTF-8 BOM is returned. + /// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly. + /// For these case, or when you known that the string only contains ASCII only (with 100% certainty), you should use . + /// + [Pure] + public static unsafe Slice FromStringUtf8WithBom(char* chars, int numChars, ref byte[] buffer) + { + if (numChars == 0) + { + //note: cannot use a singleton buffer because it could be mutated by the caller! 
+ var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, 8); + tmp[0] = 0xEF; + tmp[1] = 0xBB; + tmp[2] = 0xBF; + return new Slice(tmp, 0, 3); + } + else + { + int capa = checked(3 + Utf8NoBomEncoding.GetByteCount(chars, numChars)); + var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, capa); + fixed (byte* outp = &tmp[0]) + { + outp[0] = 0xEF; + outp[1] = 0xBB; + outp[2] = 0xBF; + Utf8NoBomEncoding.GetBytes(chars, numChars, outp + 3, tmp.Length - 3); + } + return new Slice(tmp, 0, capa); + } + } + + /// Create a slice containing the UTF-8 bytes of the string , prefixed by the UTF-8 BOM. + /// + /// If the string is null, an empty slice is returned. + /// If the string is empty, the UTF-8 BOM is returned. + /// DO NOT call this method to encode special strings that contain binary prefixes, like "\xFF/some/system/path" or "\xFE\x01\x02\x03", because they do not map to UTF-8 directly. + /// For these case, or when you known that the string only contains ASCII only (with 100% certainty), you should use . + /// + [Pure] + private static unsafe Slice ConvertByteStringNoCheck(char* chars, int numChars, ref byte[] buffer) + { + if (numChars == 0) return Empty; + if (numChars == 1) return FromByte((byte) chars[0]); + + var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, numChars); + unsafe + { + fixed (byte* pBytes = &tmp[0]) + { + byte* outp = pBytes; + byte* stop = pBytes + numChars; + char* inp = chars; + while (outp < stop) + { + *outp++ = (byte) *inp++; + } + } + } + return new Slice(tmp, 0, numChars); + } +#endif + + /// Create a slice that holds the UTF-8 encoded representation of + /// + /// The returned slice is only guaranteed to hold 1 byte for ASCII chars (0..127). For non-ASCII chars, the size can be from 1 to 6 bytes. 
+ /// If you need to use ASCII chars, you should use Slice.FromByte() instead + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice FromChar(char value) + { + if (value < 128) + { // ASCII + return FromByte((byte)value); + } + + byte[] _ = null; + return FromChar(value, ref _); + } + + /// Create a slice that holds the UTF-8 encoded representation of + /// The returned slice is only guaranteed to hold 1 byte for ASCII chars (0..127). For non-ASCII chars, the size can be from 1 to 6 bytes. + /// If you need to use ASCII chars, you should use Slice.FromByte() instead + [Pure] + public static Slice FromChar(char value, ref byte[] buffer) + { + if (value < 128) + { // ASCII + return Slice.FromByte((byte)value); + } + + // note: Encoding.UTF8.GetMaxByteCount(1) returns 6, but allocate 8 to stay aligned + var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, 8); + unsafe + { + fixed (byte* ptr = &tmp[0]) + { + int n = Utf8NoBomEncoding.GetBytes(&value, 1, ptr, tmp.Length); + return n == 1 ? FromByte(tmp[0]) : new Slice(tmp, 0, n); + } + } + } + + /// Convert an hexadecimal digit (0-9A-Fa-f) into the corresponding decimal value + /// Hexadecimal digit (case insensitive) + /// Decimal value between 0 and 15, or an exception + [Pure] + private static int NibbleToDecimal(char c) + { + int x = c - 48; + if (x < 10) return x; + if (x >= 17 && x <= 42) return x - 7; + if (x >= 49 && x <= 74) return x - 39; + return ThrowInputNotValidHexadecimalDigit(); + } + + private static int ThrowInputNotValidHexadecimalDigit() + { + throw FailInputNotValidHexadecimalDigit(); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static FormatException FailInputNotValidHexadecimalDigit() + { + return ThrowHelper.FormatException("Input is not a valid hexadecimal digit"); + } + + /// Convert an hexadecimal encoded string ("1234AA7F") into a slice + /// String contains a sequence of pairs of hexadecimal digits with no separating spaces. 
+ /// Slice containing the decoded byte array, or an exeception if the string is empty or has an odd length + [Pure] + public static Slice FromHexa([CanBeNull] string hexaString) + { + if (string.IsNullOrEmpty(hexaString)) return hexaString == null ? Slice.Nil : Slice.Empty; + + if (hexaString.IndexOf(' ') > 0) + { // remove spaces + hexaString = hexaString.Replace(" ", ""); + } + + if ((hexaString.Length & 1) != 0) throw new ArgumentException("Hexadecimal string must be of even length", nameof(hexaString)); + + var buffer = new byte[hexaString.Length >> 1]; + for (int i = 0; i < hexaString.Length; i += 2) + { + buffer[i >> 1] = (byte)((NibbleToDecimal(hexaString[i]) << 4) | NibbleToDecimal(hexaString[i + 1])); + } + return new Slice(buffer); + } + + #endregion + + #region ToXXX + + /// Stringify a slice containing characters in the operating system's current ANSI codepage + /// Decoded string, or null if the slice is + /// + /// Calling this method on a slice that is not ANSI, or was generated with different codepage than the current process, will return a corrupted string! + /// This method should ONLY be used to interop with the Win32 API or unamanged libraries that require the ANSI codepage! + /// You SHOULD *NOT* use this to expose data to other systems or locale (via sockets, files, ...) + /// If you are decoding natural text, you should probably change the encoding at the source to be UTF-8! + /// If you are decoding identifiers or keywords that are known to be ASCII only, you should use instead (safe). + /// If these identifiers can contain 'special' bytes (like \xFF or \xFE), you should use instead (unsafe). + /// + [Pure, CanBeNull] + public string ToStringAnsi() + { + if (this.Count == 0) return this.Array != null ? 
String.Empty : default(string); + //note: Encoding.GetString() will do the bound checking for us + return Encoding.Default.GetString(this.Array, this.Offset, this.Count); + } + + /// Stringify a slice containing 7-bit ASCII characters only + /// Decoded string, or null if the slice is null + /// + /// This method should ONLY be used to decoded data that is GUARANTEED to be in the range 0..127. + /// This method will THROW if any byte in the slice has bit 7 set to 1 (ie: >= 0x80) + /// If you are decoding identifiers or keywords with 'special' bytes (like \xFF or \xFE), you should use instead. + /// If you are decoding natural text, or text from unknown origin, you should use or instead. + /// If you are attempting to decode a string obtain from a Win32 or unamanged library call, you should use instead. + /// + [Pure, CanBeNull] + public string ToStringAscii() + { + if (this.Count == 0) + { + return this.Array != null ? String.Empty : default(string); + } + if (UnsafeHelpers.IsAsciiBytes(this.Array, this.Offset, this.Count)) + { + return UnsafeHelpers.ConvertToByteString(this.Array, this.Offset, this.Count); + } + throw new DecoderFallbackException("The slice contains at least one non-ASCII character"); + } + + /// Stringify a slice containing only ASCII chars + /// ASCII string, or null if the slice is null + [Pure, CanBeNull] + public string ToByteString() //REVIEW: rename to ToStringSOMETHING(): ToStringByte()? ToStringRaw()? + { + return this.Count == 0 + ? (this.Array != null ? String.Empty : default(string)) + : UnsafeHelpers.ConvertToByteString(this.Array, this.Offset, this.Count); + } + + + + /// Stringify a slice containing either 7-bit ASCII, or UTF-8 characters + /// Decoded string, or null if the slice is null. The encoding will be automatically detected + /// + /// This should only be used for slices produced by any of the , , , or methods. + /// This is NOT compatible with slices produced by or encoded with any specific encoding or code page. 
+ /// This method will NOT automatically remove the UTF-8 BOM if present (use if you need this) + /// + [Pure, CanBeNull] + public string ToUnicode() //REVIEW: rename this to ToStringUnicode() ? + { + var array = this.Array; + int count = this.Count; + int offset = this.Offset; + return count == 0 ? (array != null ? String.Empty : default(string)) + : UnsafeHelpers.IsAsciiBytes(array, offset, count) ? UnsafeHelpers.ConvertToByteString(array, offset, count) + : Utf8NoBomEncoding.GetString(array, offset, count); + } + + [Pure] + private static bool HasUtf8Bom([NotNull] byte[] array, int offset, int count) + { + return count >= 3 + && (uint) (offset + count) <= (uint) array.Length + && array[offset + 0] == 0xEF + && array[offset + 1] == 0xBB + && array[offset + 2] == 0xBF; + } + + /// Decode a slice that is known to contain an UTF-8 encoded string with an optional UTF-8 BOM + /// Decoded string, or null if the slice is null + /// If the slice contains one or more invalid UTF-8 sequences + /// + /// This method will THROW if the slice does not contain valid UTF-8 sequences. + /// This method will remove any UTF-8 BOM if present. If you need to keep the BOM as the first character of the string, use + /// + [Pure, CanBeNull] + public string ToStringUtf8() + { + int count = this.Count; + var array = this.Array; + if (count == 0) return array != null ? String.Empty : default(string); + + // detect BOM + int offset = this.Offset; + if (HasUtf8Bom(array, offset, count)) + { // skip it! + offset += 3; + count -= 3; + if (count == 0) return String.Empty; + } + return Slice.Utf8NoBomEncoding.GetString(array, offset, count); + } + + /// Converts a slice using Base64 encoding + [Pure, CanBeNull] + public string ToBase64() + { + if (this.Count == 0) return this.Array != null ? 
String.Empty : default(string); + //note: Convert.ToBase64String() will do the bound checking for us + return Convert.ToBase64String(this.Array, this.Offset, this.Count); + } + + /// Converts a slice into a string with each byte encoded into hexadecimal (lowercase) + /// If true, produces lowercase hexadecimal (a-f); otherwise, produces uppercase hexadecimal (A-F) + /// "0123456789abcdef" + [Pure, NotNull] + public string ToHexaString(bool lower = false) + { + return FormatHexaString(this.Array, this.Offset, this.Count, '\0', lower); + } + + /// Converts a slice into a string with each byte encoded into hexadecimal (uppercase) separated by a char + /// Character used to separate the hexadecimal pairs (ex: ' ') + /// If true, produces lowercase hexadecimal (a-f); otherwise, produces uppercase hexadecimal (A-F) + /// "01 23 45 67 89 ab cd ef" + [Pure, NotNull] + public string ToHexaString(char sep, bool lower = false) + { + return FormatHexaString(this.Array, this.Offset, this.Count, sep, lower); + } + + [Pure, NotNull] + internal static string FormatHexaString(byte[] buffer, int offset, int count, char sep, bool lower) + { + if (count == 0) return String.Empty; + UnsafeHelpers.EnsureBufferIsValidNotNull(buffer, offset, count); + + var sb = new StringBuilder(count * (sep == '\0' ? 2 : 3)); + int letters = lower ? 87 : 55; + unsafe + { + fixed (byte* ptr = &buffer[offset]) + { + byte* inp = ptr; + byte* stop = ptr + count; + while (inp < stop) + { + if ((sep != '\0') & (sb.Length > 0)) sb.Append(sep); + byte b = *inp++; + int h = b >> 4; + int l = b & 0xF; + h += h < 10 ? 48 : letters; + l += l < 10 ? 
48 : letters; + sb.Append((char) h).Append((char) l); + } + } + } + + return sb.ToString(); + } + + [NotNull] + private static StringBuilder EscapeString(StringBuilder sb, [NotNull] byte[] buffer, int offset, int count, [NotNull] Encoding encoding) + { + if (sb == null) sb = new StringBuilder(count + 16); + foreach (var c in encoding.GetChars(buffer, offset, count)) + { + if ((c >= ' ' && c <= '~') || (c >= 880 && c <= 2047) || (c >= 12352 && c <= 12591)) + sb.Append(c); + else if (c == 0) + sb.Append(@"\0"); + else if (c == '\n') + sb.Append(@"\n"); + else if (c == '\r') + sb.Append(@"\r"); + else if (c == '\t') + sb.Append(@"\t"); + else if (c > 127) + sb.Append(@"\u").Append(((int)c).ToString("x4", CultureInfo.InvariantCulture)); + else // pas clean! + sb.Append(@"\x").Append(((int)c).ToString("x2", CultureInfo.InvariantCulture)); + } + return sb; + } + + /// Helper method that dumps the slice as a string (if it contains only printable ascii chars) or an hex array if it contains non printable chars. It should only be used for logging and troubleshooting ! + /// Returns either "'abc'", "<00 42 7F>", or "{ ...JSON... }". Returns "''" for Slice.Empty, and "" for + [Pure, NotNull] + public string PrettyPrint() + { + if (this.Count == 0) return this.Array != null ? "''" : String.Empty; + return PrettyPrint(this.Array, this.Offset, this.Count, 1024); //REVIEW: constant for max size! + } + + /// Helper method that dumps the slice as a string (if it contains only printable ascii chars) or an hex array if it contains non printable chars. It should only be used for logging and troubleshooting ! + /// Truncate the slice if it exceeds this size + /// Returns either "'abc'", "<00 42 7F>", or "{ ...JSON... }". Returns "''" for Slice.Empty, and "" for + [Pure, NotNull] + public string PrettyPrint(int maxLen) + { + if (this.Count == 0) return this.Array != null ? 
"''" : String.Empty;
			return PrettyPrint(this.Array, this.Offset, this.Count, maxLen);
		}

		[Pure, NotNull]
		internal static string PrettyPrint([NotNull] byte[] buffer, int offset, int count, int maxLen)
		{
			if (count == 0) return "''";

			// look for UTF-8 BOM
			if (count >= 3 && buffer[offset] == 0xEF && buffer[offset + 1] == 0xBB && buffer[offset + 2] == 0xBF)
			{ // this is supposed to be an UTF-8 string
				return EscapeString(new StringBuilder(count).Append('\''), buffer, offset + 3, Math.Min(count - 3, maxLen), Slice.Utf8NoBomEncoding).Append('\'').ToString();
			}

			if (count >= 2)
			{
				// look for JSON objects or arrays
				if ((buffer[offset] == '{' && buffer[offset + count - 1] == '}') || (buffer[offset] == '[' && buffer[offset + count - 1] == ']'))
				{
					try
					{
						if (count <= maxLen)
						{
							return EscapeString(new StringBuilder(count + 16), buffer, offset, count, Slice.Utf8NoBomEncoding).ToString();
						}
						else
						{
							// truncated: show an ellipsis, followed by the closing '}' or ']'
							// note: the cast to char is required! StringBuilder.Append(byte) appends the NUMERIC value (ex: "125") instead of the character
							return EscapeString(new StringBuilder(count + 16), buffer, offset, maxLen, Slice.Utf8NoBomEncoding)
								.Append("[\u2026]")
								.Append((char) buffer[offset + count - 1])
								.ToString();
						}
					}
					catch (System.Text.DecoderFallbackException)
					{
						// sometimes, binary data "looks" like valid JSON but is not, so we just ignore it (even if we may have done a bunch of work for nothing)
					}
				}
			}

			// do a first pass on the slice to look for binary or possible text
			bool mustEscape = false;
			int n = count;
			int p = offset;
			while (n-- > 0)
			{
				byte b = buffer[p++];
				if (b >= 32 && b < 127) continue;

				// we accept via escaping the following special chars: CR, LF, TAB
				if (b == 0 || b == 10 || b == 13 || b == 9)
				{
					mustEscape = true;
					continue;
				}

				//TODO: are there any chars above 128 that could be accepted ?

				// this looks like binary: fall back to a hex dump
				return Slice.Dump(new Slice(buffer, offset, count), maxLen);
			}

			if (!mustEscape)
			{ // only printable chars found
				if (count <= maxLen)
				{
					return "'" + Encoding.ASCII.GetString(buffer, offset, count) + "'";
				}
				else
				{
					return "'" + Encoding.ASCII.GetString(buffer, offset, maxLen) + "[\u2026]'"; // Unicode for '...'
				}
			}

			// some escaping required
			if (count <= maxLen)
			{
				return EscapeString(new StringBuilder(count + 2).Append('\''), buffer, offset, count, Slice.Utf8NoBomEncoding).Append('\'').ToString();
			}
			else
			{
				return EscapeString(new StringBuilder(count + 2).Append('\''), buffer, offset, maxLen, Slice.Utf8NoBomEncoding).Append("[\u2026]'").ToString();
			}
		}

		/// Converts a slice into a byte
		/// Value of the first and only byte of the slice, or 0 if the slice is null or empty.
		/// If the slice has more than one byte
		[Pure]
		public byte ToByte()
		{
			switch (this.Count)
			{
				case 0: return 0;
				case 1: return this.Array[this.Offset];
				default:
					if (this.Count < 0) throw UnsafeHelpers.Errors.SliceCountNotNeg();
					return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(1);
			}
		}

		/// Converts a slice into a signed byte (-128..+127)
		/// Value of the first and only byte of the slice, or 0 if the slice is null or empty.
		/// If the slice has more than one byte
		[Pure]
		public sbyte ToSByte()
		{
			switch (this.Count)
			{
				case 0: return 0;
				case 1: return (sbyte) this.Array[this.Offset];
				default:
					if (this.Count < 0) throw UnsafeHelpers.Errors.SliceCountNotNeg();
					return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(1);
			}
		}

		/// Converts a slice into a boolean.
		/// False if the slice is empty, or is equal to the byte 0; otherwise, true.
		[Pure]
		public bool ToBool()
		{
			EnsureSliceIsValid();
			// Anything apart from nil/empty, or the byte 0 itself, is considered truthy.
+ return this.Count > 1 || (this.Count == 1 && this.Array[this.Offset] != 0); + //TODO: consider checking if the slice consist of only zeroes ? (ex: Slice.FromFixed32(0) could be considered falsy ...) + } + + #region 16 bits... + + /// Converts a slice into a little-endian encoded, signed 16-bit integer. + /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 2 bytes + /// If there are more than 2 bytes in the slice + [Pure] + public short ToInt16() + { + switch (this.Count) + { + case 0: return 0; + case 1: return this.Array[this.Offset]; + case 2: return (short)(this.Array[this.Offset] | (this.Array[this.Offset + 1] << 8)); + default: + if (this.Count < 0) throw UnsafeHelpers.Errors.SliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(2); + } + } + + /// Converts a slice into a big-endian encoded, signed 16-bit integer. + /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 2 bytes + /// If there are more than 2 bytes in the slice + [Pure] + public short ToInt16BE() + { + EnsureSliceIsValid(); + switch (this.Count) + { + case 0: return 0; + case 1: return this.Array[this.Offset]; + case 2: return (short)(this.Array[this.Offset + 1] | (this.Array[this.Offset] << 8)); + default: + if (this.Count < 0) throw UnsafeHelpers.Errors.SliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(2); + + } + } + + /// Converts a slice into a little-endian encoded, unsigned 16-bit integer. 
		/// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 2 bytes
		/// If there are more than 2 bytes in the slice
		[Pure]
		public ushort ToUInt16()
		{
			EnsureSliceIsValid();
			switch (this.Count)
			{
				case 0: return 0;
				case 1: return this.Array[this.Offset];
				// little-endian: first byte is the least significant
				case 2: return (ushort)(this.Array[this.Offset] | (this.Array[this.Offset + 1] << 8));
				default:
					if (this.Count < 0) throw UnsafeHelpers.Errors.SliceCountNotNeg();
					return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(2);
			}
		}

		/// Converts a slice into a big-endian encoded, unsigned 16-bit integer.
		/// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 2 bytes
		/// If there are more than 2 bytes in the slice
		[Pure]
		public ushort ToUInt16BE()
		{
			EnsureSliceIsValid();
			switch (this.Count)
			{
				case 0: return 0;
				case 1: return this.Array[this.Offset];
				// big-endian: first byte is the most significant
				case 2: return (ushort)(this.Array[this.Offset + 1] | (this.Array[this.Offset] << 8));
				default:
					if (this.Count < 0) throw UnsafeHelpers.Errors.SliceCountNotNeg();
					return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(2);

			}
		}

		/// Read a variable-length, little-endian encoded, unsigned integer from a specific location in the slice
		/// Relative offset of the first byte
		/// Number of bytes to read (up to 2)
		/// Decoded unsigned short.
		/// If is less than zero, or more than 2.
+ [Pure] + public ushort ReadUInt16(int offset, int bytes) + { + if ((uint) bytes > 2) goto fail; + + var buffer = this.Array; + int p = UnsafeMapToOffset(offset); + switch (bytes) + { + case 0: return 0; + case 1: return buffer[p]; + default: return (ushort)(buffer[p] | (buffer[p + 1] << 8)); + } + fail: + throw new ArgumentOutOfRangeException(nameof(bytes)); + } + + /// Read a variable-length, big-endian encoded, unsigned integer from a specific location in the slice + /// Relative offset of the first byte + /// Number of bytes to read (up to 2) + /// Decoded unsigned short. + /// If is less than zero, or more than 2. + [Pure] + public ushort ReadUInt16BE(int offset, int bytes) + { + if ((uint) bytes > 2) goto fail; + + var buffer = this.Array; + int p = UnsafeMapToOffset(offset); + switch (bytes) + { + case 0: return 0; + case 1: return buffer[p]; + default: return (ushort)(buffer[p + 1] | (buffer[p] << 8)); + } + fail: + throw new ArgumentOutOfRangeException(nameof(bytes)); + } + + #endregion + + #region 24 bits... + + //note: all 'Int24' and 'UInt24' are represented in memory as Int32/UInt32 using only the lowest 24 bits (upper 8 bits will be IGNORED) + //note: 'FF FF' is equivalent to '00 FF FF', so is considered to be positive (= 65535) + + /// Converts a slice into a little-endian encoded, signed 24-bit integer. 
+ /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 3 bytes + /// If there are more than 3 bytes in the slice + [Pure] + public int ToInt24() + { + EnsureSliceIsValid(); + int count = this.Count; + if (count == 0) return 0; + unsafe + { + fixed (byte* ptr = &DangerousGetPinnableReference()) + { + switch (count) + { + case 1: return *ptr; + case 2: return UnsafeHelpers.LoadUInt16LE(ptr); // cannot be negative + case 3: return UnsafeHelpers.LoadInt24LE(ptr); + } + } + } + if (count < 0) UnsafeHelpers.Errors.ThrowSliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(3); + } + + /// Converts a slice into a big-endian encoded, signed 24-bit integer. + /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 3 bytes + /// If there are more than 3 bytes in the slice + [Pure] + public int ToInt24BE() + { + EnsureSliceIsValid(); + int count = this.Count; + if (count == 0) return 0; + unsafe + { + fixed (byte* ptr = &DangerousGetPinnableReference()) + { + switch (count) + { + case 1: return *ptr; + case 2: return UnsafeHelpers.LoadUInt16BE(ptr); + case 3: return UnsafeHelpers.LoadInt24BE(ptr); + } + } + } + if (count < 0) UnsafeHelpers.Errors.ThrowSliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(3); + } + + /// Converts a slice into a little-endian encoded, unsigned 24-bit integer. 
+ /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 3 bytes + /// If there are more than 3 bytes in the slice + [Pure] + public uint ToUInt24() + { + EnsureSliceIsValid(); + int count = this.Count; + if (count == 0) return 0; + unsafe + { + fixed (byte* ptr = &DangerousGetPinnableReference()) + { + switch (count) + { + case 1: return *ptr; + case 2: return UnsafeHelpers.LoadUInt16LE(ptr); + case 3: return UnsafeHelpers.LoadUInt24LE(ptr); + } + } + } + if (count < 0) UnsafeHelpers.Errors.ThrowSliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(3); + } + + /// Converts a slice into a little-endian encoded, unsigned 24-bit integer. + /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 3 bytes + /// If there are more than 3 bytes in the slice + [Pure] + public uint ToUInt24BE() + { + EnsureSliceIsValid(); + int count = this.Count; + if (count == 0) return 0; + unsafe + { + fixed (byte* ptr = &DangerousGetPinnableReference()) + { + switch (count) + { + case 1: return *ptr; + case 2: return UnsafeHelpers.LoadUInt16BE(ptr); + case 3: return UnsafeHelpers.LoadUInt24BE(ptr); + } + } + } + if (count < 0) UnsafeHelpers.Errors.ThrowSliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(3); + } + + /// Read a variable-length, little-endian encoded, unsigned integer from a specific location in the slice + /// Relative offset of the first byte + /// Number of bytes to read (up to 2) + /// Decoded unsigned short. + /// If is less than zero, or more than 3. 
+ [Pure] + public uint ReadUInt24(int offset, int bytes) + { + if ((uint) bytes > 3) throw ThrowHelper.ArgumentOutOfRangeException(nameof(bytes)); + + var buffer = this.Array; + int p = UnsafeMapToOffset(offset); + switch (bytes) + { + case 0: return 0; + case 1: return buffer[p]; + case 2: return (uint)(buffer[p] | (buffer[p + 1] << 8)); + default: return (uint)(buffer[p] | (buffer[p + 1] << 8) | (buffer[p + 2] << 16)); + } + } + + /// Read a variable-length, big-endian encoded, unsigned integer from a specific location in the slice + /// Relative offset of the first byte + /// Number of bytes to read (up to 2) + /// Decoded unsigned short. + /// If is less than zero, or more than 3. + [Pure] + public ushort ReadUInt24BE(int offset, int bytes) + { + if ((uint) bytes > 3) throw ThrowHelper.ArgumentOutOfRangeException(nameof(bytes)); + + var buffer = this.Array; + int p = UnsafeMapToOffset(offset); + switch (bytes) + { + case 0: return 0; + case 1: return buffer[p]; + case 2: return (ushort)(buffer[p + 1] | (buffer[p] << 8)); + default: return (ushort)(buffer[p + 2] | (buffer[p + 1] << 8) | (buffer[p] << 16)); + } + } + + #endregion + + #region 32 bits... + + /// Converts a slice into a little-endian encoded, signed 32-bit integer. 
+ /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 4 bytes + /// If there are more than 4 bytes in the slice + [Pure] + public int ToInt32() + { + // note: we ensure that offset is not negative by doing a cast to uint + uint off = checked((uint)this.Offset); + var arr = this.Array; // if null, whill throw later with a nullref + switch (this.Count) // if negative, will throw in the default case below + { + case 0: return 0; + case 1: return arr[off]; + case 2: return arr[off] | (arr[off + 1] << 8); + case 3: return arr[off] | (arr[off + 1] << 8) | (arr[off + 2] << 16); + case 4: return arr[off] | (arr[off + 1] << 8) | (arr[off + 2] << 16) | (arr[off + 3] << 24); + default: + { + if (this.Count < 0) UnsafeHelpers.Errors.ThrowSliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(4); + } + } + } + + /// Converts a slice into a big-endian encoded, signed 32-bit integer. + /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 4 bytes + /// If there are more than 4 bytes in the slice + [Pure] + public int ToInt32BE() + { + // note: we ensure that offset is not negative by doing a cast to uint + uint off = checked((uint)this.Offset); + var arr = this.Array; // if null, whill throw later with a nullref + switch (this.Count) // if negative, will throw in the default case below + { + case 0: return 0; + case 1: return arr[off]; + case 2: return (arr[off] << 8) | arr[off + 1]; + case 3: return (arr[off] << 16) | (arr[off + 1] << 8) | arr[off + 2]; + case 4: return (arr[off] << 24) | (arr[off + 1] << 16) | (arr[off + 2] << 8) | arr[off + 3]; + default: + { + if (this.Count < 0) UnsafeHelpers.Errors.ThrowSliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(4); + } + } + } + + /// Converts a slice into a little-endian encoded, unsigned 32-bit integer. 
+ /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 4 bytes + /// If there are more than 4 bytes in the slice + [Pure] + public uint ToUInt32() + { + // note: we ensure that offset is not negative by doing a cast to uint + uint off = checked((uint)this.Offset); + var arr = this.Array; // if null, whill throw later with a nullref + switch (this.Count) // if negative, will throw in the default case below + { + case 0: return 0; + case 1: return arr[off]; + case 2: return (uint)(arr[off] | (arr[off + 1] << 8)); + case 3: return (uint)(arr[off] | (arr[off + 1] << 8) | (arr[off + 2] << 16)); + case 4: return (uint)(arr[off] | (arr[off + 1] << 8) | (arr[off + 2] << 16) | (arr[off + 3] << 24)); + default: + { + if (this.Count < 0) UnsafeHelpers.Errors.ThrowSliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(4); + } + } + } + + /// Converts a slice into a big-endian encoded, unsigned 32-bit integer. + /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 4 bytes + /// If there are more than 4 bytes in the slice + [Pure] + public uint ToUInt32BE() + { + // note: we ensure that offset is not negative by doing a cast to uint + uint off = checked((uint)this.Offset); + var arr = this.Array; // if null, whill throw later with a nullref + switch (this.Count) // if negative, will throw in the default case below + { + case 0: return 0; + case 1: return arr[off]; + case 2: return (uint)((arr[off] << 8) | arr[off + 1]); + case 3: return (uint)((arr[off] << 16) | (arr[off + 1] << 8) | arr[off + 2]); + case 4: return (uint)((arr[off] << 24) | (arr[off + 1] << 16) | (arr[off + 2] << 8) | arr[off + 3]); + default: + { + if (this.Count < 0) UnsafeHelpers.Errors.ThrowSliceCountNotNeg(); + return UnsafeHelpers.Errors.ThrowSliceTooLargeForConversion(4); + } + } + } + + /// Read a variable-length, little-endian encoded, unsigned integer from a specific location in the slice 
+ /// Relative offset of the first byte + /// Number of bytes to read (up to 4) + /// Decoded unsigned integer. + /// If is less than zero, or more than 4. + [Pure] + public uint ReadUInt32(int offset, int bytes) + { + if (bytes == 0) return 0; + if ((uint) bytes > 4) throw ThrowHelper.ArgumentOutOfRangeException(nameof(bytes)); + + var buffer = this.Array; + int p = UnsafeMapToOffset(offset) + bytes - 1; + + uint value = buffer[p--]; + while (--bytes > 0) + { + value = (value << 8) | buffer[p--]; + } + return value; + } + + /// Read a variable-length, big-endian encoded, unsigned integer from a specific location in the slice + /// Relative offset of the first byte + /// Number of bytes to read (up to 4) + /// Decoded unsigned integer. + /// If is less than zero, or more than 4. + [Pure] + public uint ReadUInt32BE(int offset, int bytes) + { + if (bytes == 0) return 0; + if ((uint) bytes > 4) throw ThrowHelper.ArgumentOutOfRangeException(nameof(bytes)); + + var buffer = this.Array; + int p = UnsafeMapToOffset(offset); + + uint value = buffer[p++]; + while (--bytes > 0) + { + value = (value << 8) | buffer[p++]; + } + return value; + } + + #endregion + + #region 64 bits... + + /// Converts a slice into a little-endian encoded, signed 64-bit integer. + /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 8 bytes + /// If there are more than 8 bytes in the slice + [Pure] + public long ToInt64() + { + return this.Count <= 4 ? ToUInt32() : ToInt64Slow(); + } + + [Pure] + private long ToInt64Slow() + { + int n = this.Count; + if ((uint) n > 8) goto fail; + EnsureSliceIsValid(); + + var buffer = this.Array; + int p = this.Offset + n - 1; + + long value = buffer[p--]; + while (--n > 0) + { + value = (value << 8) | buffer[p--]; + } + + return value; + fail: + throw new FormatException("Cannot convert slice into an Int64 because it is larger than 8 bytes"); + } + + /// Converts a slice into a big-endian encoded, signed 64-bit integer. 
+ /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 8 bytes + /// If there are more than 8 bytes in the slice + [Pure] + public long ToInt64BE() + { + return this.Count <= 4 ? ToInt32BE() : ToInt64BESlow(); + } + + [Pure] + private long ToInt64BESlow() + { + int n = this.Count; + if (n == 0) return 0L; + if ((uint) n > 8) goto fail; + EnsureSliceIsValid(); + + var buffer = this.Array; + int p = this.Offset; + + long value = buffer[p++]; + while (--n > 0) + { + value = (value << 8) | buffer[p++]; + } + return value; + fail: + throw new FormatException("Cannot convert slice into an Int64 because it is larger than 8 bytes"); + } + + /// Converts a slice into a little-endian encoded, unsigned 64-bit integer. + /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 8 bytes + /// If there are more than 8 bytes in the slice + [Pure] + public ulong ToUInt64() + { + int n = this.Count; + if (n == 0) return 0L; + if ((uint) n > 8) goto fail; + EnsureSliceIsValid(); + + var buffer = this.Array; + int p = this.Offset + n - 1; + + ulong value = buffer[p--]; + while (--n > 0) + { + value = (value << 8) | buffer[p--]; + } + return value; + fail: + throw new FormatException("Cannot convert slice into an UInt64 because it is larger than 8 bytes"); + } + + /// Converts a slice into a little-endian encoded, unsigned 64-bit integer. 
+ /// 0 if the slice is null or empty, an unsigned integer, or an error if the slice has more than 8 bytes
+ /// If there are more than 8 bytes in the slice
+ [Pure]
+ public ulong ToUInt64BE()
+ {
+ int n = this.Count;
+ if (n == 0) return 0L;
+ if ((uint) n > 8) goto fail;
+ EnsureSliceIsValid();
+
+ var buffer = this.Array;
+ int p = this.Offset;
+
+ ulong value = buffer[p++];
+ while (--n > 0)
+ {
+ value = (value << 8) | buffer[p++];
+ }
+ return value;
+ fail:
+ throw new FormatException("Cannot convert slice into an UInt64 because it is larger than 8 bytes");
+ }
+
+ /// Read a variable-length, little-endian encoded, unsigned integer from a specific location in the slice
+ /// Relative offset of the first byte
+ /// Number of bytes to read (up to 8)
+ /// Decoded unsigned integer.
+ /// If is less than zero, or more than 8.
+ [Pure]
+ public ulong ReadUInt64(int offset, int bytes)
+ {
+ if (bytes == 0) return 0UL;
+ // consistency: reject oversized counts via ThrowHelper, like ReadUInt32 and ReadUInt64BE, instead of a goto/throw tail
+ if ((uint) bytes > 8) throw ThrowHelper.ArgumentOutOfRangeException(nameof(bytes));
+
+ var buffer = this.Array;
+ int p = UnsafeMapToOffset(offset) + bytes - 1;
+
+ ulong value = buffer[p--];
+ while (--bytes > 0)
+ {
+ value = (value << 8) | buffer[p--];
+ }
+ return value;
+ }
+
+ /// Read a variable-length, big-endian encoded, unsigned integer from a specific location in the slice
+ /// Relative offset of the first byte
+ /// Number of bytes to read (up to 8)
+ /// Decoded unsigned integer.
+ /// If is less than zero, or more than 8.
+ [Pure]
+ public ulong ReadUInt64BE(int offset, int bytes)
+ {
+ if (bytes == 0) return 0UL;
+ if ((uint) bytes > 8) throw ThrowHelper.ArgumentOutOfRangeException(nameof(bytes));
+
+ var buffer = this.Array;
+ int p = UnsafeMapToOffset(offset);
+
+ ulong value = buffer[p++];
+ while (--bytes > 0)
+ {
+ value = (value << 8) | buffer[p++];
+ }
+ return value;
+ }
+
+ /// Converts a slice into a 64-bit UUID.
+ /// Uuid decoded from the Slice. 
+ /// The slice can either be an 8-byte array, or an ASCII string of 16, 17 or 19 chars + [Pure] + public Uuid64 ToUuid64() + { + if (this.Count == 0) return default(Uuid64); + EnsureSliceIsValid(); + + switch (this.Count) + { + case 8: + { // binary (8 bytes) + return Uuid64.Read(this); + } + + case 16: // hex16 + case 17: // hex8-hex8 + case 19: // {hex8-hex8} + { + // ReSharper disable once AssignNullToNotNullAttribute + return Uuid64.Parse(this.ToByteString()); + } + } + + throw new FormatException("Cannot convert slice into an Uuid64 because it has an incorrect size"); + } + + #endregion + + #region Floating Point... + + /// Converts a slice into a 32-bit IEEE floating point. + /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 4 bytes + /// If there are less or more than 4 bytes in the slice + [Pure] + public float ToSingle() + { + if (this.Count == 0) return 0f; + if (this.Count != 4) goto fail; + EnsureSliceIsValid(); + + unsafe + { + fixed (byte* ptr = &DangerousGetPinnableReference()) + { + return *((float*)ptr); + } + } + fail: + throw new FormatException("Cannot convert slice into a Single because it is not exactly 4 bytes long."); + } + + /// Converts a slice into a 32-bit IEEE floating point (in network order). + /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 4 bytes + /// If there are less or more than 4 bytes in the slice + [Pure] + public float ToSingleBE() + { + if (this.Count == 0) return 0f; + if (this.Count != 4) goto fail; + EnsureSliceIsValid(); + + unsafe + { + fixed (byte* ptr = &DangerousGetPinnableReference()) + { + uint tmp = UnsafeHelpers.ByteSwap32(*(uint*)ptr); + return *((float*) &tmp); + } + } + fail: + throw new FormatException("Cannot convert slice into a Single because it is not exactly 4 bytes long."); + } + + /// Converts a slice into a 64-bit IEEE floating point. 
+ /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 8 bytes + /// If there are less or more than 8 bytes in the slice + [Pure] + public double ToDouble() + { + if (this.Count == 0) return 0d; + if (this.Count != 8) goto fail; + EnsureSliceIsValid(); + + unsafe + { + fixed (byte* ptr = &DangerousGetPinnableReference()) + { + return *((double*) ptr); + } + } + fail: + throw new FormatException("Cannot convert slice into a Double because it is not exactly 8 bytes long."); + } + + /// Converts a slice into a 64-bit IEEE floating point (in network order). + /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 8 bytes + /// If there are less or more than 8 bytes in the slice + [Pure] + public double ToDoubleBE() + { + if (this.Count == 0) return 0d; + if (this.Count != 8) goto fail; + EnsureSliceIsValid(); + + unsafe + { + fixed (byte* ptr = &DangerousGetPinnableReference()) + { + ulong tmp = UnsafeHelpers.ByteSwap64(*(ulong*)ptr); + return *((double*) &tmp); + } + } + fail: + throw new FormatException("Cannot convert slice into a Double because it is not exactly 8 bytes long."); + } + + /// Converts a slice into a 128-bit IEEE floating point. + /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 8 bytes + /// If there are less or more than 8 bytes in the slice + [Pure] + public decimal ToDecimal() + { + if (this.Count == 0) return 0m; + if (this.Count != 16) goto fail; + EnsureSliceIsValid(); + + unsafe + { + fixed (byte* ptr = &DangerousGetPinnableReference()) + { + return *((decimal*)ptr); + } + } + fail: + throw new FormatException("Cannot convert slice into a Decimal because it is not exactly 16 bytes long."); + } + + #endregion + + #region 128 bits... + + /// Converts a slice into a Guid. + /// Native Guid decoded from the Slice. 
+ /// The slice can either be a 16-byte RFC4122 GUID, or an ASCII string of 36 chars + [Pure] + public Guid ToGuid() + { + if (this.Count == 0) return default(Guid); + EnsureSliceIsValid(); + + if (this.Count == 16) + { // direct byte array + + // UUID are stored using the RFC4122 format (Big Endian), while .NET's System.GUID use Little Endian + // we need to swap the byte order of the Data1, Data2 and Data3 chunks, to ensure that Guid.ToString() will return the proper value. + + return new Uuid128(this).ToGuid(); + } + + if (this.Count == 36) + { // string representation (ex: "da846709-616d-4e82-bf55-d1d3e9cde9b1") + // ReSharper disable once AssignNullToNotNullAttribute + return Guid.Parse(this.ToByteString()); + } + + throw new FormatException("Cannot convert slice into a Guid because it has an incorrect size"); + } + + /// Converts a slice into a 128-bit UUID. + /// Uuid decoded from the Slice. + /// The slice can either be a 16-byte RFC4122 GUID, or an ASCII string of 36 chars + [Pure] + public Uuid128 ToUuid128() + { + if (this.Count == 0) return default(Uuid128); + EnsureSliceIsValid(); + + if (this.Count == 16) + { + return new Uuid128(this); + } + + if (this.Count == 36) + { + // ReSharper disable once AssignNullToNotNullAttribute + return Uuid128.Parse(ToByteString()); + } + + throw new FormatException("Cannot convert slice into an Uuid128 because it has an incorrect size"); + } + + #endregion + + #endregion + } +} diff --git a/FoundationDB.Client/Shared/Memory/Slice.cs b/FoundationDB.Client/Shared/Memory/Slice.cs new file mode 100644 index 000000000..861323b16 --- /dev/null +++ b/FoundationDB.Client/Shared/Memory/Slice.cs @@ -0,0 +1,2519 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +//#define ENABLE_SPAN + +namespace System +{ + using System; + using System.Collections.Generic; + using System.ComponentModel; + using System.Diagnostics; + using System.IO; + using System.Linq; + using System.Runtime.CompilerServices; + using System.Runtime.InteropServices; + using System.Text; + using System.Threading; + using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using JetBrains.Annotations; + + /// Delimits a section of a byte array + /// A Slice if the logical equivalent to a ReadOnlySpan<byte> + [PublicAPI, ImmutableObject(true), DebuggerDisplay("Count={Count}, Offset={Offset}"), DebuggerTypeProxy(typeof(Slice.DebugView))] + [DebuggerNonUserCode] //remove this when you need to troubleshoot this class! + public readonly partial struct Slice : IEquatable, IEquatable>, IEquatable, IComparable, IFormattable + { + #region Static Members... + + /// Null slice ("no segment") + public static readonly Slice Nil = default(Slice); + + /// Empty slice ("segment of 0 bytes") + //note: we allocate a 1-byte array so that we can get a pointer to &slice.Array[slice.Offset] even for the empty slice + public static readonly Slice Empty = new Slice(new byte[1], 0, 0); + + /// Cached array of bytes from 0 to 255 + [NotNull] + internal static readonly byte[] ByteSprite = CreateByteSprite(); + + private static byte[] CreateByteSprite() + { + var tmp = new byte[256]; + for (int i = 0; i < tmp.Length; i++) tmp[i] = (byte) i; + return tmp; + } + + #endregion + + //REVIEW: Layout: should we maybe swap things around? .Count seems to be the most often touched field before the rest + // => Should it be Array/Offset/Count (current), or Count/Offset/Array ? 
+ + /// Pointer to the buffer (or null for ) + public readonly byte[] Array; + + /// Offset of the first byte of the slice in the parent buffer + public readonly int Offset; + + /// Number of bytes in the slice + public readonly int Count; + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal Slice([NotNull] byte[] array, int offset, int count) + { + //Paranoid.Requires(array != null && offset >= 0 && offset <= array.Length && count >= 0 && offset + count <= array.Length); + this.Array = array; + this.Offset = offset; + this.Count = count; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal Slice([NotNull] byte[] array) + { + //Paranoid.Requires(array != null); + this.Array = array; + this.Offset = 0; + this.Count = array.Length; + } + + /// Creates a slice mapping a section of a buffer, without any sanity checks or buffer optimization + /// Original buffer + /// Offset into buffer + /// Number of bytes + /// Slice that maps this segment of buffer. + /// + /// Slice.CreateUnsafe(buffer, 1, 5) => Slice { Array = buffer, Offset = 1, Count = 5 } + /// + /// + /// Use this method ONLY if you are 100% sure that the slice will be valid. Failure to do so may introduce memory corruption! + /// Also, please note that this method will NOT optimize the case where count == 0, and will keep a reference to the original buffer! + /// The caller is responsible for handle that scenario if it is important! 
+ /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice CreateUnsafe([NotNull] byte[] buffer, [Positive] int offset, [Positive] int count) + { + Contract.Requires(buffer != null && (uint) offset <= (uint) buffer.Length && (uint) count <= (uint) (buffer.Length - offset)); + return new Slice(buffer, offset, count); + } + + /// Creates a slice mapping a section of a buffer, without any sanity checks or buffer optimization + /// Original buffer + /// Offset into buffer + /// Number of bytes + /// Slice that maps this segment of buffer. + /// + /// Slice.CreateUnsafe(buffer, 1, 5) => Slice { Array = buffer, Offset = 1, Count = 5 } + /// + /// + /// Use this method ONLY if you are 100% sure that the slice will be valid. Failure to do so may introduce memory corruption! + /// Also, please note that this method will NOT optimize the case where count == 0, and will keep a reference to the original buffer! + /// The caller is responsible for handle that scenario if it is important! + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice CreateUnsafe([NotNull] byte[] buffer, uint offset, uint count) + { + Contract.Requires(buffer != null && offset <= (uint) buffer.Length && count <= ((uint) buffer.Length - offset)); + return new Slice(buffer, (int) offset, (int) count); + } + + /// Creates a new empty slice of a specified size containing all zeroes + public static Slice Create(int size) + { + Contract.Positive(size, nameof(size)); + return size != 0 ? new Slice(new byte[size]) : Slice.Empty; + } + + /// Creates a new empty slice of a specified size containing all zeroes + [Pure] + public static Slice Create(uint size) + { + Contract.LessOrEqual(size, int.MaxValue, nameof(size)); + return size != 0 ? 
new Slice(new byte[size]) : Slice.Empty; + } + + /// Creates a new slice with a copy of the array + [Pure] + public static Slice Copy(byte[] source) + { + Contract.NotNull(source, nameof(source)); + if (source.Length == 0) return Empty; + return Copy(source, 0, source.Length); + } + +#if ENABLE_SPAN + + /// Creates a new slice with a copy of the array segment + [Pure] + public static Slice Copy(byte[] source, int offset, int count) + { + return Copy(new ReadOnlySpan(source, offset, count)); + } + + /// Creates a new slice with a copy of the span + [Pure] + public static Slice Copy(ReadOnlySpan source) + { + if (source.Length == 0) return Empty; + var tmp = source.ToArray(); + return new Slice(tmp, 0, source.Length); + } + + /// Creates a new slice with a copy of the span, using a scratch buffer + [Pure] + public static Slice Copy(ReadOnlySpan source, [CanBeNull] ref byte[] buffer) + { + if (source.Length == 0) return Empty; + var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, BitHelpers.NextPowerOfTwo(source.Length)); + UnsafeHelpers.Copy(tmp, 0, source); + return new Slice(tmp, 0, source.Length); + } + +#else + + /// Creates a new slice with a copy of the array segment + [Pure] + public static Slice Copy(byte[] source, int offset, int count) + { + if (count == 0) return source == null ? Nil : Empty; + var tmp = new byte[count]; + UnsafeHelpers.Copy(tmp, 0, source, offset, count); + return new Slice(tmp, 0, count); + } + + /// Creates a new slice with a copy of the span, using a scratch buffer + [Pure] + public static Slice Copy(Slice source, [CanBeNull] ref byte[] buffer) + { + if (source.Count == 0) return source.Array == null ? 
default(Slice) : Empty; + var tmp = UnsafeHelpers.EnsureCapacity(ref buffer, BitHelpers.NextPowerOfTwo(source.Count)); + UnsafeHelpers.Copy(tmp, 0, source.Array, source.Offset, source.Count); + return new Slice(tmp, 0, source.Count); + } + +#endif + + /// Creates a new slice with a copy of an unmanaged memory buffer + /// Pointer to unmanaged buffer + /// Number of bytes in the buffer + /// Slice with a managed copy of the data + [Pure] + public static Slice Copy(IntPtr source, int count) + { + unsafe + { + return Copy((byte*) source.ToPointer(), count); + } + } + + /// Creates a new slice with a copy of an unmanaged memory buffer + /// Pointer to unmanaged buffer + /// Number of bytes in the buffer + /// Slice with a managed copy of the data + [Pure] + public static unsafe Slice Copy(void * source, int count) + { + return Copy((byte*) source, count); + } + + + /// Creates a new slice with a copy of an unmanaged memory buffer + /// Pointer to unmanaged buffer + /// Number of bytes in the buffer + /// Slice with a managed copy of the data + [Pure] + public static unsafe Slice Copy(byte* source, int count) + { + if (count == 0) + { + return source == null ? 
default(Slice) : Empty; + } + Contract.PointerNotNull(source, nameof(source)); + Contract.Positive(count, nameof(count)); + + if (count == 1) + { // Use the sprite cache + return Slice.FromByte(*source); + } + + var bytes = new byte[count]; + UnsafeHelpers.CopyUnsafe(bytes, 0, source, (uint) count); + return new Slice(bytes, 0, count); + } + +#if ENABLE_SPAN + /// Return a copy of the memory content of an array of item + public static Slice CopyMemory(ReadOnlySpan items) + where T : struct + { + return Copy(MemoryMarshal.AsBytes(items)); + } + + /// Return a copy of the memory content of an array of item + public static Slice CopyMemory(ReadOnlySpan items, [CanBeNull] ref byte[] buffer) + where T : struct + { + return Copy(MemoryMarshal.AsBytes(items), ref buffer); + } +#endif + + /// Implicitly converts a Slice into an ArraySegment<byte> + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator ArraySegment(Slice value) + { + return value.HasValue ? new ArraySegment(value.Array, value.Offset, value.Count) : default(ArraySegment); + } + + /// Implicitly converts an ArraySegment<byte> into a Slice + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator Slice(ArraySegment value) + { + if (value.Count == 0) return value.Array == null ? default(Slice) : Slice.Empty; + return new Slice(value.Array, value.Offset, value.Count); + } + +#if ENABLE_SPAN + /// Converts a Slice into an Span<byte> + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static explicit operator Span(Slice value) + { + //note: explicit because casting to writable Span MAY be dangerous, and we need opt-in from the caller! 
+ return new Span(value.Array, value.Offset, value.Count); + } + + /// Implicitly converts a Slice into an ReadOnlySpan<byte> + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator ReadOnlySpan(Slice value) + { + //note: implicit because casting to non-writable ReadOnlySpan is safe + return new ReadOnlySpan(value.Array, value.Offset, value.Count); + } +#endif + + /// Returns true is the slice is not null + /// An empty slice is NOT considered null + public bool HasValue + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Array != null; } + } + + /// Returns true if the slice is null + /// An empty slice is NOT considered null + public bool IsNull + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Array == null; } + } + + /// Return true if the slice is not null but contains 0 bytes + /// A null slice is NOT empty + public bool IsEmpty + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Count == 0 && this.Array != null; } + } + + /// Returns true if the slice is null or empty, or false if it contains at least one byte + public bool IsNullOrEmpty + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Count == 0; } + } + + /// Returns true if the slice contains at least one byte, or false if it is null or empty + public bool IsPresent + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Count > 0; } + } + + /// Replace with + /// The same slice if it is not ; otherwise, + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice OrEmpty() + { + return this.Count > 0? this : Empty; + } + + /// Return a byte array containing all the bytes of the slice, or null if the slice is null + /// Byte array with a copy of the slice, or null + [Pure, CanBeNull] + public byte[] GetBytes() + { + int len = this.Count; + if (len == 0) return this.Array == null ? 
null : System.Array.Empty(); + EnsureSliceIsValid(); + + var tmp = new byte[len]; + UnsafeHelpers.CopyUnsafe(tmp, 0, this.Array, this.Offset, len); + return tmp; + } + + /// Return a byte array containing all the bytes of the slice, or and empty array if the slice is null or empty + /// Byte array with a copy of the slice + [Pure, NotNull] + public byte[] GetBytesOrEmpty() + { + //note: this is a convenience method for code where dealing with null is a pain, or where it has already checked IsNull + int len = this.Count; + if (len == 0) return System.Array.Empty(); + EnsureSliceIsValid(); + + var tmp = new byte[len]; + UnsafeHelpers.CopyUnsafe(tmp, 0, this.Array, this.Offset, len); + return tmp; + } + + /// Return a byte array containing a subset of the bytes of the slice, or null if the slice is null + /// Byte array with a copy of a subset of the slice, or null + [Pure, NotNull] + public byte[] GetBytes(int offset, int count) + { + //TODO: throw if this.Array == null ? (what does "Slice.Nil.GetBytes(..., 0)" mean ?) 
+ + if (offset < 0) throw new ArgumentOutOfRangeException(nameof(offset)); + + int len = this.Count; + if ((uint) count > (uint) len || (uint) count > (uint) (len - offset)) throw new ArgumentOutOfRangeException(nameof(count)); + + if (count == 0) return System.Array.Empty(); + EnsureSliceIsValid(); + + var tmp = new byte[count]; + UnsafeHelpers.CopyUnsafe(tmp, 0, this.Array, this.Offset + offset, count); + return tmp; + } + + /// Return a SliceReader that can decode this slice into smaller fields + [Obsolete("Use ToSliceReader() instead")] + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public SliceReader GetReader() + { + return new SliceReader(this); + } + + /// Return a SliceReader that can decode this slice into smaller fields + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public SliceReader ToSliceReader() + { + return new SliceReader(this); + } + + /// Return a stream that wraps this slice + /// Stream that will read the slice from the start. + /// + /// You can use this method to convert text into specific encodings, load bitmaps (JPEG, PNG, ...), or any serialization format that requires a Stream or TextReader instance. + /// Disposing this stream will have no effect on the slice. + /// + [Pure, NotNull] + public SliceStream ToSliceStream() + { + EnsureSliceIsValid(); + return new SliceStream(this); + } + + /// Returns a new slice that contains an isolated copy of the buffer + /// Slice that is equivalent, but is isolated from any changes to the buffer + [Pure] + public Slice Memoize() + { + if (this.Count == 0) return this.Array == null ? 
Slice.Nil : Slice.Empty; + // ReSharper disable once AssignNullToNotNullAttribute + return new Slice(GetBytes()); + } + + /// Map an offset in the slice into the absolute offset in the buffer, without any bound checking + /// Relative offset (negative values mean from the end) + /// Absolute offset in the buffer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private int UnsafeMapToOffset(int index) + { + return this.Offset + NormalizeIndex(index); + } + + /// Map an offset in the slice into the absolute offset in the buffer + /// Relative offset (negative values mean from the end) + /// Absolute offset in the buffer + /// If the index is outside the slice + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private int MapToOffset(int index) + { + int p = NormalizeIndex(index); + if ((uint) p >= (uint) this.Count) UnsafeHelpers.Errors.ThrowIndexOutOfBound(index); + return checked(this.Offset + p); + } + + /// Normalize negative index values into offset from the start + /// Relative offset (negative values mean from the end) + /// Relative offset from the start of the slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + private int NormalizeIndex(int index) + { + return index < 0 ? checked(index + this.Count) : index; + } + + /// Returns the value of one byte in the slice + /// Offset of the byte (negative values means start from the end) + public byte this[int index] + { + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Array[MapToOffset(index)]; } + } + +#if ENABLE_SPAN + /// Returns a reference to a specific position in the slice + [MethodImpl(MethodImplOptions.AggressiveInlining)] + [EditorBrowsable(EditorBrowsableState.Never)] + public ref readonly byte ItemRef(int index) + { + return ref this.Array[MapToOffset(index)]; + } +#endif + + /// Returns a substring of the current slice that fits withing the specified index range + /// The starting position of the substring. 
Positive values means from the start, negative values means from the end + /// The end position (excluded) of the substring. Positive values means from the start, negative values means from the end + /// Subslice + public Slice this[int start, int end] + { + get + { + start = NormalizeIndex(start); + end = NormalizeIndex(end); + + // bound check + if (start < 0) start = 0; + if (end > this.Count) end = this.Count; + + if (start >= end) return Slice.Empty; + if (start == 0 && end == this.Count) return this; + + checked { return new Slice(this.Array, this.Offset + start, end - start); } + } + } + + /// + /// Returns a reference to the first byte in the slice. + /// If the slice is empty, returns a reference to the location where the first character would have been stored. + /// Such a reference can be used for pinning but must never be dereferenced. + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + [EditorBrowsable(EditorBrowsableState.Never)] + public ref byte DangerousGetPinnableReference() + { + //note: this is the equivalent of MemoryMarshal.GetReference(..) and does not check for the 0-length case! + return ref this.Array[this.Offset]; + } + +#if ENABLE_SPAN + /// + /// Returns a reference to the 0th element of the Span. If the Span is empty, returns null reference. + /// It can be used for pinning and is required to support the use of span within a fixed statement. + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + [EditorBrowsable(EditorBrowsableState.Never)] + public ref byte GetPinnableReference() + { + unsafe + { + return ref (this.Count != 0) ? 
ref this.Array[this.Offset] : ref Unsafe.AsRef(null); + } + } +#endif + + /// Copy this slice into another buffer, and move the cursor + /// Buffer where to copy this slice + /// Offset into the destination buffer + public void WriteTo([NotNull] byte[] buffer, ref int cursor) + { + //note: CopyBytes will validate all the parameters + int count = this.Count; + UnsafeHelpers.Copy(buffer, cursor, this.Array, this.Offset, count); + cursor += count; + } + + public void CopyTo(Slice destination) + { + if (destination.Count < this.Count) throw UnsafeHelpers.Errors.SliceBufferTooSmall(); + UnsafeHelpers.Copy(destination.Array, destination.Offset, this.Array, this.Offset, this.Count); + } + +#if ENABLE_SPAN + public void CopyTo(Span destination) + { + if (destination.Length < this.Count) throw UnsafeHelpers.Errors.SliceBufferTooSmall(); + UnsafeHelpers.Copy(destination, this.Array, this.Offset, this.Count); + } +#endif + + /// Copy this slice into another buffer + /// Buffer where to copy this slice + /// Offset into the destination buffer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void CopyTo([NotNull] byte[] buffer, int offset) + { + UnsafeHelpers.Copy(buffer, offset, this.Array, this.Offset, this.Count); + } + + /// Copy this slice into memory and return the advanced cursor + /// Pointer where to copy this slice + /// Pointer to the next byte after the last availble position in the output buffer + /// Copy will fail if there is not enough space in the output buffer (ie: if it would writer at or after ) + [NotNull] + public unsafe byte* CopyToUnsafe([NotNull] byte* ptr, [NotNull] byte* end) + { + if (ptr == null | end == null) throw new ArgumentNullException(ptr == null ? 
nameof(ptr) : nameof(end)); + long count = this.Count; + byte* next = ptr + count; + if (next > end) throw new ArgumentException("Slice is too large to fit in the specified output buffer"); + if (count > 0) + { + fixed (byte* bytes = &DangerousGetPinnableReference()) + { + Buffer.MemoryCopy(bytes, ptr, count, count); + } + } + return next; + } + + /// Try to copy this slice into memory and return the advanced cursor, if the destination is large enough + /// Pointer where to copy this slice + /// Pointer to the next byte after the last availble position in the output buffer + /// Point to the advanced memory position, or null if the destination buffer was too small + [CanBeNull] + public unsafe byte* TryCopyToUnsafe([NotNull] byte* ptr, [NotNull] byte* end) + { + if (ptr == null | end == null) throw new ArgumentNullException(ptr == null ? nameof(ptr) : nameof(end)); + long count = this.Count; + byte* next = ptr + count; + if (next > end) return null; + if (count > 0) + { + fixed (byte* bytes = &DangerousGetPinnableReference()) + { + Buffer.MemoryCopy(bytes, ptr, count, count); + } + } + return next; + } + + /// Copy this slice into memory and return the advanced cursor + /// Pointer where to copy this slice + /// Capacity of the output buffer + /// Copy will fail if there is not enough space in the output buffer (ie: if it would writer at or after ) + public IntPtr CopyTo(IntPtr ptr, long count) + { + unsafe + { + byte* p = (byte*) ptr.ToPointer(); + return (IntPtr) CopyToUnsafe(p, p + count); + } + } + + /// Copy this slice into memory and return the advanced cursor + /// Pointer where to copy this slice + /// Capacity of the output buffer + /// Updated pointer after the copy, of if the destination buffer was too small + public bool TryCopyTo(IntPtr ptr, long count) + { + unsafe + { + byte* p = (byte*) ptr.ToPointer(); + return null != TryCopyToUnsafe(p, p + count); + } + } + + /// Retrieves a substring from this instance. 
The substring starts at a specified character position.
+ /// The starting position of the substring. Positive values means from the start, negative values means from the end
+ /// A slice that is equivalent to the substring that begins at (from the start or the end depending on the sign) in this instance, or Slice.Empty if is equal to the length of the slice.
+ /// The substring does not copy the original data, and refers to the same buffer as the original slice. Any change to the parent slice's buffer will be seen by the substring. You must call Memoize() on the resulting substring if you want a copy
+ /// {"ABCDE"}.Substring(0) => {"ABCDE"}
+ /// {"ABCDE"}.Substring(1) => {"BCDE"}
+ /// {"ABCDE"}.Substring(-2) => {"DE"}
+ /// {"ABCDE"}.Substring(5) => Slice.Empty
+ /// Slice.Empty.Substring(0) => Slice.Empty
+ /// Slice.Nil.Substring(0) => Slice.Empty
+ ///
+ /// indicates a position not within this instance, or is less than zero
+ [Pure]
+ public Slice Substring(int offset)
+ {
+ int len = this.Count;
+
+ // negative values mean from the end
+ if (offset < 0) offset += this.Count;
+ //REVIEW: TODO: get rid of negative indexing, and create a different "substring from the end" method?
+
+ // bound check
+ if ((uint) offset > (uint) len) UnsafeHelpers.Errors.ThrowOffsetOutsideSlice();
+
+ int r = len - offset;
+ return r != 0 ? new Slice(this.Array, this.Offset + offset, r) : Slice.Empty;
+ }
+
+ /// Retrieves a substring from this instance. The substring starts at a specified character position and has a specified length.
+ /// The starting position of the substring. Positive values means from the start, negative values means from the end
+ /// Number of bytes in the substring
+ /// A slice that is equivalent to the substring of length that begins at (from the start or the end depending on the sign) in this instance, or Slice.Empty if count is zero.
+ /// The substring does not copy the original data, and refers to the same buffer as the original slice. 
Any change to the parent slice's buffer will be seen by the substring. You must call Memoize() on the resulting substring if you want a copy + /// {"ABCDE"}.Substring(0, 3) => {"ABC"} + /// {"ABCDE"}.Substring(1, 3} => {"BCD"} + /// {"ABCDE"}.Substring(-2, 2} => {"DE"} + /// Slice.Empty.Substring(0, 0) => Slice.Empty + /// Slice.Nil.Substring(0, 0) => Slice.Emtpy + /// + /// plus indicates a position not within this instance, or or is less than zero + [Pure] + public Slice Substring(int offset, int count) + { + if (count == 0) return Slice.Empty; + int len = this.Count; + + // bound check + if ((uint) offset >= (uint) len || (uint) count > (uint)(len - offset)) UnsafeHelpers.Errors.ThrowOffsetOutsideSlice(); + + return new Slice(this.Array, this.Offset + offset, count); + } + + /// Truncate the slice if its size exceeds the specified length. + /// Maximum size. + /// Slice of at most the specified size, or smaller if the original slice does not exceed the size. + /// + /// Smaller than maxSize is unmodified{"Hello, World!"}.Truncate(20) => {"Hello, World!"} + /// Larger than maxSize is truncated{"Hello, World!"}.Truncate(5) => {"Hello"} + /// Truncating to 0 returns Empty (or Nil){"Hello, World!"}.Truncate(0) == Slice.Empty + /// + [Pure] + public Slice Truncate([Positive] int maxSize) + { + //note: the only difference with Substring(0, maxSize) is that we don't throw if the slice is smaller than ! + Contract.Positive(maxSize, nameof(maxSize)); + + if (maxSize == 0) return this.Array == null ? Nil : Empty; + return this.Count <= maxSize ? this : new Slice(this.Array, this.Offset, maxSize); + } + + /// Returns a slice array that contains the sub-slices in this instance that are delimited by the specified separator + /// The slice that delimits the sub-slices in this instance. + /// to omit empty array elements from the array returned; or to include empty array elements in the array returned. 
+ /// An array whose elements contains the sub-slices in this instance that are delimited by the value of . + [Pure] + public Slice[] Split(Slice separator, StringSplitOptions options = StringSplitOptions.None) + { + return Split(this, separator, options); + } + + [Pure] + public Slice[] Split(int stride) + { + return Split(this, stride); + } + + /// Reports the zero-based index of the first occurence of the specified slice in this instance. + /// The slice to seek + /// The zero-based index of if that slice is found, or -1 if it is not. If is , then the return value is -1. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public int IndexOf(Slice value) + { + return Find(this, value); + } + + /// Reports the zero-based index of the first occurence of the specified slice in this instance. The search starts at a specified position. + /// The slice to seek + /// The search starting position + /// The zero-based index of if that slice is found, or -1 if it is not. If is , then the return value is startIndex + [Pure] + public int IndexOf(Slice value, int startIndex) + { + return Substring(startIndex).IndexOf(value); + } + + /// Reports the zero-based index of the first occurence of the specified byte in this instance. + /// The byte to seek + /// The zero-based index of if that slice is found, or -1 if it is not. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public int IndexOf(byte value) + { + return Find(this, value); + } + + /// Reports the zero-based index of the first occurence of the specified byte in this instance. The search starts at a specified position. + /// The byte to seek + /// The search starting position + /// The zero-based index of if that byte is found, or -1 if it is not. 
+ [Pure] + public int IndexOf(byte value, int startIndex) + { + int len = this.Count; + if ((uint) startIndex >= (uint) len) UnsafeHelpers.Errors.ThrowOffsetOutsideSlice(); + + var tmp = new Slice(this.Array, this.Offset + startIndex, len - startIndex); + int idx = Find(tmp, value); + return idx >= 0 ? checked(startIndex + idx) : -1; + } + + /// Determines whether the beginning of this slice instance matches a specified slice. + /// The slice to compare + /// true if matches the beginning of this slice; otherwise, false + [Pure] + public bool StartsWith(Slice value) + { + if (!value.HasValue) throw ThrowHelper.ArgumentNullException(nameof(value)); + + int count = value.Count; + + // any strings starts with the empty string + if (count == 0) return true; + + // prefix cannot be bigger + if ((uint) count > (uint) this.Count) return false; + + return UnsafeHelpers.SameBytes(this.Array, this.Offset, value.Array, value.Offset, count); + } + + /// Determines whether the end of this slice instance matches a specified slice. + /// The slice to compare to the substring at the end of this instance. 
+ /// true if matches the end of this slice; otherwise, false + [Pure] + public bool EndsWith(Slice value) + { + if (!value.HasValue) throw ThrowHelper.ArgumentNullException(nameof(value)); + + // any strings ends with the empty string + int count = value.Count; + if (count == 0) return true; + + // suffix cannot be bigger + int len = this.Count; + if ((uint) count > (uint) len) return false; + + return UnsafeHelpers.SameBytes(this.Array, this.Offset + (len - count), value.Array, value.Offset, count); + } + + /// Equivalent of StartsWith, but the returns false if both slices are identical + [Pure] + public bool PrefixedBy(Slice parent) + { + int count = parent.Count; + + // empty is a parent of everyone + if (count == 0) return true; + + // we must have at least one more byte then the parent + if (this.Count <= count) return false; + + // must start with the same bytes + return UnsafeHelpers.SameBytes(parent.Array, parent.Offset, this.Array, this.Offset, count); + } + + /// Equivalent of EndsWith, but the returns false if both slices are identical + [Pure] + public bool SuffixedBy(Slice parent) + { + // empty is a parent of everyone + int count = parent.Count; + if (count == 0) return true; + + // empty is not a child of anything + int len = this.Count; + if (len == 0) return false; + + // we must have at least one more byte then the parent + if (len <= count) return false; + + // must start with the same bytes + return UnsafeHelpers.SameBytes(parent.Array, parent.Offset + (len - count), this.Array, this.Offset, count); + } + + /// Append/Merge a slice at the end of the current slice + /// Slice that must be appended + /// Merged slice if both slices are contigous, or a new slice containg the content of the current slice, followed by the tail slice. Or Slice.Empty if both parts are nil or empty + [Pure] + public Slice Concat(Slice tail) + { + if (tail.Count == 0) return this.Count > 0 ? 
this: Slice.Empty; + if (this.Count == 0) return tail; + + tail.EnsureSliceIsValid(); + this.EnsureSliceIsValid(); + + // special case: adjacent segments ? + if (object.ReferenceEquals(this.Array, tail.Array) && this.Offset + this.Count == tail.Offset) + { + return new Slice(this.Array, this.Offset, this.Count + tail.Count); + } + + byte[] tmp = new byte[this.Count + tail.Count]; + UnsafeHelpers.CopyUnsafe(tmp, 0, this.Array, this.Offset, this.Count); + UnsafeHelpers.CopyUnsafe(tmp, this.Count, tail.Array, tail.Offset, tail.Count); + return new Slice(tmp); + } + + /// Append an array of slice at the end of the current slice, all sharing the same buffer + /// Slices that must be appended + /// Array of slices (for all keys) that share the same underlying buffer + [Pure, NotNull] + public Slice[] ConcatRange([NotNull] Slice[] slices) + { + Contract.NotNull(slices, nameof(slices)); + EnsureSliceIsValid(); + + // pre-allocate by computing final buffer capacity + var prefixSize = this.Count; + var capacity = slices.Sum((slice) => prefixSize + slice.Count); + var writer = new SliceWriter(capacity); + var next = new List(slices.Length); + + //TODO: use multiple buffers if item count is huge ? + + foreach (var slice in slices) + { + writer.WriteBytes(this); + writer.WriteBytes(slice); + next.Add(writer.Position); + } + + return SplitIntoSegments(writer.Buffer, 0, next); + } + + /// Append a sequence of slice at the end of the current slice, all sharing the same buffer + /// Slices that must be appended + /// Array of slices (for all keys) that share the same underlying buffer + [Pure, NotNull] + public Slice[] ConcatRange([NotNull] IEnumerable slices) + { + Contract.NotNull(slices, nameof(slices)); + + // use optimized version for arrays + if (slices is Slice[] array) return ConcatRange(array); + + var next = new List(); + var writer = default(SliceWriter); + + //TODO: use multiple buffers if item count is huge ? 
+ + foreach (var slice in slices) + { + writer.WriteBytes(this); + writer.WriteBytes(slice); + next.Add(writer.Position); + } + + return SplitIntoSegments(writer.Buffer, 0, next); + + } + + /// Split a buffer containing multiple contiguous segments into an array of segments + /// Buffer containing all the segments + /// Offset of the start of the first segment + /// Array containing, for each segment, the offset of the following segment + /// Array of segments + /// SplitIntoSegments("HelloWorld", 0, [5, 10]) => [{"Hello"}, {"World"}] + [NotNull] + public static Slice[] SplitIntoSegments([NotNull] byte[] buffer, int start, [NotNull] List endOffsets) + { + Contract.Requires(buffer != null && endOffsets != null); + var result = new Slice[endOffsets.Count]; + int i = 0; + int p = start; + foreach (var end in endOffsets) + { + result[i++] = new Slice(buffer, p, end - p); + p = end; + } + + return result; + } + + /// Concatenate two slices together + public static Slice Concat(Slice a, Slice b) + { + return a.Concat(b); + } + + /// Concatenate three slices together + public static Slice Concat(Slice a, Slice b, Slice c) + { + int count = a.Count + b.Count + c.Count; + if (count == 0) return Slice.Empty; + var writer = new SliceWriter(count); + writer.WriteBytes(a); + writer.WriteBytes(b); + writer.WriteBytes(c); + return writer.ToSlice(); + } + + /// Concatenate an array of slices into a single slice + public static Slice Concat(params Slice[] args) + { + int count = 0; + for (int i = 0; i < args.Length; i++) count += args[i].Count; + if (count == 0) return Slice.Empty; + var writer = new SliceWriter(count); + for (int i = 0; i < args.Length; i++) writer.WriteBytes(args[i]); + return writer.ToSlice(); + } + + /// Adds a prefix to a list of slices + /// Prefix to add to all the slices + /// List of slices to process + /// Array of slice that all start with and followed by the corresponding entry in + /// This method is optmized to reduce the amount of memory allocated + 
[Pure, NotNull] + public static Slice[] ConcatRange(Slice prefix, IEnumerable slices) + { + Contract.NotNull(slices, nameof(slices)); + + if (prefix.IsNullOrEmpty) + { // nothing to do, but we still need to copy the array + return slices.ToArray(); + } + + Slice[] res; + Slice[] arr; + ICollection coll; + + if ((arr = slices as Slice[]) != null) + { // fast-path for arrays (most frequent with range reads) + + // we wil use a SliceBuffer to store all the keys produced in as few byte[] arrays as needed + + // precompute the exact size needed + int totalSize = prefix.Count * arr.Length; + for (int i = 0; i < arr.Length; i++) totalSize += arr[i].Count; + var buf = new SliceBuffer(Math.Min(totalSize, 64 * 1024)); + + res = new Slice[arr.Length]; + for (int i = 0; i < arr.Length; i++) + { + res[i] = buf.Intern(prefix, arr[i], aligned: false); + } + } + else if ((coll = slices as ICollection) != null) + { // collection (size known) + + //TODO: also use a SliceBuffer since we could precompute the total size... + + res = new Slice[coll.Count]; + int p = 0; + foreach (var suffix in coll) + { + res[p++] = prefix.Concat(suffix); + } + } + else + { // streaming sequence (size unknown) + + //note: we can only scan the list once, so would be no way to get a sensible value for the buffer's page size + var list = new List(); + foreach (var suffix in slices) + { + list.Add(prefix.Concat(suffix)); + } + res = list.ToArray(); + } + + return res; + } + + /// Reports the zero-based index of the first occurrence of the specified slice in this source. + /// The slice Input slice + /// The slice to seek + /// Offset of the match if positive, or no occurence was found if negative + [Pure] + public static int Find(Slice source, Slice value) + { + const int NOT_FOUND = -1; + + source.EnsureSliceIsValid(); + source.EnsureSliceIsValid(); + + int m = value.Count; + if (m == 0) return 0; + + int n = source.Count; + if (n == 0) return NOT_FOUND; + + if (m == n) return source.Equals(value) ? 
0 : NOT_FOUND; + if (m <= n) + { + byte[] src = source.Array; + int p = source.Offset; + byte firstByte = value[0]; + + // note: this is a very simplistic way to find a value, and is optimized for the case where the separator is only one byte (most common) + while (n-- > 0) + { + if (src[p++] == firstByte) + { // possible match ? + if (m == 1 || UnsafeHelpers.SameBytesUnsafe(src, p, value.Array, value.Offset + 1, m - 1)) + { + return p - source.Offset - 1; + } + } + } + } + + return NOT_FOUND; + } + + /// Reports the zero-based index of the first occurrence of the specified byte in this source. + /// The slice Input slice + /// The byte to find + /// Offset of the match if positive, or the byte was not found if negative + [Pure] + public static int Find(Slice source, byte value) + { + source.EnsureSliceIsValid(); + + const int NOT_FOUND = -1; + int n = source.Count; + if (n == 0) return NOT_FOUND; + unsafe + { + //TODO: Optimize this! + fixed (byte* ptr = &source.DangerousGetPinnableReference()) + { + byte* inp = ptr; + while (n-- > 0) + { + if (*inp == value) + { // match + return checked((int)(inp - ptr)); + } + ++inp; + } + } + } + return NOT_FOUND; + } + + /// Concatenates all the elements of a slice array, using the specified separator between each element. + /// The slice to use as a separator. Can be empty. + /// An array that contains the elements to concatenate. + /// A slice that consists of the elements in a value delimited by the slice. If is an empty array, the method returns . + /// If is null. + public static Slice Join(Slice separator, [NotNull] Slice[] values) + { + Contract.NotNull(values, nameof(values)); + + int count = values.Length; + if (count == 0) return Slice.Empty; + if (count == 1) return values[0]; + return Join(separator, values, 0, count); + } + + /// Concatenates the specified elements of a slice array, using the specified separator between each element. + /// The slice to use as a separator. Can be empty. 
+ /// An array that contains the elements to concatenate. + /// The first element in to use. + /// The number of elements of to use. + /// A slice that consists of the slices in delimited by the slice. -or- if is zero, has no elements, or and all the elements of are . + /// If is null. + /// If or is less than zero. -or- plus is greater than the number of elements in . + public static Slice Join(Slice separator, [NotNull] Slice[] values, int startIndex, int count) + { + // Note: this method is modeled after String.Join() and should behave the same + // - Only difference is that Slice.Nil and Slice.Empty are equivalent (either for separator, or for the elements of the array) + + Contract.NotNull(values, nameof(values)); + + if (startIndex < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(startIndex), startIndex, "Start index must be a positive integer"); + if (count < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), count, "Count must be a positive integer"); + if (startIndex > values.Length - count) throw ThrowHelper.ArgumentOutOfRangeException(nameof(startIndex), startIndex, "Start index must fit within the array"); + + if (count == 0) return Slice.Empty; + if (count == 1) return values[startIndex]; + + int size = 0; + for (int i = 0; i < values.Length; i++) size += values[i].Count; + size += (values.Length - 1) * separator.Count; + + // if the size overflows, that means that the resulting buffer would need to be >= 2 GB, which is not possible! 
+ if (size < 0) throw new OutOfMemoryException(); + + //note: we want to make sure the buffer of the writer will be the exact size (so that we can use the result as a byte[] without copying again) + var tmp = new byte[size]; + var writer = new SliceWriter(tmp); + for (int i = 0; i < values.Length; i++) + { + if (i > 0) writer.WriteBytes(separator); + writer.WriteBytes(values[i]); + } + Contract.Assert(writer.Buffer.Length == size); + return writer.ToSlice(); + } + + /// Concatenates the specified elements of a slice sequence, using the specified separator between each element. + /// The slice to use as a separator. Can be empty. + /// A sequence will return the elements to concatenate. + /// A slice that consists of the slices in delimited by the slice. -or- if has no elements, or and all the elements of are . + /// If is null. + public static Slice Join(Slice separator, [NotNull] IEnumerable values) + { + Contract.NotNull(values, nameof(values)); + var array = (values as Slice[]) ?? values.ToArray(); + return Join(separator, array, 0, array.Length); + } + + /// Concatenates the specified elements of a slice array, using the specified separator between each element. + /// The slice to use as a separator. Can be empty. + /// An array that contains the elements to concatenate. + /// The first element in to use. + /// The number of elements of to use. + /// A byte array that consists of the slices in delimited by the slice. -or- an emtpy array if is zero, has no elements, or and all the elements of are . + /// If is null. + /// If or is less than zero. -or- plus is greater than the number of elements in . 
+ [NotNull] + public static byte[] JoinBytes(Slice separator, [NotNull] Slice[] values, int startIndex, int count) + { + // Note: this method is modeled after String.Join() and should behave the same + // - Only difference is that Slice.Nil and Slice.Empty are equivalent (either for separator, or for the elements of the array) + + Contract.NotNull(values, nameof(values)); + //REVIEW: support negative indexing ? + if (startIndex < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(startIndex), startIndex, "Start index must be a positive integer"); + if (count < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), count, "Count must be a positive integer"); + if (startIndex > values.Length - count) throw ThrowHelper.ArgumentOutOfRangeException(nameof(startIndex), startIndex, "Start index must fit within the array"); + + if (count == 0) return System.Array.Empty(); + if (count == 1) return values[startIndex].GetBytes() ?? System.Array.Empty(); + + int size = 0; + for (int i = 0; i < count; i++) size = checked(size + values[startIndex + i].Count); + size = checked(size + (count - 1) * separator.Count); + + // if the size overflows, that means that the resulting buffer would need to be >= 2 GB, which is not possible! + if (size < 0) throw new OutOfMemoryException(); + + //note: we want to make sure the buffer of the writer will be the exact size (so that we can use the result as a byte[] without copying again) + var tmp = new byte[size]; + int p = 0; + for (int i = 0; i < count; i++) + { + if (i > 0) separator.WriteTo(tmp, ref p); + values[startIndex + i].WriteTo(tmp, ref p); + } + Contract.Assert(p == tmp.Length); + return tmp; + } + + /// Concatenates the specified elements of a slice sequence, using the specified separator between each element. + /// The slice to use as a separator. Can be empty. + /// A sequence will return the elements to concatenate. + /// A byte array that consists of the slices in delimited by the slice. 
-or- an empty array if has no elements, or and all the elements of are . + /// If is null. + [NotNull] + public static byte[] JoinBytes(Slice separator, [NotNull] IEnumerable values) + { + Contract.NotNull(values, nameof(values)); + var array = (values as Slice[]) ?? values.ToArray(); + return JoinBytes(separator, array, 0, array.Length); + } + + /// Returns a slice array that contains the sub-slices in that are delimited by . A parameter specifies whether to return empty array elements. + /// Input slice that must be split into sub-slices + /// Separator that delimits the sub-slices in . Cannot be empty or nil + /// to omit empty array alements from the array returned; or to include empty array elements in the array returned. + /// An array whose elements contain the sub-slices that are delimited by . + /// If is empty, or if is not one of the values. + /// If does not contain the delimiter, the returned array consists of a single element that repeats the input, or an empty array if input is itself empty. + /// To reduce memory usage, the sub-slices returned in the array will all share the same underlying buffer of the input slice. 
+ [NotNull] + public static Slice[] Split(Slice input, Slice separator, StringSplitOptions options = StringSplitOptions.None) + { + // this method is made to behave the same way as String.Split(), especially the following edge cases + // - Empty.Split(..., StringSplitOptions.None) => { Empty } + // - Empty.Split(..., StringSplitOptions.RemoveEmptyEntries) => { } + // differences: + // - If input is Nil, it is considered equivalent to Empty + // - If separator is Nil or Empty, the method throws + + var list = new List(); + + if (separator.Count <= 0) throw ThrowHelper.ArgumentException(nameof(separator), "Separator must have at least one byte"); + if (options < StringSplitOptions.None || options > StringSplitOptions.RemoveEmptyEntries) throw ThrowHelper.ArgumentException(nameof(options)); + + bool skipEmpty = options.HasFlag(StringSplitOptions.RemoveEmptyEntries); + if (input.Count == 0) + { + return skipEmpty ? System.Array.Empty() : new[] { Slice.Empty }; + } + + while (input.Count > 0) + { + int p = Find(input, separator); + if (p < 0) + { // last chunk + break; + } + if (p == 0) + { // empty chunk + if (!skipEmpty) list.Add(Slice.Empty); + } + else + { + list.Add(input.Substring(0, p)); + } + // note: we checked earlier that separator.Count > 0, so we are guaranteed to advance the cursor + input = input.Substring(p + separator.Count); + } + + if (input.Count > 0 || !skipEmpty) + { + list.Add(input); + } + + return list.ToArray(); + } + + /// Returns a slice array that contains the sub-slices in by cutting fixed-length chunks or size . + /// Input slice that must be split into sub-slices + /// Size of each chunk that will be cut from . Must be greater or equal to 1. + /// + /// An array whose elements contain the sub-slices, each of size , except the last slice that may be smaller if the length of is not a multiple of . + /// If is then the array will be empty. + /// If it is then the array will we of length 1 and contain the empty slice. 
+ /// + /// To reduce memory usage, the sub-slices returned in the array will all share the same underlying buffer of the input slice. + [NotNull] + public static Slice[] Split(Slice input, int stride) + { + Contract.GreaterOrEqual(stride, 1, nameof (stride)); + + if (input.IsNull) return System.Array.Empty(); + + if (input.Count <= stride) + { // single element + return new [] { input }; + } + + // how many slices? (last one may be incomplete) + int count = (input.Count + (stride - 1)) / stride; + var result = new Slice[count]; + + int p = 0; + int r = input.Count; + for(int i = 0; i < result.Length; i++) + { + Contract.Assert(r >= 0); + result[i] = new Slice(input.Array, input.Offset + p, Math.Min(r, stride)); + p += stride; + r -= stride; + } + + return result; + } + + /// Returns the first key lexicographically that does not have the passed in as a prefix + /// Slice to increment + /// New slice that is guaranteed to be the first key lexicographically higher than which does not have as a prefix + /// If the last byte is already equal to 0xFF, it will rollover to 0x00 and the next byte will be incremented. 
+ /// If the Slice is equal to Slice.Nil + /// If the Slice is the empty string or consists only of 0xFF bytes + /// + /// Slice.Increment(Slice.FromString("ABC")) => "ABD" + /// Slice.Increment(Slice.FromHexa("01 FF")) => { 02 } + /// + public static Slice Increment(Slice slice) + { + if (slice.IsNull) throw ThrowHelper.ArgumentException(nameof(slice), "Cannot increment null buffer"); + + int lastNonFfByte; + var tmp = slice.GetBytesOrEmpty(); + for (lastNonFfByte = tmp.Length - 1; lastNonFfByte >= 0; --lastNonFfByte) + { + if (tmp[lastNonFfByte] != 0xFF) + { + ++tmp[lastNonFfByte]; + break; + } + } + + if (lastNonFfByte < 0) + { + throw ThrowHelper.ArgumentException(nameof(slice), "Cannot increment key"); //TODO: PoneyDB.Errors.CannotIncrementKey(); + } + + return new Slice(tmp, 0, lastNonFfByte + 1); + } + + /// Merge an array of keys with a same prefix, all sharing the same buffer + /// Prefix shared by all keys + /// Array of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [NotNull] + public static Slice[] Merge(Slice prefix, [NotNull] Slice[] keys) + { + Contract.NotNull(keys, nameof(keys)); + + //REVIEW: merge this code with Slice.ConcatRange! + + if (keys.Length == 0) return System.Array.Empty(); + + // we can pre-allocate exactly the buffer by computing the total size of all keys + int size = keys.Length * prefix.Count; + for (int i = 0; i < keys.Length; i++) size += keys[i].Count; + + var writer = new SliceWriter(size); + var next = new List(keys.Length); + + //TODO: use multiple buffers if item count is huge ? 
+ bool hasPrefix = prefix.IsPresent; + foreach (var key in keys) + { + if (hasPrefix) writer.WriteBytes(prefix); + writer.WriteBytes(key); + next.Add(writer.Position); + } + + return SplitIntoSegments(writer.Buffer, 0, next); + } + + /// Merge a sequence of keys with a same prefix, all sharing the same buffer + /// Prefix shared by all keys + /// Sequence of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [NotNull] + public static Slice[] Merge(Slice prefix, [NotNull] IEnumerable keys) + { + Contract.NotNull(keys, nameof(keys)); + + //REVIEW: merge this code with Slice.ConcatRange! + + // use optimized version for arrays + if (keys is Slice[] array) return Merge(prefix, array); + + // pre-allocate with a count if we can get one... + var next = keys is ICollection coll ? new List(coll.Count) : new List(); + var writer = default(SliceWriter); + + //TODO: use multiple buffers if item count is huge ? + + bool hasPrefix = prefix.IsPresent; + foreach (var key in keys) + { + if (hasPrefix) writer.WriteBytes(prefix); + writer.WriteBytes(key); + next.Add(writer.Position); + } + + return SplitIntoSegments(writer.Buffer, 0, next); + } + + /// Creates a new slice that contains the same byte repeated + /// Byte that will fill the slice + /// Number of bytes + /// New slice that contains times the byte . + public static Slice Repeat(byte value, int count) + { + Contract.Positive(count, nameof(count), "count"); + if (count == 0) return Slice.Empty; + + var res = new byte[count]; + UnsafeHelpers.Fill(res, 0, count, value); + return new Slice(res); + } + + /// Creates a new slice that contains the same byte repeated + /// ASCII character (between 0 and 255) that will fill the slice. If is greater than 0xFF, only the 8 lowest bits will be used + /// Number of bytes + /// New slice that contains times the byte . 
+ public static Slice Repeat(char value, int count) + { + Contract.Positive(count, nameof(count), "count"); + if (count == 0) return Slice.Empty; + + var res = new byte[count]; + UnsafeHelpers.Fill(res, 0, count, (byte) value); + return new Slice(res); + } + + /// Create a new slice filled with random bytes taken from a random number generator + /// Pseudo random generator to use (needs locking if instance is shared) + /// Number of random bytes to generate + /// Slice of bytes taken from + /// Warning: is not thread-safe ! If the instance is shared between threads, then it needs to be locked before calling this method. + public static Slice Random([NotNull] Random prng, int count) + { + Contract.NotNull(prng, nameof(prng)); + if (count < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), count, "Count cannot be negative"); + if (count == 0) return Slice.Empty; + + var bytes = new byte[count]; + prng.NextBytes(bytes); + return new Slice(bytes, 0, count); + } + + /// Create a new slice filled with random bytes taken from a cryptographic random number generator + /// Random generator to use (needs locking if instance is shared) + /// Number of random bytes to generate + /// If true, produce a sequence of non-zero bytes. + /// Slice of bytes taken from + /// Warning: All RNG implementations may not be thread-safe ! If the instance is shared between threads, then it may need to be locked before calling this method. 
+ public static Slice Random([NotNull] System.Security.Cryptography.RandomNumberGenerator rng, int count, bool nonZeroBytes = false) + { + Contract.NotNull(rng, nameof(rng)); + if (count < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), count, "Count cannot be negative"); + if (count == 0) return Slice.Empty; + + var bytes = new byte[count]; + + if (nonZeroBytes) + rng.GetNonZeroBytes(bytes); + else + rng.GetBytes(bytes); + + return new Slice(bytes, 0, count); + } + + /// Returns the lowest of two keys + /// First key + /// Second key + /// The key that is BEFORE the other, using lexicographical order + /// If both keys are equal, then is returned + public static Slice Min(Slice a, Slice b) + { + return a.CompareTo(b) <= 0 ? a : b; + } + + /// Returns the lowest of three keys + /// First key + /// Second key + /// Second key + /// The key that is BEFORE the other two, using lexicographical order + public static Slice Min(Slice a, Slice b, Slice c) + { + return a.CompareTo(b) <= 0 + ? (a.CompareTo(c) <= 0 ? a : c) + : (b.CompareTo(c) <= 0 ? b : c); + } + + public static Slice Min(params Slice[] values) + { + switch (values.Length) + { + case 0: return Slice.Nil; + case 1: return values[0]; + case 2: return Min(values[0], values[1]); + case 3: return Min(values[0], values[1], values[3]); + default: + { + Slice min = values[0]; + for (int i = 1; i < values.Length; i++) + { + if (values[i].CompareTo(min) < 0) min = values[i]; + } + return min; + } + } + } + + /// Returns the highest of two keys + /// First key + /// Second key + /// The key that is AFTER the other, using lexicographical order + /// If both keys are equal, then is returned + public static Slice Max(Slice a, Slice b) + { + return a.CompareTo(b) >= 0 ? 
a : b; + } + + /// Returns the highest of three keys + /// First key + /// Second key + /// Second key + /// The key that is AFTER the other two, using lexicographical order + public static Slice Max(Slice a, Slice b, Slice c) + { + return a.CompareTo(b) >= 0 + ? (a.CompareTo(c) >= 0 ? a : c) + : (b.CompareTo(c) >= 0 ? b : c); + } + + public static Slice Max(params Slice[] values) + { + switch (values.Length) + { + case 0: return Slice.Nil; + case 1: return values[0]; + case 2: return Max(values[0], values[1]); + case 3: return Max(values[0], values[1], values[3]); + default: + { + Slice max = values[0]; + for (int i = 1; i < values.Length; i++) + { + if (values[i].CompareTo(max) > 0) max = values[i]; + } + return max; + } + } + } + + #region Slice arithmetics... + + /// Compare two slices for equality + /// True if the slices contains the same bytes + public static bool operator ==(Slice a, Slice b) + { + return a.Equals(b); + } + + /// Compare two slices for inequality + /// True if the slices do not contain the same bytes + public static bool operator !=(Slice a, Slice b) + { + return !a.Equals(b); + } + + /// Compare two slices + /// True if is lexicographically less than ; otherwise, false. + public static bool operator <(Slice a, Slice b) + { + return a.CompareTo(b) < 0; + } + + /// Compare two slices + /// True if is lexicographically less than or equal to ; otherwise, false. + public static bool operator <=(Slice a, Slice b) + { + return a.CompareTo(b) <= 0; + } + + /// Compare two slices + /// True if is lexicographically greater than ; otherwise, false. + public static bool operator >(Slice a, Slice b) + { + return a.CompareTo(b) > 0; + } + + /// Compare two slices + /// True if is lexicographically greater than or equal to ; otherwise, false. 
public static bool operator >=(Slice a, Slice b)
{
	return a.CompareTo(b) >= 0;
}

/// <summary>Append/Merge two slices together</summary>
/// <param name="a">First slice</param>
/// <param name="b">Second slice</param>
/// <returns>Merged slice if both slices are contiguous, or a new slice containing the content of the first slice, followed by the second</returns>
public static Slice operator +(Slice a, Slice b)
{
	return a.Concat(b);
}

/// <summary>Appends a byte at the end of the slice</summary>
/// <param name="a">First slice</param>
/// <param name="b">Byte to append at the end</param>
/// <returns>New slice with the byte appended</returns>
public static Slice operator +(Slice a, byte b)
{
	if (a.Count == 0) return Slice.FromByte(b);
	var tmp = new byte[a.Count + 1];
	UnsafeHelpers.CopyUnsafe(tmp, 0, a.Array, a.Offset, a.Count);
	tmp[a.Count] = b;
	return new Slice(tmp);
}

/// <summary>Remove <paramref name="n"/> bytes at the end of slice <paramref name="s"/></summary>
/// <returns>Smaller slice</returns>
public static Slice operator -(Slice s, int n)
{
	if (n < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(n), "Cannot subtract a negative number from a slice");
	// BUGFIX: typo in the error message ("Cannout substract")
	if (n > s.Count) throw ThrowHelper.ArgumentOutOfRangeException(nameof(n), "Cannot subtract more bytes than the slice contains");

	if (n == 0) return s;
	if (n == s.Count) return Slice.Empty;

	return new Slice(s.Array, s.Offset, s.Count - n);
}

// note: We also need overloads with Nullable<Slice> to be able to do things like "if (slice == null)", "if (slice != null)" or "if (null != slice)".
// For structs that have "==" / "!=" operators, the compiler will think that when you write "slice == null", you really mean "(Slice?)slice == default(Slice?)",
// and that would ALWAYS be false if you don't have specialized overloads to intercept it.

/// <summary>Determines whether two specified instances of <see cref="Slice"/> are equal</summary>
public static bool operator ==(Slice? a, Slice? b)
{
	return a.GetValueOrDefault().Equals(b.GetValueOrDefault());
}

/// <summary>Determines whether two specified instances of <see cref="Slice"/> are not equal</summary>
public static bool operator !=(Slice? a, Slice? b)
{
	return !a.GetValueOrDefault().Equals(b.GetValueOrDefault());
}

/// <summary>Determines whether one specified <see cref="Slice"/> is less than another specified <see cref="Slice"/>.</summary>
public static bool operator <(Slice? a, Slice? b)
{
	return a.GetValueOrDefault() < b.GetValueOrDefault();
}

/// <summary>Determines whether one specified <see cref="Slice"/> is less than or equal to another specified <see cref="Slice"/>.</summary>
public static bool operator <=(Slice? a, Slice? b)
{
	return a.GetValueOrDefault() <= b.GetValueOrDefault();
}

/// <summary>Determines whether one specified <see cref="Slice"/> is greater than another specified <see cref="Slice"/>.</summary>
public static bool operator >(Slice? a, Slice? b)
{
	return a.GetValueOrDefault() > b.GetValueOrDefault();
}

/// <summary>Determines whether one specified <see cref="Slice"/> is greater than or equal to another specified <see cref="Slice"/>.</summary>
public static bool operator >=(Slice? a, Slice? b)
{
	return a.GetValueOrDefault() >= b.GetValueOrDefault();
}

/// <summary>Concatenates two <see cref="Slice"/> together.</summary>
public static Slice operator +(Slice? a, Slice? b)
{
	// note: makes "slice + null" work!
	return a.GetValueOrDefault().Concat(b.GetValueOrDefault());
}

#endregion

/// <summary>Returns a printable representation of the key</summary>
/// <remarks>You can roundtrip the result of calling slice.ToString() by passing it to <see cref="Unescape"/> and get back the original slice.</remarks>
public override string ToString()
{
	return Dump(this);
}

/// <summary>Formats the slice using the specified format specifier</summary>
public string ToString(string format)
{
	return ToString(format, null);
}

/// <summary>Formats the slice using the specified encoding</summary>
/// <param name="format">A single format specifier that indicates how to format the value of this Slice. The parameter can be "N", "D", "X", or "P". If format is null or an empty string (""), "D" is used. A lower case character will usually produce lowercased hexadecimal letters.</param>
/// <param name="provider">This parameter is not used</param>
/// Remarks:
/// - format "D" (default) produces a round-trippable version of the slice, using &lt;XX&gt; tokens for non-printable bytes.
/// - format "N" (or "n") produces a compact hexadecimal string (without separators).
/// - format "X" (or "x") produces an hexadecimal string with a space between each byte.
/// - format "P" (or "p") is the equivalent of calling <see cref="PrettyPrint()"/>.
public string ToString(string format, IFormatProvider provider)
{
	switch (format ?? "D")
	{
		case "D":
		case "d":
			return Dump(this);

		case "N":
			return ToHexaString(lower: false);
		case "n":
			return ToHexaString(lower: true);

		case "X":
			return ToHexaString(' ', lower: false);
		case "x":
			return ToHexaString(' ', lower: true);

		case "P":
		case "p":
			return PrettyPrint();

		case "K":
		case "k":
			return PrettyPrint(); //TODO: Key ! (cf USlice)

		case "V":
		case "v":
			return PrettyPrint(); //TODO: Value ! (cf USlice)

		default:
			throw new FormatException("Format is invalid or not supported");
	}
}

/// <summary>Returns a printable representation of a key</summary>
/// <remarks>This may not be efficient, so it should only be used for testing/logging/troubleshooting</remarks>
[NotNull]
public static string Dump(Slice value, int maxSize = 1024) //REVIEW: rename this to Encode(..) or Escape(..)
{
	// NOTE(review): the extraction stripped the two empty-case literals; "<empty>"/"<null>" restored from the sibling USlice convention — confirm against history
	if (value.Count == 0) return value.HasValue ? "<empty>" : "<null>";

	value.EnsureSliceIsValid();

	var buffer = value.Array;
	int count = Math.Min(value.Count, maxSize);
	int pos = value.Offset;

	var sb = new StringBuilder(count + 16);
	while (count-- > 0)
	{
		int c = buffer[pos++];
		if (c < 32 || c >= 127 || c == 60)
		{ // non-printable ASCII, or '<' (60) which is reserved as the escape delimiter
			sb.Append('<');
			int x = c >> 4;
			sb.Append((char) (x + (x < 10 ? 48 : 55)));
			x = c & 0xF;
			sb.Append((char) (x + (x < 10 ? 48 : 55)));
			sb.Append('>');
		}
		else
		{
			sb.Append((char) c);
		}
	}
	if (value.Count > maxSize) sb.Append("[\u2026]"); // Unicode for '...'
	return sb.ToString();
}

/// <summary>Decode the string that was generated by slice.ToString() or Slice.Dump(), back into the original slice</summary>
/// <remarks>This may not be efficient, so it should only be used for testing/logging/troubleshooting</remarks>
public static Slice Unescape(string value) //REVIEW: rename this to Decode() if we changed Dump() to Encode()
{
	var writer = default(SliceWriter);
	for (int i = 0; i < value.Length; i++)
	{
		char c = value[i];
		if (c == '<')
		{ // expects a two-digit hexadecimal escape of the form "<XX>"
			if (value[i + 3] != '>') throw new FormatException($"Invalid escape character at offset {i}");
			c = (char) (NibbleToDecimal(value[i + 1]) << 4 | NibbleToDecimal(value[i + 2]));
			i += 3;
		}
		writer.WriteByte((byte) c);
	}
	return writer.ToSlice();
}

#region Streams...

/// <summary>Read the content of a stream into a slice</summary>
/// <param name="data">Source stream, that must be in a readable state</param>
/// <returns>Slice containing the stream content (or <see cref="Slice.Nil"/> if the stream is <see cref="Stream.Null"/>)</returns>
/// <exception cref="ArgumentNullException">If <paramref name="data"/> is null.</exception>
/// <exception cref="InvalidOperationException">If the size of the stream exceeds <see cref="int.MaxValue"/> or if it does not support reading.</exception>
public static Slice FromStream([NotNull] Stream data)
{
	Contract.NotNull(data, nameof(data));

	// special case for empty values
	if (data == Stream.Null) return Slice.Nil;
	if (!data.CanRead) throw ThrowHelper.InvalidOperationException("Cannot read from provided stream");

	if (data.Length == 0) return Slice.Empty;
	if (data.Length > int.MaxValue) throw ThrowHelper.InvalidOperationException("Streams of more than 2GB are not supported");
	//TODO: other checks?

	int length;
	checked { length = (int) data.Length; }

	if (data is MemoryStream || data is UnmanagedMemoryStream) // other types of already completed streams ?
	{ // read synchronously
		return LoadFromNonBlockingStream(data, length);
	}

	// stream may block: read synchronously, in a loop (this was mislabeled "asynchronously" before)
	return LoadFromBlockingStream(data, length);
}

/// <summary>Asynchronously read the content of a stream into a slice</summary>
/// <param name="data">Source stream, that must be in a readable state</param>
/// <param name="ct">Optional cancellation token for this operation</param>
/// <returns>Slice containing the stream content (or <see cref="Slice.Nil"/> if the stream is <see cref="Stream.Null"/>)</returns>
/// <exception cref="ArgumentNullException">If <paramref name="data"/> is null.</exception>
/// <exception cref="InvalidOperationException">If the size of the stream exceeds <see cref="int.MaxValue"/> or if it does not support reading.</exception>
public static Task<Slice> FromStreamAsync([NotNull] Stream data, CancellationToken ct)
{
	Contract.NotNull(data, nameof(data));

	// special case for empty values
	if (data == Stream.Null) return Task.FromResult(Slice.Nil);
	if (!data.CanRead) throw ThrowHelper.InvalidOperationException("Cannot read from provided stream");

	if (data.Length == 0) return Task.FromResult(Slice.Empty);
	if (data.Length > int.MaxValue) throw ThrowHelper.InvalidOperationException("Streams of more than 2GB are not supported");
	//TODO: other checks?

	if (ct.IsCancellationRequested) return Task.FromCanceled<Slice>(ct);

	int length;
	checked { length = (int) data.Length; }

	if (data is MemoryStream || data is UnmanagedMemoryStream) // other types of already completed streams ?
	{ // read synchronously
		return Task.FromResult(LoadFromNonBlockingStream(data, length));
	}

	// read asynchronously
	return LoadFromBlockingStreamAsync(data, length, 0, ct);
}

/// <summary>Read from a non-blocking stream that already contains all the data in memory (MemoryStream, UnmanagedMemoryStream, ...)</summary>
/// <param name="source">Source stream</param>
/// <param name="length">Number of bytes to read from the stream</param>
/// <returns>Slice containing the loaded data</returns>
private static Slice LoadFromNonBlockingStream([NotNull] Stream source, int length)
{
	Contract.Requires(source != null && source.CanRead && source.Length <= int.MaxValue);

	if (source is MemoryStream ms)
	{ // Already holds onto a byte[]

		//note: should we use GetBuffer() ? It can throw and is dangerous (the underlying buffer could mutate)
		return ms.ToArray().AsSlice();
	}

	// read it in bulk, without buffering

	var buffer = new byte[length]; //TODO: round up to avoid fragmentation ?

	// note: reading should usually complete with only one big read, but loop until completed, just to be sure
	int p = 0;
	int r = length;
	while (r > 0)
	{
		int n = source.Read(buffer, p, r);
		if (n <= 0) throw ThrowHelper.InvalidOperationException($"Unexpected end of stream at {p:N0} / {length:N0} bytes");
		p += n;
		r -= n;
	}
	Contract.Assert(r == 0 && p == length);

	return buffer.AsSlice();
}

/// <summary>Synchronously read from a blocking stream (FileStream, NetworkStream, ...)</summary>
/// <param name="source">Source stream</param>
/// <param name="length">Number of bytes to read from the stream</param>
/// <param name="chunkSize">If non zero, max amount of bytes to read in one chunk. If zero, tries to read everything at once</param>
/// <returns>Slice containing the loaded data</returns>
private static Slice LoadFromBlockingStream([NotNull] Stream source, int length, int chunkSize = 0)
{
	Contract.Requires(source != null && source.CanRead && source.Length <= int.MaxValue && chunkSize >= 0);

	if (chunkSize == 0) chunkSize = int.MaxValue;

	var buffer = new byte[length]; //TODO: round up to avoid fragmentation ?

	// note: reading should usually complete with only one big read, but loop until completed, just to be sure
	int p = 0;
	int r = length;
	while (r > 0)
	{
		// BUGFIX: was Math.Max(r, chunkSize), which ignored the chunk size and could pass a count
		// larger than the remaining buffer space (the async twin below correctly uses Math.Min)
		int c = Math.Min(r, chunkSize);
		int n = source.Read(buffer, p, c);
		if (n <= 0) throw ThrowHelper.InvalidOperationException($"Unexpected end of stream at {p:N0} / {length:N0} bytes");
		p += n;
		r -= n;
	}
	Contract.Assert(r == 0 && p == length);

	return buffer.AsSlice();
}

/// <summary>Asynchronously read from a blocking stream (FileStream, NetworkStream, ...)</summary>
/// <param name="source">Source stream</param>
/// <param name="length">Number of bytes to read from the stream</param>
/// <param name="chunkSize">If non zero, max amount of bytes to read in one chunk. If zero, tries to read everything at once</param>
/// <param name="ct">Optional cancellation token for this operation</param>
/// <returns>Slice containing the loaded data</returns>
private static async Task<Slice> LoadFromBlockingStreamAsync([NotNull] Stream source, int length, int chunkSize, CancellationToken ct)
{
	Contract.Requires(source != null && source.CanRead && source.Length <= int.MaxValue && chunkSize >= 0);

	if (chunkSize == 0) chunkSize = int.MaxValue;

	var buffer = new byte[length]; //TODO: round up to avoid fragmentation ?

	// note: reading should usually complete with only one big read, but loop until completed, just to be sure
	int p = 0;
	int r = length;
	while (r > 0)
	{
		int c = Math.Min(r, chunkSize);
		int n = await source.ReadAsync(buffer, p, c, ct);
		if (n <= 0) throw ThrowHelper.InvalidOperationException($"Unexpected end of stream at {p:N0} / {length:N0} bytes");
		p += n;
		r -= n;
	}
	Contract.Assert(r == 0 && p == length);

	return buffer.AsSlice();
}

#endregion

#region Equality, Comparison...

/// <summary>Checks if an object is equal to the current slice</summary>
/// <param name="obj">Object that can be either another slice, a byte array, or a byte array segment.</param>
/// <returns>true if the object represents a sequence of bytes that has the same size and same content as the current slice.</returns>
public override bool Equals(object obj)
{
	switch (obj)
	{
		case null: return this.Array == null;
		case Slice slice: return Equals(slice);
		case ArraySegment<byte> segment: return Equals(segment);
		case byte[] bytes: return Equals(bytes);
	}
	return false;
}

/// <summary>Gets the hash code for this slice</summary>
/// <returns>A 32-bit signed hash code calculated from all the bytes in the slice.</returns>
public override int GetHashCode()
{
	EnsureSliceIsValid();
	return this.Array == null ? 0 : UnsafeHelpers.ComputeHashCodeUnsafe(this.Array, this.Offset, this.Count);
}

/// <summary>Checks if another slice is equal to the current slice.</summary>
/// <param name="other">Slice compared with the current instance</param>
/// <returns>true if both slices have the same size and contain the same sequence of bytes; otherwise, false.</returns>
public bool Equals(Slice other)
{
	other.EnsureSliceIsValid();
	this.EnsureSliceIsValid();

	// note: Slice.Nil is not equal to Slice.Empty
	if (this.Array == null) return other.Array == null;
	if (other.Array == null) return false;

	return this.Count == other.Count && UnsafeHelpers.SameBytesUnsafe(this.Array, this.Offset, other.Array, other.Offset, this.Count);
}

/// <summary>Lexicographically compare this slice with another one, and return an indication of their relative sort order</summary>
/// <param name="other">Slice to compare with this instance</param>
/// <returns>Returns a NEGATIVE value if the current slice is LESS THAN <paramref name="other"/>, ZERO if it is EQUAL TO <paramref name="other"/>, and a POSITIVE value if it is GREATER THAN <paramref name="other"/>.</returns>
/// <remarks>
/// If both this instance and <paramref name="other"/> are Nil or Empty, the comparison will return ZERO.
/// If only <paramref name="other"/> is Nil or Empty, it will return a POSITIVE value.
/// If only this instance is Nil or Empty, it will return a NEGATIVE value.
/// (BUGFIX: the previous remarks had the two signs inverted relative to the actual implementation.)
/// </remarks>
public int CompareTo(Slice other)
{
	if (this.Count == 0) return other.Count == 0 ? 0 : -1;
	if (other.Count == 0) return +1;
	other.EnsureSliceIsValid();
	this.EnsureSliceIsValid();
	return UnsafeHelpers.CompareUnsafe(this.Array, this.Offset, this.Count, other.Array, other.Offset, other.Count);
}

/// <summary>Checks if the content of a byte array segment matches the current slice.</summary>
/// <param name="other">Byte array segment compared with the current instance</param>
/// <returns>true if both the segment and the slice have the same size and contain the same sequence of bytes; otherwise, false.</returns>
public bool Equals(ArraySegment<byte> other)
{
	return this.Count == other.Count && UnsafeHelpers.SameBytes(this.Array, this.Offset, other.Array, other.Offset, this.Count);
}

/// <summary>Checks if the content of a byte array matches the current slice.</summary>
/// <param name="other">Byte array compared with the current instance</param>
/// <returns>true if both the array and the slice have the same size and contain the same sequence of bytes; otherwise, false.</returns>
public bool Equals(byte[] other)
{
	if (other == null) return this.Array == null;
	return this.Count == other.Length && UnsafeHelpers.SameBytes(this.Array, this.Offset, other, 0, this.Count);
}

#endregion

#region Sanity Checking...

/// <summary>Verifies that the <see cref="Offset"/> and <see cref="Count"/> fields represent a valid location in <see cref="Array"/></summary>
/// <remarks>This method is inlined for best performance</remarks>
/// <exception cref="FormatException">If the slice is not a valid section of a buffer</exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void EnsureSliceIsValid()
{
	// Conditions for a slice to be valid:
	// - Count equal to 0 (other fields are ignored)
	// - Count greater than 0 and Array not null and all the bytes of the slice are contained in the underlying buffer

	int count = this.Count;
	if (count != 0)
	{
		var array = this.Array;
		// single unsigned/widened comparison also catches negative Count or Offset
		if (array == null || (uint) count > (long) array.Length - (uint) this.Offset)
		{
			throw MalformedSlice(this);
		}
	}
}

/// <summary>Reject an invalid slice by throwing an error with the appropriate diagnostic message.</summary>
/// <param name="slice">Slice that is being naughty</param>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static Exception MalformedSlice(Slice slice)
{
#if DEBUG
	// If you break here, that means that a slice is invalid (negative count, offset, ...), which may be a sign of memory corruption!
	// You should walk up the stack to see what is going on !
	if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break();
#endif

	if (slice.Offset < 0) return UnsafeHelpers.Errors.SliceOffsetNotNeg();
	if (slice.Count < 0) return UnsafeHelpers.Errors.SliceCountNotNeg();
	if (slice.Count > 0)
	{
		if (slice.Array == null) return UnsafeHelpers.Errors.SliceBufferNotNull();
		if (slice.Offset + slice.Count > slice.Array.Length) return UnsafeHelpers.Errors.SliceBufferTooSmall();
	}
	// maybe it's Lupus ?
	return UnsafeHelpers.Errors.SliceInvalid();
}

#endregion

/// <summary>Return the sum of the size of all the slices with an additional prefix</summary>
/// <param name="prefix">Size of a prefix that would be added before each slice</param>
/// <param name="slices">Array of slices</param>
/// <returns>Combined total size of all the slices and the prefixes</returns>
public static int GetTotalSize(int prefix, [NotNull] Slice[] slices)
{
	// BUGFIX: multiply in 64-bit to avoid a silent int overflow before the checked conversion
	long size = (long) prefix * slices.Length;
	for (int i = 0; i < slices.Length; i++)
	{
		size += slices[i].Count;
	}
	return checked((int) size);
}

/// <summary>Return the sum of the size of all the slices with an additional prefix</summary>
/// <param name="prefix">Size of a prefix that would be added before each slice</param>
/// <param name="slices">Array of slices</param>
/// <returns>Combined total size of all the slices and the prefixes</returns>
public static int GetTotalSize(int prefix, [NotNull] Slice?[] slices)
{
	long size = (long) prefix * slices.Length;
	for (int i = 0; i < slices.Length; i++)
	{
		size += slices[i].GetValueOrDefault().Count;
	}
	return checked((int) size);
}

/// <summary>Return the sum of the size of all the slices with an additional prefix</summary>
/// <param name="prefix">Size of a prefix that would be added before each slice</param>
/// <param name="slices">List of slices</param>
/// <returns>Combined total size of all the slices and the prefixes</returns>
public static int GetTotalSize(int prefix, [NotNull] List<Slice> slices)
{
	long size = (long) prefix * slices.Count;
	foreach (var val in slices)
	{
		size += val.Count;
	}
	return checked((int) size);
}

/// <summary>Return the sum of the size of all the slices with an additional prefix</summary>
/// Size of a prefix that would be
/// added before each slice
/// <param name="slices">List of slices</param>
/// <returns>Combined total size of all the slices and the prefixes</returns>
public static int GetTotalSize(int prefix, [NotNull] List<Slice?> slices)
{
	// multiply in 64-bit to avoid a silent int overflow before the checked conversion
	long size = (long) prefix * slices.Count;
	foreach (var val in slices)
	{
		size += val.GetValueOrDefault().Count;
	}
	return checked((int) size);
}

/// <summary>Return the sum of the size of all the slices with an additional prefix, and test if they all share the same buffer</summary>
/// <param name="prefix">Size of a prefix that would be added before each slice</param>
/// <param name="slices">Array of slices</param>
/// <param name="commonStore">Receives null if at least two slices are stored in a different buffer. If not null, receives the common buffer for all the keys</param>
/// <returns>Combined total size of all the slices and the prefixes</returns>
public static int GetTotalSizeAndCommonStore(int prefix, [NotNull] Slice[] slices, out byte[] commonStore)
{
	if (slices.Length == 0)
	{
		commonStore = null;
		return 0;
	}
	byte[] store = slices[0].Array;
	if (slices.Length == 1)
	{
		commonStore = store;
		return prefix + slices[0].Count;
	}

	bool sameStore = true;
	long size = slices[0].Count + (long) slices.Length * prefix;
	for (int i = 1; i < slices.Length; i++)
	{
		size += slices[i].Count;
		sameStore &= (slices[i].Array == store);
	}
	commonStore = sameStore ? store : null;
	return checked((int) size);
}

/// <summary>Return the sum of the size of all the slices with an additional prefix, and test if they all share the same buffer</summary>
/// <param name="prefix">Size of a prefix that would be added before each slice</param>
/// <param name="slices">List of slices</param>
/// <param name="commonStore">Receives null if at least two slices are stored in a different buffer. If not null, receives the common buffer for all the keys</param>
/// <returns>Combined total size of all the slices and the prefixes</returns>
public static int GetTotalSizeAndCommonStore(int prefix, [NotNull] List<Slice> slices, out byte[] commonStore)
{
	Contract.Requires(slices != null);
	if (slices.Count == 0)
	{
		commonStore = null;
		return 0;
	}
	byte[] store = slices[0].Array;
	if (slices.Count == 1)
	{
		commonStore = store;
		return prefix + slices[0].Count;
	}

	bool sameStore = true;
	// BUGFIX: the previous version seeded the sum with slices[0].Count and then enumerated the whole
	// list, counting the first slice twice; start from the prefixes only and add each slice exactly once.
	long size = (long) slices.Count * prefix;
	foreach (var val in slices)
	{
		size += val.Count;
		sameStore &= (val.Array == store);
	}
	commonStore = sameStore ? store : null;
	return checked((int) size);
}

/// <summary>Structure that keeps buffers from moving in memory during GC collections</summary>
/// <remarks>
/// Caller must ensure that this structure is properly Disposed in all execution paths once the buffers are not needed anymore!
/// It is safe to call Dispose() multiple times (though the buffers will be unpinned on the first call)
/// </remarks>
public struct Pinned : IDisposable
{

	/// <summary>GC Handle on the main buffer</summary>
	internal GCHandle Handle;

	/// <summary>Additional GC Handles (optional)</summary>
	internal readonly GCHandle[] Handles;

	// non-null while the handles are allocated; reset to null on the first Dispose()
	internal object Owner;

	internal Pinned([NotNull] object owner, [NotNull] byte[] buffer, [CanBeNull] List<Slice> extra)
	{
		Contract.Requires(owner != null && buffer != null);

		// BUGFIX: was 'this.Owner = buffer', silently ignoring the 'owner' argument
		this.Owner = owner;
		this.Handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
		if (extra == null || extra.Count == 0)
		{
			this.Handles = null;
		}
		else
		{
			var handles = new GCHandle[extra.Count];
			this.Handles = handles;
			int p = 0;
			foreach (var chunk in extra)
			{
				handles[p++] = GCHandle.Alloc(chunk.Array, GCHandleType.Pinned);
			}
			// BUGFIX: removed a trailing 'handles[p] = GCHandle.Alloc(buffer);' which wrote past the end
			// of the array (p == extra.Count here) and redundantly re-handled the already pinned buffer.
		}
	}

	public bool IsAllocated => this.Handle.IsAllocated;

	public void Dispose()
	{
		if (this.Owner != null)
		{
			if (this.Handle.IsAllocated) this.Handle.Free();
			var handles = this.Handles;
			if (handles != null)
			{
				for (int i = 0; i < handles.Length; i++)
				{
					if (handles[i].IsAllocated) handles[i].Free();
				}
			}
			this.Owner = null;
		}
	}
}

[UsedImplicitly(ImplicitUseTargetFlags.WithMembers)]
private sealed class DebugView
{
	private readonly Slice m_slice;

	public DebugView(Slice slice)
	{
		m_slice = slice;
	}

	public int Count => m_slice.Count;

	/// <summary>Copy of the bytes covered by the slice (or the buffer itself if the slice covers it entirely)</summary>
	public byte[] Data
	{
		get
		{
			if (m_slice.Count == 0) return m_slice.Array == null ? null : System.Array.Empty<byte>();
			if (m_slice.Offset == 0 && m_slice.Count == m_slice.Array.Length) return m_slice.Array;
			var tmp = new byte[m_slice.Count];
			System.Array.Copy(m_slice.Array, m_slice.Offset, tmp, 0, m_slice.Count);
			return tmp;
		}
	}

	public string Content => Slice.Dump(m_slice, maxSize: 1024);

	/// <summary>Encoding used only for display purpose: we don't want to throw in the 'Text' property if the input is not text!</summary>
	[NotNull]
	private static readonly UTF8Encoding Utf8NoBomEncodingNoThrow = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: false);

	public string Text
	{
		get
		{
			if (m_slice.Count == 0) return m_slice.Array == null ? null : String.Empty;
			return EscapeString(new StringBuilder(m_slice.Count + 16), m_slice.Array, m_slice.Offset, m_slice.Count, Utf8NoBomEncodingNoThrow).ToString();
		}
	}

	public string Hexa
	{
		get
		{
			if (m_slice.Count == 0) return m_slice.Array == null ? null : String.Empty;
			return m_slice.Count <= 1024
				? m_slice.ToHexaString(' ')
				: m_slice.Substring(0, 1024).ToHexaString(' ') + "[\u2026]";
		}
	}

}

}

/// <summary>Helper methods for Slice</summary>
public static class SliceExtensions
{
	/// <summary>Handle the Nil/Empty memoization</summary>
	[Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.NoInlining)]
	private static Slice EmptyOrNil(byte[] array)
	{
		//note: we consider the "empty" or "nil" case less frequent, so we handle it in a non-inlined method
		return array == null ? default(Slice) : Slice.Empty;
	}

	/// <summary>Handle the Nil/Empty memoization</summary>
	[Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.NoInlining)]
	private static Slice EmptyOrNil([CanBeNull] byte[] array, int count)
	{
		//note: we consider the "empty" or "nil" case less frequent, so we handle it in a non-inlined method
		if (array == null) return count == 0 ? default(Slice) : throw UnsafeHelpers.Errors.BufferArrayNotNull();
		return Slice.Empty;
	}

	/// <summary>Return a slice that wraps the whole array</summary>
	[Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)]
	public static Slice AsSlice([CanBeNull] this byte[] bytes)
	{
		return bytes != null && bytes.Length > 0 ? new Slice(bytes, 0, bytes.Length) : EmptyOrNil(bytes);
	}

	/// <summary>Return the tail of the array, starting from the specified offset</summary>
	/// <param name="bytes">Underlying buffer to slice</param>
	/// <param name="offset">Offset to the first byte of the slice</param>
	[Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)]
	public static Slice AsSlice([NotNull] this byte[] bytes, [Positive] int offset)
	{
		//note: this method is DANGEROUS! Caller may think that it is passing a count instead of an offset.
		Contract.NotNull(bytes, nameof(bytes));
		if ((uint) offset > (uint) bytes.Length) UnsafeHelpers.Errors.ThrowBufferArrayToSmall();
		return bytes.Length != 0 ? new Slice(bytes, offset, bytes.Length - offset) : Slice.Empty;
	}

	/// <summary>Return a slice from the sub-section of the byte array</summary>
	/// <param name="bytes">Underlying buffer to slice</param>
	/// <param name="offset">Offset to the first element of the slice (if not empty)</param>
	/// <param name="count">Number of bytes to take</param>
	/// <returns>Slice that maps the corresponding sub-section of the array. If count is 0 then either Slice.Empty or Slice.Nil will be returned, in order to not keep a reference to the whole buffer.</returns>
+ /// + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice AsSlice([CanBeNull] this byte[] bytes, [Positive] int offset, [Positive] int count) + { + //note: this method will frequently be called with offset==0, so we should optimize for this case! + if (bytes == null | count == 0) return EmptyOrNil(bytes, count); + + // bound check + // ReSharper disable once PossibleNullReferenceException + if ((uint) offset >= (uint) bytes.Length || (uint) count > (uint) (bytes.Length - offset)) UnsafeHelpers.Errors.ThrowOffsetOutsideSlice(); + + return new Slice(bytes, offset, count); + } + + /// Return a slice from the sub-section of the byte array + /// Underlying buffer to slice + /// Offset to the first element of the slice (if not empty) + /// Number of bytes to take + /// + /// Slice that maps the corresponding sub-section of the array. + /// If then either Slice.Empty or Slice.Nil will be returned, in order to not keep a reference to the whole buffer. + /// + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice AsSlice([CanBeNull] this byte[] bytes, uint offset, uint count) + { + //note: this method will frequently be called with offset==0, so we should optimize for this case! + if (bytes == null | count == 0) return EmptyOrNil(bytes, (int) count); + + // bound check + if (offset >= (uint) bytes.Length || count > ((uint) bytes.Length - offset)) UnsafeHelpers.Errors.ThrowOffsetOutsideSlice(); + + return new Slice(bytes, (int) offset, (int) count); + } + + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice AsSlice(this ArraySegment self) + { + // We trust the ArraySegment ctor to valide the arguments before hand. + // If somehow the arguments were corrupted (intentionally or not), then the same problem could have happened with the slice anyway! + + // ReSharper disable once AssignNullToNotNullAttribute + return self.Count != 0 ? 
new Slice(self.Array, self.Offset, self.Count) : EmptyOrNil(self.Array, self.Count); + } + + /// Return a slice from the sub-section of an array segment + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice AsSlice(this ArraySegment self, int offset, int count) + { + return AsSlice(self).Substring(offset, count); + } + +#if ENABLE_SPAN + /// Convert this into the equivalent ReadOnlySpan<byte>. + /// Both and will be converted into an empty span + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ReadOnlySpan AsReadOnlySpan(this Slice self) + { + return new ReadOnlySpan(self.Array, self.Offset, self.Count); + } + + /// Convert this into the equivalent ReadOnlySpan<byte>. + /// If is + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ReadOnlySpan AsReadOnlySpan(this Slice self, int start) + { + var x = self.Substring(start); + return new ReadOnlySpan(x.Array, x.Offset, x.Count); + } + + /// Convert this into the equivalent ReadOnlySpan<byte>. + /// If is + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ReadOnlySpan AsReadOnlySpan(this Slice self, int start, int length) + { + var x = self.Substring(start, length); + return new ReadOnlySpan(x.Array, x.Offset, x.Count); + } + + /// Convert this into the equivalent Span<byte>. + /// Both and will be converted into an empty span + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Span AsSpan(this Slice self) + { + return new Span(self.Array, self.Offset, self.Count); + } + + /// Convert this into the equivalent Span<byte>. 
+ /// If is + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Span AsSpan(this Slice self, int start) + { + var x = self.Substring(start); + return new Span(x.Array, x.Offset, x.Count); + } + + /// Convert this into the equivalent Span<byte>. + /// If is + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Span AsSpan(this Slice self, int start, int length) + { + var x = self.Substring(start, length); + return new Span(x.Array, x.Offset, x.Count); + } +#endif + + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static SliceReader ToSliceReader(this byte[] self) + { + return new SliceReader(self); + } + + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static SliceReader ToSliceReader(this byte[] self, int count) + { + return new SliceReader(self, 0, count); + } + + [Pure, DebuggerNonUserCode, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static SliceReader ToSliceReader(this byte[] self, int offset, int count) + { + return new SliceReader(self, offset, count); + } + + [Pure, NotNull, DebuggerNonUserCode] + public static SliceStream AsStream(this Slice slice) //REVIEW: => ToStream() ? + { + if (slice.IsNull) throw ThrowHelper.InvalidOperationException("Slice cannot be null"); + //TODO: have a singleton for the emtpy slice ? 
+ return new SliceStream(slice); + } + +#if ENABLE_SPAN + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CopyTo(this ReadOnlySpan source, Slice destination) + { + source.CopyTo(new Span(destination.Array, destination.Offset, destination.Count)); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CopyTo(this Span source, Slice destination) + { + if (source.Length > 0) source.CopyTo(new Span(destination.Array, destination.Offset, destination.Count)); + } +#endif + + } + +} diff --git a/FoundationDB.Client/Utils/SliceBuffer.cs b/FoundationDB.Client/Shared/Memory/SliceBuffer.cs similarity index 65% rename from FoundationDB.Client/Utils/SliceBuffer.cs rename to FoundationDB.Client/Shared/Memory/SliceBuffer.cs index d2d31d336..c513f4ba5 100644 --- a/FoundationDB.Client/Utils/SliceBuffer.cs +++ b/FoundationDB.Client/Shared/Memory/SliceBuffer.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,19 +26,20 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client.Utils +namespace Doxense.Memory { - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Buffer that can be used to efficiently store multiple slices into as few chunks as possible /// - /// This class is usefull to centralize a lot of temporary slices whose lifetime is linked to a specific operation. Dropping the referce to the buffer will automatically reclaim all the slices that were stored with it. + /// This class is usefull to centralize a lot of temporary slices whose lifetime is linked to a specific operation. Dropping the reference to the buffer will automatically reclaim all the slices that were stored with it. 
/// This class is not thread safe. /// - [DebuggerDisplay("Pos={m_pos}, Remaining={m_remaining}, PageSize={m_pageSize}, Used={m_used+m_pos}, Allocated={m_allocated+m_pos+m_remaining}")] + [DebuggerDisplay("Pos={m_pos}, Remaining={m_remaining}, PageSize={m_pageSize}, Size={Size}, Allocated={Allocated}")] public sealed class SliceBuffer { private const int DefaultPageSize = 256; @@ -68,27 +69,18 @@ public SliceBuffer() /// Initial page size public SliceBuffer(int pageSize) { - if (pageSize < 0) throw new ArgumentOutOfRangeException("pageSize", "Page size cannt be less than zero"); - m_pageSize = pageSize == 0 ? DefaultPageSize : SliceHelpers.Align(pageSize); + if (pageSize < 0) throw new ArgumentOutOfRangeException(nameof(pageSize), "Page size cannt be less than zero"); + m_pageSize = pageSize == 0 ? DefaultPageSize : BitHelpers.AlignPowerOfTwo(pageSize, 16); } /// Gets the number of bytes used by all the slice allocated in this buffer - public int Size - { - get { return m_used + m_pos; } - } + public int Size => m_used + m_pos; /// Gets the total memory size allocated to store all the slices in this buffer - public int Allocated - { - get { return m_allocated + m_pos + m_remaining; } - } + public int Allocated => m_allocated + m_pos + m_remaining; /// Number of memory pages used by this buffer - public int PageCount - { - get { return m_chunks == null ? 1 : (m_chunks.Count + 1); } - } + public int PageCount => m_chunks?.Count + 1 ?? 1; /// Return the list of all the pages used by this buffer /// Array of pages used by the buffer @@ -96,76 +88,11 @@ public int PageCount public Slice[] GetPages() { var pages = new Slice[this.PageCount]; - if (m_chunks != null) m_chunks.CopyTo(pages); + m_chunks?.CopyTo(pages); pages[pages.Length - 1] = new Slice(m_current, 0, m_pos); return pages; } - /// Copy a pair of keys into the buffer, and return a new identical pair - /// Key range - /// Equivalent pair of keys, that are backed by the buffer. 
- public FdbKeyRange InternRange(FdbKeyRange range) - { - //TODO: if end is prefixed by begin, we could merge both keys (frequent when dealing with ranges on tuples that add \xFF - return new FdbKeyRange( - Intern(range.Begin, aligned: true), - Intern(range.End, aligned: true) - ); - } - - /// Copy a pair of keys into the buffer, and return a new identical pair - /// Begin key of the range - /// End key of the range - /// Equivalent pair of keys, that are backed by the buffer. - public FdbKeyRange InternRange(Slice begin, Slice end) - { - //TODO: if end is prefixed by begin, we could merge both keys (frequent when dealing with ranges on tuples that add \xFF - return new FdbKeyRange( - Intern(begin, aligned: true), - Intern(end, aligned: true) - ); - } - - /// Copy a key into the buffer, and return a new range containing only that key - /// Key to copy to the buffer - /// Range equivalent to [key, key + '\0') that is backed by the buffer. - public FdbKeyRange InternRangeFromKey(Slice key) - { - // Since the end key only adds \0 to the begin key, we can reuse the same bytes by making both overlap - var tmp = Intern(key, FdbKey.MinValue, aligned: true); - - return new FdbKeyRange( - tmp.Substring(0, key.Count), - tmp - ); - } - - /// Copy a key selector into the buffer, and return a new identical selector - /// Key selector to copy to the buffer - /// Equivalent key selector that is backed by the buffer. - public FdbKeySelector InternSelector(FdbKeySelector selector) - { - return new FdbKeySelector( - Intern(selector.Key, aligned: true), - selector.OrEqual, - selector.Offset - ); - } - - /// Copy a pair of key selectors into the buffer, and return a new identical pair - /// Pair of key selectors to copy to the buffer - /// Equivalent pair of key selectors that is backed by the buffer. 
- public FdbKeySelectorPair InternSelectorPair(FdbKeySelectorPair pair) - { - var begin = Intern(pair.Begin.Key, default(Slice), aligned: true); - var end = Intern(pair.End.Key, default(Slice), aligned: true); - - return new FdbKeySelectorPair( - new FdbKeySelector(begin, pair.Begin.OrEqual, pair.Begin.Offset), - new FdbKeySelector(end, pair.End.OrEqual, pair.End.Offset) - ); - } - /// Allocate an empty space in the buffer /// Number of bytes to allocate /// If true, align the start of the slice with the default padding size. @@ -173,7 +100,7 @@ public FdbKeySelectorPair InternSelectorPair(FdbKeySelectorPair pair) /// There is NO garantees that the allocated slice will be pre-filled with zeroes. public Slice Allocate(int count, bool aligned = false) { - if (count < 0) throw new ArgumentException("Cannot allocate less than zero bytes.", "count"); + if (count < 0) throw new ArgumentException("Cannot allocate less than zero bytes.", nameof(count)); const int ALIGNMENT = 4; @@ -182,19 +109,20 @@ public Slice Allocate(int count, bool aligned = false) return Slice.Empty; } - int start = m_pos; - int extra = aligned ? (ALIGNMENT - (start & (ALIGNMENT - 1))) : 0; - if (count + extra > m_remaining) + int p = m_pos; + int r = m_remaining; + int extra = aligned ? 
(ALIGNMENT - (p & (ALIGNMENT - 1))) : 0; + if (count + extra > r) { // does not fit return AllocateFallback(count); } Contract.Assert(m_current != null && m_pos >= 0); - m_pos += count + extra; - m_remaining -= count + extra; + m_pos = p + (count + extra); + m_remaining = r - (count + extra); Contract.Ensures(m_remaining >= 0); //note: we rely on the fact that the buffer was pre-filled with zeroes - return Slice.Create(m_current, start + extra, count); + return new Slice(m_current, p + extra, count); } private Slice AllocateFallback(int count) @@ -223,7 +151,7 @@ private Slice AllocateFallback(int count) m_pos = count; m_remaining = pageSize - count; - return Slice.Create(buffer, 0, count); + return new Slice(buffer, 0, count); } /// Copy a slice into the buffer, with optional alignement, and return a new identical slice. @@ -238,11 +166,11 @@ public Slice Intern(Slice data, bool aligned = false) return data.Memoize(); } - SliceHelpers.EnsureSliceIsValid(ref data); + data.EnsureSliceIsValid(); // allocate the slice var slice = Allocate(data.Count, aligned); - SliceHelpers.CopyBytesUnsafe(slice.Array, slice.Offset, data.Array, data.Offset, data.Count); + UnsafeHelpers.CopyUnsafe(slice.Array, slice.Offset, data.Array, data.Offset, data.Count); return slice; } @@ -260,12 +188,12 @@ internal Slice Intern(Slice data, Slice suffix, bool aligned = false) return suffix.Count > 0 ? suffix : data.Array == null ? 
Slice.Nil : Slice.Empty; } - SliceHelpers.EnsureSliceIsValid(ref data); - SliceHelpers.EnsureSliceIsValid(ref suffix); + data.EnsureSliceIsValid(); + suffix.EnsureSliceIsValid(); var slice = Allocate(data.Count + suffix.Count, aligned); - SliceHelpers.CopyBytesUnsafe(slice.Array, slice.Offset, data.Array, data.Offset, data.Count); - SliceHelpers.CopyBytesUnsafe(slice.Array, slice.Offset + data.Count, suffix.Array, suffix.Offset, suffix.Count); + UnsafeHelpers.CopyUnsafe(slice.Array, slice.Offset, data.Array, data.Offset, data.Count); + UnsafeHelpers.CopyUnsafe(slice.Array, slice.Offset + data.Count, suffix.Array, suffix.Offset, suffix.Count); return slice; } @@ -301,7 +229,15 @@ private void Reset(bool keep) m_used = 0; if (!keep) m_current = null; } - } + /// Return a lock that will prevent the underlying byte arrays used by this buffer from moving around in memory during the next GC. + /// Lock instance that MUST be disposed to release the GC lock. + /// Any data added to the buffer WHILE the buffer is pinned MAY NOT be pinned itself! For safety, caller should make sure to write everything to the buffer before pinning it + public Slice.Pinned Pin() + { + return new Slice.Pinned(this, m_current, m_chunks); + } + + } } diff --git a/FoundationDB.Client/Utils/SliceListStream.cs b/FoundationDB.Client/Shared/Memory/SliceListStream.cs similarity index 78% rename from FoundationDB.Client/Utils/SliceListStream.cs rename to FoundationDB.Client/Shared/Memory/SliceListStream.cs index 6acf1e90f..d987163d5 100644 --- a/FoundationDB.Client/Utils/SliceListStream.cs +++ b/FoundationDB.Client/Shared/Memory/SliceListStream.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,16 +26,18 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Memory { - using FoundationDB.Async; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.IO; using System.Linq; + using System.Runtime.CompilerServices; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; + + //REVIEW: this is somewhat similar to the proposed new IBufferList (from System.IO.Pipeline) ? /// Merge multiple slices into a single stream public sealed class SliceListStream : Stream @@ -49,13 +51,13 @@ public sealed class SliceListStream : Stream internal SliceListStream([NotNull] Slice[] slices) { - if (slices == null) throw new ArgumentNullException("slices"); + Contract.NotNull(slices, nameof(slices)); Init(slices); } public SliceListStream([NotNull] IEnumerable slices) { - if (slices == null) throw new ArgumentNullException("slices"); + Contract.NotNull(slices, nameof(slices)); Init(slices.ToArray()); } @@ -72,32 +74,20 @@ private void Init([NotNull] Slice[] slices) #region Seeking... - public override bool CanSeek - { - get { return m_slices != null; } - } + public override bool CanSeek => m_slices != null; public override long Position { - get - { - return m_position; - } - set - { - Seek(value, SeekOrigin.Begin); - } + get => m_position; + set => Seek(value, SeekOrigin.Begin); } - public override long Length - { - get { return m_length; } - } + public override long Length => m_length; public override long Seek(long offset, SeekOrigin origin) { - if (m_slices == null) StreamIsClosed(); - if (offset > int.MaxValue) throw new ArgumentOutOfRangeException("offset"); + if (m_slices == null) throw StreamIsClosed(); + if (offset > int.MaxValue) throw new ArgumentOutOfRangeException(nameof(offset)); switch (origin) { @@ -157,10 +147,7 @@ public override void SetLength(long value) #region Reading... 
- public override bool CanRead - { - get { return m_position < m_length; } - } + public override bool CanRead => m_position < m_length; private bool AdvanceToNextSlice() { @@ -198,7 +185,7 @@ public override int Read(byte[] buffer, int offset, int count) { ValidateBuffer(buffer, offset, count); - if (m_slices == null) StreamIsClosed(); + if (m_slices == null) throw StreamIsClosed(); Contract.Ensures(m_position >= 0 && m_position <= m_length); @@ -245,15 +232,13 @@ public override int Read(byte[] buffer, int offset, int count) return read; } -#if !NET_4_0 - - public override Task ReadAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken cancellationToken) + public override Task ReadAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken ct) { ValidateBuffer(buffer, offset, count); - if (cancellationToken.IsCancellationRequested) + if (ct.IsCancellationRequested) { - return TaskHelpers.FromCancellation(cancellationToken); + return Task.FromCanceled(ct); } try @@ -266,60 +251,51 @@ public override Task ReadAsync(byte[] buffer, int offset, int count, System } catch (Exception e) { - return TaskHelpers.FromException(e); + return Task.FromException(e); } } -#endif - #endregion #region Writing... 
- public override bool CanWrite - { - get { return false; } - } + public override bool CanWrite => false; public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } -#if !NET_4_0 - - public override Task WriteAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken cancellationToken) + public override Task WriteAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken ct) { - return TaskHelpers.FromException(new NotSupportedException()); + return Task.FromException(new NotSupportedException()); } -#endif - public override void Flush() { // Not supported, but don't throw here } - public override Task FlushAsync(System.Threading.CancellationToken cancellationToken) + public override Task FlushAsync(System.Threading.CancellationToken ct) { // Not supported, but don't throw here - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } #endregion private static void ValidateBuffer(byte[] buffer, int offset, int count) { - if (buffer == null) throw new ArgumentNullException("buffer"); - if (count < 0) throw new ArgumentOutOfRangeException("count", "Count cannot be less than zero"); - if (offset < 0) throw new ArgumentOutOfRangeException("offset", "Offset cannot be less than zero"); - if (offset > buffer.Length - count) throw new ArgumentException("Offset and count must fit inside the buffer"); + Contract.NotNull(buffer, nameof(buffer)); + if (count < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), "Count cannot be less than zero"); + if (offset < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(offset), "Offset cannot be less than zero"); + if (offset > buffer.Length - count) throw ThrowHelper.ArgumentException(nameof(offset), "Offset and count must fit inside the buffer"); } - [ContractAnnotation("=> halt")] - private static void StreamIsClosed() + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static Exception StreamIsClosed() { 
- throw new ObjectDisposedException(null, "The stream was already closed"); + return ThrowHelper.ObjectDisposedException("The stream was already closed"); } protected override void Dispose(bool disposing) diff --git a/FoundationDB.Client/Utils/SlicePairComparer.cs b/FoundationDB.Client/Shared/Memory/SlicePairComparer.cs similarity index 95% rename from FoundationDB.Client/Utils/SlicePairComparer.cs rename to FoundationDB.Client/Shared/Memory/SlicePairComparer.cs index d8e58a17a..12e447bd3 100644 --- a/FoundationDB.Client/Utils/SlicePairComparer.cs +++ b/FoundationDB.Client/Shared/Memory/SlicePairComparer.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,15 +26,17 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Memory { - using FoundationDB.Client.Utils; using System; using System.Collections.Generic; + using Doxense.Diagnostics.Contracts; /// Performs optimized equality and comparison checks on key/value pairs of public sealed class SlicePairComparer : IComparer>, IEqualityComparer> { + //TODO: move this inside Slmice? (Slice.PairComparer.Default ...) + private const int BOTH = 0; private const int KEY_ONLY = 1; private const int VALUE_ONLY = 2; diff --git a/FoundationDB.Client/Shared/Memory/SliceReader.cs b/FoundationDB.Client/Shared/Memory/SliceReader.cs new file mode 100644 index 000000000..5dfc3ef9e --- /dev/null +++ b/FoundationDB.Client/Shared/Memory/SliceReader.cs @@ -0,0 +1,422 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Memory +{ + using System; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using System.Text; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; + + /// Helper class that holds the internal state used to parse tuples from slices + /// This struct MUST be passed by reference! + [PublicAPI, DebuggerDisplay("{Position}/{Buffer.Count}, NextByte={PeekByte()}")] + [DebuggerNonUserCode] //remove this when you need to troubleshoot this class! 
+ public struct SliceReader + { + + /// Buffer containing the tuple being parsed + public readonly Slice Buffer; + + /// Current position inside the buffer + public int Position; + + /// Creates a new reader over a slice + /// Slice that will be used as the underlying buffer + public SliceReader(Slice buffer) + { + buffer.EnsureSliceIsValid(); + this.Buffer = buffer; + this.Position = 0; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public SliceReader(Slice buffer, int offset) + { + buffer.EnsureSliceIsValid(); + this.Buffer = buffer.Substring(offset); + this.Position = 0; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public SliceReader([NotNull] byte[] buffer) + { + this.Buffer = new Slice(buffer, 0, buffer.Length); + this.Position = 0; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public SliceReader([NotNull] byte[] buffer, int offset, int count) + { + this.Buffer = new Slice(buffer, offset, count); + this.Position = 0; + } + + /// Returns true if there are more bytes to parse + public bool HasMore => this.Position < this.Buffer.Count; + + /// Returns the number of bytes remaining + public int Remaining => Math.Max(0, this.Buffer.Count - this.Position); + + /// Returns a slice with all the bytes read so far in the buffer + public Slice Head => this.Buffer.Substring(0, this.Position); + + /// Returns a slice with all the remaining bytes in the buffer + public Slice Tail => this.Buffer.Substring(this.Position); + + /// Ensure that there are at least bytes remaining in the buffer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + [DebuggerNonUserCode] + public void EnsureBytes(int count) + { + if (count < 0 || checked(this.Position + count) > this.Buffer.Count) throw ThrowNotEnoughBytes(count); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + [DebuggerNonUserCode] + private static Exception ThrowNotEnoughBytes(int count) + { + return ThrowHelper.FormatException($"The buffer does not have enough data 
to satisfy a read of {count} byte(s)"); + } + + /// Return the value of the next byte in the buffer, or -1 if we reached the end + [Pure] + public int PeekByte() + { + int p = this.Position; + return p < this.Buffer.Count ? this.Buffer[p] : -1; + } + + /// Return the value of the byte at a specified offset from the current position, or -1 if this is after the end, or before the start + [Pure] + public int PeekByteAt(int offset) + { + int p = this.Position + offset; + return p < this.Buffer.Count && p >= 0 ? this.Buffer[p] : -1; + } + + public Slice PeekBytes(int count) + { + return this.Buffer.Substring(this.Position, count); + } + + /// Attempt to peek at the next bytes from the reader, without advancing the pointer + /// Number of bytes to peek + /// Receives the corresponding slice if there are enough bytes remaining. + /// If true, the next are available in . If false, there are not enough bytes remaining in the buffer. + public bool TryPeekBytes(int count, out Slice bytes) + { + if (this.Remaining < count) + { + bytes = default(Slice); + return false; + } + bytes = this.Buffer.Substring(this.Position, count); + return true; + } + + /// Skip the next bytes of the buffer + public void Skip(int count) + { + EnsureBytes(count); + + this.Position += count; + } + + /// Read the next byte from the buffer + public byte ReadByte() + { + EnsureBytes(1); + + int p = this.Position; + byte b = this.Buffer[p]; + this.Position = p + 1; + return b; + } + + /// Read the next bytes from the buffer + public Slice ReadBytes(int count) + { + if (count == 0) return Slice.Empty; + + EnsureBytes(count); + int p = this.Position; + this.Position = p + count; + return this.Buffer.Substring(p, count); + } + + /// Read the next bytes from the buffer + public Slice ReadBytes(uint count) + { + int n = checked((int) count); + EnsureBytes(n); + + int p = this.Position; + this.Position = p + n; + return this.Buffer.Substring(p, n); + } + + /// Read until returns true, or we reach the end of 
the buffer + [Pure] + public Slice ReadWhile([NotNull] Func handler) + { + unsafe + { + int start = this.Position; + int count = 0; + fixed (byte* bytes = &this.Buffer.DangerousGetPinnableReference()) + { + byte* ptr = bytes; + byte* end = bytes + this.Remaining; + while (ptr < end) + { + if (!handler(*ptr, count)) + { + break; + } + ++ptr; + ++count; + } + this.Position = start + count; + return this.Buffer.Substring(start, count); + } + } + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice ReadToEnd() + { + return ReadBytes(this.Remaining); + } + + /// Read the next 2 bytes as an unsigned 16-bit integer, encoded in little-endian + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public ushort ReadFixed16() + { + return ReadBytes(2).ToUInt16(); + } + + /// Read the next 3 bytes as an unsigned 24-bit integer, encoded in little-endian + /// Bits 24 to 31 will always be zero + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public uint ReadFixed24() + { + return ReadBytes(3).ToUInt24(); + } + + + /// Read the next 4 bytes as an unsigned 32-bit integer, encoded in little-endian + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public uint ReadFixed32() + { + return ReadBytes(4).ToUInt32(); + } + + /// Read the next 8 bytes as an unsigned 64-bit integer, encoded in little-endian + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public ulong ReadFixed64() + { + return ReadBytes(8).ToUInt64(); + } + + /// Read the next 2 bytes as an unsigned 16-bit integer, encoded in big-endian + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public ushort ReadFixed16BE() + { + return ReadBytes(2).ToUInt16BE(); + } + + /// Read the next 3 bytes as an unsigned 24-bit integer, encoded in big-endian + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public uint ReadFixed24BE() + { + return ReadBytes(3).ToUInt24BE(); + } + + /// Read the next 4 bytes as an unsigned 32-bit integer, encoded in 
big-endian + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public uint ReadFixed32BE() + { + return ReadBytes(4).ToUInt32BE(); + } + + /// Read the next 8 bytes as an unsigned 64-bit integer, encoded in big-endian + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public ulong ReadFixed64BE() + { + return ReadBytes(8).ToUInt64BE(); + } + + /// Read the next 4 bytes as an IEEE 32-bit floating point number + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public float ReadSingle() + { + return ReadBytes(4).ToSingle(); + } + + /// Read the next 8 bytes as an IEEE 64-bit floating point number + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public double ReadDouble() + { + return ReadBytes(8).ToDouble(); + } + + /// Read an encoded nul-terminated byte array from the buffer + [Pure] + public Slice ReadByteString() + { + var buffer = this.Buffer.Array; + int start = this.Buffer.Offset + this.Position; + int p = start; + int end = this.Buffer.Offset + this.Buffer.Count; + + while (p < end) + { + byte b = buffer[p++]; + if (b == 0) + { + //TODO: decode \0\xFF ? 
+ if (p < end && buffer[p] == 0xFF) + { + // skip the next byte and continue + p++; + continue; + } + + this.Position = p - this.Buffer.Offset; + return new Slice(buffer, start, p - start); + } + } + + throw ThrowHelper.FormatException("Truncated byte string (expected terminal NUL not found)"); + } + + /// Reads a 7-bit encoded unsigned int (aka 'Varint16') from the buffer, and advances the cursor + /// Can Read up to 3 bytes from the input + [Pure] + public ushort ReadVarInt16() + { + //note: this could read up to 21 bits of data, so we check for overflow + return checked((ushort)ReadVarInt(3)); + } + + /// Reads a 7-bit encoded unsigned int (aka 'Varint32') from the buffer, and advances the cursor + /// Can Read up to 5 bytes from the input + [Pure] + public uint ReadVarInt32() + { + //note: this could read up to 35 bits of data, so we check for overflow + return checked((uint)ReadVarInt(5)); + } + + /// Reads a 7-bit encoded unsigned long (aka 'Varint32') from the buffer, and advances the cursor + /// Can Read up to 10 bytes from the input + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public ulong ReadVarInt64() + { + return ReadVarInt(10); + } + + /// Reads a Base 128 Varint from the input + /// Maximum number of bytes allowed (5 for 32 bits, 10 for 64 bits) + private ulong ReadVarInt(int count) + { + var buffer = this.Buffer.Array; + int p = this.Buffer.Offset + this.Position; + int end = this.Buffer.Offset + this.Buffer.Count; + + ulong x = 0; + int s = 0; + + // read bytes until the MSB is unset + while (count-- > 0) + { + if (p > end) throw ThrowHelper.FormatException("Truncated Varint"); + byte b = buffer[p++]; + + x |= (b & 0x7FUL) << s; + if (b < 0x80) + { + this.Position = p - this.Buffer.Offset; + return x; + } + s += 7; + } + throw ThrowHelper.FormatException("Malformed Varint"); + } + + /// Reads a variable sized slice, by first reading its size (stored as a Varint32) and then the data + [Pure] + public Slice ReadVarBytes() + { + uint 
size = ReadVarInt32(); + if (size > int.MaxValue) throw ThrowHelper.FormatException("Malformed variable-sized array"); + if (size == 0) return Slice.Empty; + return ReadBytes((int)size); + } + + /// Reads an utf-8 encoded string prefixed by a variable-sized length + [Pure, NotNull] + public string ReadVarString() + { + var str = ReadVarBytes(); + return str.ToStringUtf8(); + } + + /// Reads a string prefixed by a variable-sized length, using the specified encoding + /// Encoding used for this string (or UTF-8 if null) + [Pure, NotNull] + public string ReadVarString([CanBeNull] Encoding encoding) + { + if (encoding == null || encoding.Equals(Encoding.UTF8)) + { // optimized path for utf-8 + return ReadVarString(); + } + // generic decoding + var bytes = ReadVarBytes(); + return bytes.Count > 0 ? encoding.GetString(bytes.Array, bytes.Offset, bytes.Count) : string.Empty; + } + + /// Reads a 128-bit UUID + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid128 ReadUuid128() + { + return ReadBytes(16).ToUuid128(); + } + + /// Reads a 64-bit UUID + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid64 ReadUuid64() + { + return ReadBytes(8).ToUuid64(); + } + } + +} diff --git a/FoundationDB.Client/Utils/SliceStream.cs b/FoundationDB.Client/Shared/Memory/SliceStream.cs similarity index 79% rename from FoundationDB.Client/Utils/SliceStream.cs rename to FoundationDB.Client/Shared/Memory/SliceStream.cs index 3c79e3e41..cdb0370ca 100644 --- a/FoundationDB.Client/Utils/SliceStream.cs +++ b/FoundationDB.Client/Shared/Memory/SliceStream.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,13 +26,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Memory { - using FoundationDB.Async; - using FoundationDB.Client.Utils; using System; using System.IO; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Stream that wraps a Slice for reading /// This stream is optimized for blocking and async reads @@ -51,35 +51,23 @@ public SliceStream(Slice slice) #region Seeking... /// Returns true if the underlying slice is not null - public override bool CanSeek - { - get { return m_slice.HasValue; } - } + public override bool CanSeek => m_slice.HasValue; /// Gets or sets the current position in the underlying slice public override long Position { - get - { - return m_position; - } - set - { - Seek(value, SeekOrigin.Begin); - } + get => m_position; + set => Seek(value, SeekOrigin.Begin); } /// Getes the length of the underlying slice - public override long Length - { - get { return m_slice.Count; } - } + public override long Length => m_slice.Count; /// Seeks to a specific location in the underlying slice public override long Seek(long offset, SeekOrigin origin) { if (!m_slice.HasValue) StreamIsClosed(); - if (offset > int.MaxValue) throw new ArgumentOutOfRangeException("offset"); + if (offset > int.MaxValue) throw new ArgumentOutOfRangeException(nameof(offset)); switch (origin) { @@ -127,10 +115,7 @@ public override void SetLength(long value) #region Reading... /// Returns true unless the current position is after the end of the underlying slice - public override bool CanRead - { - get { return m_position < m_slice.Count; } - } + public override bool CanRead => m_position < m_slice.Count; /// Reads from byte from the underyling slice and advances the position within the slice by one byte, or returns -1 if the end of the slice has been reached. 
public override int ReadByte() @@ -177,16 +162,14 @@ public override int Read(byte[] buffer, int offset, int count) return remaining; } -#if !NET_4_0 - /// Asynchronously reads a sequence of bytes from the underlying slice and advances the position within the slice by the number of bytes read. - public override Task ReadAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken cancellationToken) + public override Task ReadAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken ct) { ValidateBuffer(buffer, offset, count); - if (cancellationToken.IsCancellationRequested) + if (ct.IsCancellationRequested) { - return TaskHelpers.FromCancellation(cancellationToken); + return Task.FromCanceled(ct); } try @@ -199,38 +182,38 @@ public override Task ReadAsync(byte[] buffer, int offset, int count, System } catch (Exception e) { - return TaskHelpers.FromException(e); + return Task.FromException(e); } } /// Asynchronously reads the bytes from the underlying slice and writes them to another stream, using a specified buffer size and cancellation token. 
- public override Task CopyToAsync(Stream destination, int bufferSize, System.Threading.CancellationToken cancellationToken) + public override Task CopyToAsync(Stream destination, int bufferSize, System.Threading.CancellationToken ct) { Contract.Ensures(m_position >= 0 && m_position <= m_slice.Count); - if (destination == null) throw new ArgumentNullException("destination"); - if (!destination.CanWrite) throw new ArgumentException("The destination stream cannot be written to", "destination"); + Contract.NotNull(destination, nameof(destination)); + if (!destination.CanWrite) throw new ArgumentException("The destination stream cannot be written to", nameof(destination)); int remaining = m_slice.Count - m_position; - if (remaining <= 0) return TaskHelpers.CompletedTask; + if (remaining <= 0) return Task.CompletedTask; // simulate the read m_position += remaining; // we can write everyting in one go, so just call WriteAsync and return that - return destination.WriteAsync(m_slice.Array, m_slice.Offset, remaining, cancellationToken); + return destination.WriteAsync(m_slice.Array, m_slice.Offset, remaining, ct); } -#endif - #endregion #region Writing... /// Always return false - public override bool CanWrite + public override bool CanWrite => false; + + public override void WriteByte(byte value) { - get { return false; } + throw new NotSupportedException(); } /// This methods is not supported @@ -239,16 +222,12 @@ public override void Write(byte[] buffer, int offset, int count) throw new NotSupportedException(); } -#if !NET_4_0 - /// This methods is not supported - public override Task WriteAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken cancellationToken) + public override Task WriteAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken ct) { - return TaskHelpers.FromException(new NotSupportedException()); + return Task.FromException(new NotSupportedException()); } -#endif - /// This methods does nothing. 
public override void Flush() { @@ -256,25 +235,25 @@ public override void Flush() } /// This methods does nothing. - public override Task FlushAsync(System.Threading.CancellationToken cancellationToken) + public override Task FlushAsync(System.Threading.CancellationToken ct) { // Not supported, but don't throw here - return TaskHelpers.CompletedTask; + return Task.CompletedTask; } #endregion private static void ValidateBuffer(byte[] buffer, int offset, int count) { - if (buffer == null) throw new ArgumentNullException("buffer"); - if (count < 0) throw new ArgumentOutOfRangeException("count", "Count cannot be less than zero"); - if (offset < 0) throw new ArgumentOutOfRangeException("offset", "Offset cannot be less than zero"); - if (offset > buffer.Length - count) throw new ArgumentException("Offset and count must fit inside the buffer"); + Contract.NotNull(buffer, nameof(buffer)); + if (count < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), "Count cannot be less than zero."); + if ((uint) offset > buffer.Length - count) throw ThrowHelper.ArgumentException(nameof(offset), "Buffer is too small."); } + [ContractAnnotation("=> halt")] private static void StreamIsClosed() { - throw new ObjectDisposedException(null, "The stream was already closed"); + throw ThrowHelper.ObjectDisposedException("The stream was already closed"); } /// Closes the stream diff --git a/FoundationDB.Client/Shared/Memory/SliceWriter.cs b/FoundationDB.Client/Shared/Memory/SliceWriter.cs new file mode 100644 index 000000000..70661724e --- /dev/null +++ b/FoundationDB.Client/Shared/Memory/SliceWriter.cs @@ -0,0 +1,2294 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +//#define ENABLE_ARRAY_POOL +//#define ENABLE_SPAN + +namespace Doxense.Memory +{ + using System; + using System.Diagnostics; + using System.Globalization; + using System.Runtime.CompilerServices; + using System.Text; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; +#if ENABLE_SPAN + using System.Runtime.InteropServices; +#endif + + /// Slice buffer that emulates a pseudo-stream using a byte array that will automatically grow in size, if necessary + /// This struct MUST be passed by reference! + [PublicAPI, DebuggerDisplay("Position={Position}, Capacity={Capacity}"), DebuggerTypeProxy(typeof(SliceWriter.DebugView))] + [DebuggerNonUserCode] //remove this when you need to troubleshoot this class! 
+ public struct SliceWriter + { + // Invariant + // * Valid data always start at offset 0 + // * 'this.Position' is equal to the current size as well as the offset of the next available free spot + // * 'this.Buffer' is either null (meaning newly created stream), or is at least as big as this.Position + + #region Private Members... + + /// Buffer holding the data + public byte[] Buffer; + + /// Position in the buffer ( == number of already written bytes) + public int Position; + + #endregion + + #region Constructors... + + /// Create a new empty binary buffer with an initial allocated size + /// Initial capacity of the buffer + public SliceWriter(int capacity) + { + Contract.Positive(capacity, nameof(capacity)); + +#if ENABLE_ARRAY_POOL + this.Buffer = capacity == 0 ? Array.Empty() : ArrayPool.Shared.Rent(capacity); +#else + this.Buffer = capacity == 0 ? Array.Empty() : new byte[capacity]; +#endif + this.Position = 0; + } + + /// Create a new binary writer using an existing buffer + /// Initial buffer + /// Since the content of the will be modified, only a temporary or scratch buffer should be used. If the writer needs to grow, a new buffer will be allocated. + public SliceWriter([NotNull] byte[] buffer) + : this(buffer, 0) + { } + + /// Create a new binary buffer using an existing buffer and with the cursor to a specific location + /// Since the content of the will be modified, only a temporary or scratch buffer should be used. If the writer needs to grow, a new buffer will be allocated. 
+ public SliceWriter([NotNull] byte[] buffer, int index) + { + Contract.NotNull(buffer, nameof(buffer)); + Contract.Between(index, 0, buffer.Length, nameof(index)); + + this.Buffer = buffer; + this.Position = index; + } + + /// Creates a new binary buffer, initialized by copying pre-existing data + /// Data that will be copied at the start of the buffer + /// Optional initial capacity of the buffer + /// The cursor will already be placed at the end of the prefix + public SliceWriter(Slice prefix, int capacity = 0) + { + prefix.EnsureSliceIsValid(); + Contract.Positive(capacity, nameof(capacity)); + + int n = prefix.Count; + Contract.Assert(n >= 0); + + if (capacity == 0) + { // most frequent usage is to add a packed integer at the end of a prefix + capacity = BitHelpers.AlignPowerOfTwo(n + 8, 16); + } + else + { + capacity = BitHelpers.AlignPowerOfTwo(Math.Max(capacity, n), 16); + } + +#if ENABLE_ARRAY_POOL + var buffer = ArrayPool.Shared.Rent(capacity); +#else + var buffer = new byte[capacity]; +#endif + if (n > 0) prefix.CopyTo(buffer, 0); + + this.Buffer = buffer; + this.Position = n; + } + + #endregion + + #region Public Properties... + + /// Returns true if the buffer contains at least some data + public bool HasData => this.Position > 0; + + /// Capacity of the internal buffer + public int Capacity => this.Buffer?.Length ?? 0; + + /// Return the byte at the specified index + /// Index in the buffer (0-based if positive, from the end if negative) + public byte this[int index] + { + [Pure] + get + { + int pos = this.Position; + Contract.Assert(this.Buffer != null && pos >= 0); + //note: we will get bound checking for free in release builds + if (index < 0) index += pos; + if ((uint) index >= pos) throw ThrowHelper.IndexOutOfRangeException(); + return this.Buffer[index]; + } + } + + /// Returns a slice pointing to a segment inside the buffer + /// The starting position of the substring. 
Positive values means from the start, negative values means from the end + /// The end position (excluded) of the substring. Positive values means from the start, negative values means from the end + /// Slice that corresponds to the section selected. If the if equal to or greater than then an empty Slice is returned + /// If either or is outside of the currently allocated buffer. + public Slice this[int? beginInclusive, int? endExclusive] + { + [Pure] + get + { + int from = beginInclusive ?? 0; + int pos = this.Position; + int until = endExclusive ?? pos; + + // remap negative indexes + if (from < 0) from += pos; + if (until < 0) until += pos; + + // bound check + if ((uint) from >= pos) throw ThrowHelper.ArgumentOutOfRangeException(nameof(beginInclusive), beginInclusive, "The start index must be inside the bounds of the buffer."); + if ((uint) until > pos) throw ThrowHelper.ArgumentOutOfRangeException(nameof(endExclusive), endExclusive, "The end index must be inside the bounds of the buffer."); + + // chop chop + int count = until - from; + return count > 0 ? new Slice(this.Buffer, from, count) : Slice.Empty; + } + } + + #endregion + + /// Returns a byte array filled with the contents of the buffer + /// The buffer is copied in the byte array. And change to one will not impact the other + [Pure, NotNull] + public byte[] GetBytes() + { + int p = this.Position; + if (p == 0) return Array.Empty(); + + var bytes = new byte[p]; + if (p > 0) + { + Contract.Assert(this.Buffer != null && this.Buffer.Length >= this.Position); + UnsafeHelpers.CopyUnsafe(bytes, 0, this.Buffer, 0, bytes.Length); + } + return bytes; + } + + /// Returns a buffer segment pointing to the content of the buffer + /// Any change to the segment will change the buffer ! 
+ [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public ArraySegment ToArraySegment() + { + return ToSlice(); + } + + /// Returns a slice pointing to the content of the buffer + /// Any change to the slice will change the buffer ! + [Pure] + public Slice ToSlice() + { + var buffer = this.Buffer; + var p = this.Position; + if (buffer == null | p == 0) + { // empty buffer + return Slice.Empty; + } + Contract.Assert(buffer.Length >= p, "Current position is outside of the buffer"); + return new Slice(buffer, 0, p); + } + + /// Returns a slice pointing to the first bytes of the buffer + /// Size of the segment to return. + /// Slice that contains the first bytes written to this buffer + /// Any change to the slice will change the buffer ! + /// + /// ({HELLO WORLD}).Head(5) => {HELLO} + /// ({HELLO WORLD}).Head(1) => {H} + /// {{HELLO WORLD}).Head(0) => {} + /// + /// If is less than zero, or larger than the current buffer size + [Pure] + public Slice Head(int count) + { + if (count == 0) return Slice.Empty; + if ((uint) count > this.Position) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), "Buffer is too small"); + return new Slice(this.Buffer, 0, count); + } + + /// Returns a slice pointing to the first bytes of the buffer + /// Size of the segment to return. + /// Slice that contains the first bytes written to this buffer + /// Any change to the slice will change the buffer ! + /// + /// ({HELLO WORLD}).Head(5) => {HELLO} + /// ({HELLO WORLD}).Head(1) => {H} + /// {{HELLO WORLD}).Head(0) => {} + /// + /// If is less than zero, or larger than the current buffer size + [Pure] + public Slice Head(uint count) + { + if (count == 0) return Slice.Empty; + if (count > this.Position) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), "Buffer is too small"); + return new Slice(this.Buffer, 0, (int) count); + } + + /// Returns a slice pointer to the last bytes of the buffer + /// Size of the segment to return. 
+ /// Slice that contains the last bytes written to this buffer + /// Any change to the slice will change the buffer ! + /// + /// ({HELLO WORLD}).Tail(5) => {WORLD} + /// ({HELLO WORLD}).Tail(1) => {D} + /// {{HELLO WORLD}).Tail(0) => {} + /// + /// If is less than zero, or larger than the current buffer size + public Slice Tail(int count) + { + if (count == 0) return Slice.Empty; + int p = this.Position; + if ((uint) count > p) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), "Buffer is too small"); + return new Slice(this.Buffer, p - count, count); + } + + /// Returns a slice pointer to the last bytes of the buffer + /// Size of the segment to return. + /// Slice that contains the last bytes written to this buffer + /// Any change to the slice will change the buffer ! + /// + /// ({HELLO WORLD}).Tail(5) => {WORLD} + /// ({HELLO WORLD}).Tail(1) => {D} + /// {{HELLO WORLD}).Tail(0) => {} + /// + /// If is less than zero, or larger than the current buffer size + public Slice Tail(uint count) + { + if (count == 0) return Slice.Empty; + int p = this.Position; + if (count > p) throw ThrowHelper.ArgumentOutOfRangeException(nameof(count), "Buffer is too small"); + return new Slice(this.Buffer, p - (int) count, (int) count); + } + + /// Returns a slice pointing to a segment inside the buffer + /// Offset of the segment from the start of the buffer + /// Any change to the slice will change the buffer ! + /// If is less then zero, or after the current position + [Pure] + public Slice Substring(int offset) + { + int p = this.Position; + if (offset < 0 || offset > p) throw ThrowHelper.ArgumentException(nameof(offset), "Offset must be inside the buffer"); + int count = p - offset; + return count > 0 ? new Slice(this.Buffer, offset, p - offset) : Slice.Empty; + } + + /// Returns a slice pointing to a segment inside the buffer + /// Offset of the segment from the start of the buffer + /// Size of the segment + /// Any change to the slice will change the buffer ! 
+ /// If either or are less then zero, or do not fit inside the current buffer + [Pure] + public Slice Substring(int offset, int count) + { + int p = this.Position; + if ((uint) offset >= p) throw ThrowHelper.ArgumentException(nameof(offset), "Offset must be inside the buffer"); + if (count < 0 | offset + count > p) throw ThrowHelper.ArgumentException(nameof(count), "The buffer is too small"); + + return count > 0 ? new Slice(this.Buffer, offset, count) : Slice.Empty; + } + + /// Truncate the buffer by setting the cursor to the specified position. + /// New size of the buffer + /// If the buffer was smaller, it will be resized and filled with zeroes. If it was biffer, the cursor will be set to the specified position, but previous data will not be deleted. + public void SetLength(int position) + { + Contract.Requires(position >= 0); + + int p = this.Position; + if (p < position) + { + int missing = position - p; + var buffer = EnsureBytes(missing); + //TODO: native memset() ? + Array.Clear(buffer, p, missing); + } + this.Position = position; + } + + /// Delete the first N bytes of the buffer, and shift the remaining to the front + /// Number of bytes to remove at the head of the buffer + /// New size of the buffer (or 0 if it is empty) + /// This should be called after every successfull write to the underlying stream, to update the buffer. + public int Flush(int bytes) //REVIEW: plutot renommer en "RemoveHead"? ou faire un vrai "RemoveAt(offset, count)" ? + { + if (bytes == 0) return this.Position; + if (bytes < 0) throw ThrowHelper.ArgumentOutOfRangeException(nameof(bytes)); + + if (bytes < this.Position) + { // copy the left over data to the start of the buffer + int remaining = this.Position - bytes; + UnsafeHelpers.CopyUnsafe(this.Buffer, 0, this.Buffer, bytes, remaining); + this.Position = remaining; + return remaining; + } + else + { + //REVIEW: should we throw if there are less bytes in the buffer than we want to flush ? 
+ this.Position = 0; + return 0; + } + } + + /// Empties the current buffer after a succesfull write + /// If true, fill the existing buffer with zeroes, if it is reused, to ensure that no previous data can leak. + /// If the current buffer is large enough, and less than 1/8th was used, then it will be discarded and a new smaller one will be allocated as needed + public void Reset(bool zeroes = false) + { + if (this.Position > 0) + { + Contract.Assert(this.Buffer != null && this.Buffer.Length >= this.Position); + // reduce size ? + // If the buffer exceeds 64K and we used less than 1/8 of it the last time, we will "shrink" the buffer + if (this.Buffer.Length > 65536 && this.Position <= (this.Buffer.Length >> 3)) + { // kill the buffer + this.Buffer = null; + //TODO: return to a central buffer pool? + } + else if (zeroes) + { // Clear it + unsafe + { + fixed (byte* ptr = this.Buffer) + { + UnsafeHelpers.ClearUnsafe(ptr, checked((uint)this.Buffer.Length)); + } + } + } + this.Position = 0; + } + } + + /// Advance the cursor of the buffer without writing anything, and return the previous position + /// Number of bytes to skip + /// Pad value (0xFF by default) + /// Position of the cursor BEFORE moving it. 
Can be used as a marker to go back later and fill some value + /// Will fill the skipped bytes with + public int Skip(int skip, byte pad = 0xFF) + { + Contract.Requires(skip >= 0); + + var buffer = EnsureBytes(skip); + int p = this.Position; + if (skip == 0) return p; + if (skip <= 8) + { + for (int i = 0; i < skip; i++) + { + buffer[p + i] = pad; + } + } + else + { + unsafe + { + fixed (byte* ptr = &buffer[p]) + { + UnsafeHelpers.FillUnsafe(ptr, checked((uint) skip), pad); + } + } + } + this.Position = p + skip; + return p; + } + + /// Advance the cursor by the specified amount, and return the skipped over chunk (that can be filled later by the caller) + /// Number of bytes to allocate + /// Pad value (0xFF by default) + /// Slice that corresponds to the reserved segment in the buffer + /// Will fill the reserved segment with and the cursor will be positionned immediately after the segment. + public Slice Allocate(int count, byte pad = 0xFF) + { + Contract.Positive(count, nameof(count)); + if (count == 0) return Slice.Empty; + + int offset = Skip(count, pad); + return new Slice(this.Buffer, offset, count); + } + + /// Advance the cursor by the amount required end up on an aligned byte position + /// Number of bytes to align to + /// Pad value (0 by default) + public void Align(int aligment, byte pad = 0) + { + Contract.Requires(aligment > 0); + int r = this.Position % aligment; + if (r > 0) Skip(aligment - r, pad); + } + + /// Rewinds the cursor to a previous position in the buffer, while saving the current position + /// Will receive the current cursor position + /// Previous position in the buffer + public void Rewind(out int cursor, int position) + { + Contract.Requires(position >= 0 && position <= this.Position); + cursor = this.Position; + this.Position = position; + } + + #region Bytes... 
+ + /// Add a byte to the end of the buffer, and advance the cursor + /// Byte, 8 bits + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteByte(byte value) + { + var buffer = EnsureBytes(1); + int p = this.Position; + buffer[p] = value; + this.Position = p + 1; + } + + /// Add a byte to the end of the buffer, and advance the cursor + /// Byte, 8 bits + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteByte(int value) + { + var buffer = EnsureBytes(1); + int p = this.Position; + buffer[p] = (byte) value; + this.Position = p + 1; + } + + /// Add a byte to the end of the buffer, and advance the cursor + /// Byte, 8 bits + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteByte(sbyte value) + { + var buffer = EnsureBytes(1); + int p = this.Position; + buffer[p] = (byte) value; + this.Position = p + 1; + } + + /// Add a 1-byte boolean to the end of the buffer, and advance the cursor + /// Boolean, encoded as either 0 or 1. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteByte(bool value) + { + var buffer = EnsureBytes(1); + int p = this.Position; + buffer[p] = value ? (byte) 1 : (byte) 0; + this.Position = p + 1; + } + + /// Dangerously write a sigle byte at the end of the buffer, without any capacity checks! + /// + /// This method DOES NOT check the buffer capacity before writing, and caller MUST have resized the buffer beforehand! + /// Failure to do so may introduce memory correction (buffer overflow!). + /// This should ONLY be used in performance-sensitive code paths that have been audited thoroughly! 
+ /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void UnsafeWriteByte(byte value) + { + Contract.Requires(this.Buffer != null && this.Position < this.Buffer.Length); + this.Buffer[this.Position++] = value; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteBytes(byte value1, byte value2) + { + var buffer = EnsureBytes(2); + int p = this.Position; + buffer[p] = value1; + buffer[p + 1] = value2; + this.Position = p + 2; + } + + /// Dangerously write two bytes at the end of the buffer, without any capacity checks! + /// + /// This method DOES NOT check the buffer capacity before writing, and caller MUST have resized the buffer beforehand! + /// Failure to do so may introduce memory correction (buffer overflow!). + /// This should ONLY be used in performance-sensitive code paths that have been audited thoroughly! + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void UnsafeWriteBytes(byte value1, byte value2) + { + Contract.Requires(this.Buffer != null && this.Position + 1 < this.Buffer.Length); + int p = this.Position; + this.Buffer[p] = value1; + this.Buffer[p + 1] = value2; + this.Position = p + 2; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteBytes(byte value1, byte value2, byte value3) + { + var buffer = EnsureBytes(3); + int p = this.Position; + buffer[p] = value1; + buffer[p + 1] = value2; + buffer[p + 2] = value3; + this.Position = p + 3; + } + + /// Dangerously write three bytes at the end of the buffer, without any capacity checks! + /// + /// This method DOES NOT check the buffer capacity before writing, and caller MUST have resized the buffer beforehand! + /// Failure to do so may introduce memory correction (buffer overflow!). + /// This should ONLY be used in performance-sensitive code paths that have been audited thoroughly! 
+ /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void UnsafeWriteBytes(byte value1, byte value2, byte value3) + { + Contract.Requires(this.Buffer != null && this.Position + 2 < this.Buffer.Length); + var buffer = this.Buffer; + int p = this.Position; + buffer[p] = value1; + buffer[p + 1] = value2; + buffer[p + 2] = value3; + this.Position = p + 3; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteBytes(byte value1, byte value2, byte value3, byte value4) + { + var buffer = EnsureBytes(4); + int p = this.Position; + buffer[p] = value1; + buffer[p + 1] = value2; + buffer[p + 2] = value3; + buffer[p + 3] = value4; + this.Position = p + 4; + } + + /// Dangerously write four bytes at the end of the buffer, without any capacity checks! + /// + /// This method DOES NOT check the buffer capacity before writing, and caller MUST have resized the buffer beforehand! + /// Failure to do so may introduce memory correction (buffer overflow!). + /// This should ONLY be used in performance-sensitive code paths that have been audited thoroughly! + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void UnsafeWriteBytes(byte value1, byte value2, byte value3, byte value4) + { + Contract.Requires(this.Buffer != null && this.Position + 3 < this.Buffer.Length); + var buffer = this.Buffer; + int p = this.Position; + buffer[p] = value1; + buffer[p + 1] = value2; + buffer[p + 2] = value3; + buffer[p + 3] = value4; + this.Position = p + 4; + } + + /// Dangerously write five bytes at the end of the buffer, without any capacity checks! + /// + /// This method DOES NOT check the buffer capacity before writing, and caller MUST have resized the buffer beforehand! + /// Failure to do so may introduce memory correction (buffer overflow!). + /// This should ONLY be used in performance-sensitive code paths that have been audited thoroughly! 
+ /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteBytes(byte value1, byte value2, byte value3, byte value4, byte value5) + { + var buffer = EnsureBytes(5); + int p = this.Position; + buffer[p] = value1; + buffer[p + 1] = value2; + buffer[p + 2] = value3; + buffer[p + 3] = value4; + buffer[p + 4] = value5; + this.Position = p + 5; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void UnsafeWriteBytes(byte value1, byte value2, byte value3, byte value4, byte value5) + { + Contract.Requires(this.Buffer != null && this.Position + 4 < this.Buffer.Length); + var buffer = this.Buffer; + int p = this.Position; + buffer[p] = value1; + buffer[p + 1] = value2; + buffer[p + 2] = value3; + buffer[p + 3] = value4; + buffer[p + 4] = value5; + this.Position = p + 5; + } + + /// Write a byte array to the end of the buffer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteBytes([CanBeNull] byte[] data) + { + if (data != null) + { + WriteBytes(data, 0, data.Length); + } + } + + /// Write a chunk of a byte array to the end of the buffer + public void WriteBytes(byte[] data, int offset, int count) + { + if (count > 0) + { + UnsafeHelpers.EnsureBufferIsValidNotNull(data, offset, count); + int p = this.Position; + UnsafeHelpers.CopyUnsafe(EnsureBytes(count), p, data, offset, count); + this.Position = checked(p + count); + } + } + + /// Write a chunk of a byte array to the end of the buffer, with a prefix + public void WriteBytes(byte prefix, byte[] data, int offset, int count) + { + if (count >= 0) + { + if (count > 0) UnsafeHelpers.EnsureBufferIsValidNotNull(data, offset, count); + var buffer = EnsureBytes(count + 1); + int p = this.Position; + buffer[p] = prefix; + if (count > 0) UnsafeHelpers.CopyUnsafe(buffer, p + 1, data, offset, count); + this.Position = checked(p + 1 + count); + } + } + + /// Dangerously write a chunk of memory to the end of the buffer, without any capacity checks! 
+ /// + /// This method DOES NOT check the buffer capacity before writing, and caller MUST have resized the buffer beforehand! + /// Failure to do so may introduce memory correction (buffer overflow!). + /// This should ONLY be used in performance-sensitive code paths that have been audited thoroughly! + /// + public void UnsafeWriteBytes(byte[] data, int offset, int count) + { + Contract.Requires(this.Buffer != null && this.Position >= 0 && data != null && count >= 0 && this.Position + count <= this.Buffer.Length && offset >= 0 && offset + count <= data.Length); + + if (count > 0) + { + int p = this.Position; + UnsafeHelpers.CopyUnsafe(this.Buffer, p, data, offset, count); + this.Position = checked(p + count); + } + } + + /// Write a segment of bytes to the end of the buffer + public void WriteBytes(Slice data) + { + data.EnsureSliceIsValid(); + + int count = data.Count; + if (count > 0) + { + int p = this.Position; + UnsafeHelpers.CopyUnsafe(EnsureBytes(count), p, data.Array, data.Offset, count); + this.Position = checked(p + count); + } + } + + /// Write a segment of bytes to the end of the buffer + public void WriteBytes(ref Slice data) + { + data.EnsureSliceIsValid(); + + int count = data.Count; + if (count > 0) + { + int p = this.Position; + UnsafeHelpers.CopyUnsafe(EnsureBytes(count), p, data.Array, data.Offset, count); + this.Position = checked(p + count); + } + } + +#if ENABLE_SPAN + /// Write a segment of bytes to the end of the buffer + public void WriteBytes(ReadOnlySpan data) + { + int count = data.Length; + if (count > 0) + { + int p = this.Position; + UnsafeHelpers.CopyUnsafe(EnsureBytes(count), p, data); + } + } +#endif + + /// Write a segment of bytes to the end of the buffer, with a prefix + public void WriteBytes(byte prefix, Slice data) + { + data.EnsureSliceIsValid(); + + int count = data.Count; + var buffer = EnsureBytes(count + 1); + int p = this.Position; + buffer[p] = prefix; + if (count > 0) UnsafeHelpers.CopyUnsafe(buffer, p + 1, 
data.Array, data.Offset, count); + this.Position = checked(p + count + 1); + } + +#if ENABLE_SPAN + /// Write a segment of bytes to the end of the buffer, with a prefix + public void WriteBytes(byte prefix, ReadOnlySpan data) + { + int count = data.Length; + var buffer = EnsureBytes(count + 1); + int p = this.Position; + buffer[p] = prefix; + if (count > 0) + { + UnsafeHelpers.CopyUnsafe(buffer, p + 1, data); + } + this.Position = checked(p + count + 1); + } +#endif + + /// Write a segment of bytes to the end of the buffer + public unsafe void WriteBytes(byte* data, uint count) + { + if (count == 0) return; + if (data == null) throw ThrowHelper.ArgumentNullException(nameof(data)); + + var buffer = EnsureBytes(count); + int p = this.Position; + Contract.Assert(buffer != null && p >= 0 && p + count <= buffer.Length); + + //note: we compute the end offset BEFORE, to protect against arithmetic overflow + int q = checked((int)(p + count)); + UnsafeHelpers.CopyUnsafe(buffer, p, data, count); + this.Position = q; + } + + /// Append a segment of bytes with a prefix to the end of the buffer + /// Byte added before the data + /// Pointer to the start of the data to append + /// Number of bytes to append (excluding the prefix) + public unsafe void WriteBytes(byte prefix, byte* data, uint count) + { + if (count != 0 && data == null) throw ThrowHelper.ArgumentNullException(nameof(data)); + + var buffer = EnsureBytes(count + 1); + int p = this.Position; + Contract.Assert(buffer != null && p >= 0 && p + 1 + count <= buffer.Length); + + //note: we compute the end offset BEFORE, to protect against arithmetic overflow + int q = checked((int)(p + 1 +count)); + buffer[p] = prefix; + UnsafeHelpers.CopyUnsafe(buffer, p + 1, data, count); + this.Position = q; + } + + /// Dangerously write a segment of bytes at the end of the buffer, without any capacity checks! + /// + /// This method DOES NOT check the buffer capacity before writing, and caller MUST have resized the buffer beforehand! 
+ /// Failure to do so may introduce memory correction (buffer overflow!). + /// This should ONLY be used in performance-sensitive code paths that have been audited thoroughly! + /// + public unsafe void UnsafeWriteBytes(byte* data, uint count) + { + if (count != 0) + { + int p = this.Position; + Contract.Requires(this.Buffer != null && p >= 0 && data != null && p + count <= this.Buffer.Length); + + int q = checked((int)(p + count)); + UnsafeHelpers.CopyUnsafe(this.Buffer, p, data, count); + this.Position = q; + } + } + + // Appending is used when the caller want to get a Slice that points to the location where the bytes where written in the internal buffer + + /// Append a byte array to the end of the buffer + public Slice AppendBytes(byte[] data) + { + if (data == null) return Slice.Empty; + return AppendBytes(data, 0, data.Length); + } + + /// Append a chunk of a byte array to the end of the buffer + [Pure] + public Slice AppendBytes(byte[] data, int offset, int count) + { + if (count == 0) return Slice.Empty; + + UnsafeHelpers.EnsureBufferIsValidNotNull(data, offset, count); + int p = this.Position; + var buffer = EnsureBytes(count); + UnsafeHelpers.CopyUnsafe(buffer, p, data, offset, count); + this.Position = checked(p + count); + return new Slice(buffer, p, count); + } + + /// Append a segment of bytes to the end of the buffer + /// Buffer containing the data to append + /// Slice that maps the interned data using the writer's buffer. + /// If you do not need the resulting Slice, you should call instead! 
+ [Pure] + public Slice AppendBytes(Slice data) + { + data.EnsureSliceIsValid(); + + int count = data.Count; + if (count == 0) return Slice.Empty; + + int p = this.Position; + var buffer = EnsureBytes(count); + UnsafeHelpers.CopyUnsafe(buffer, p, data.Array, data.Offset, count); + this.Position = checked(p + count); + return new Slice(buffer, p, count); + } + + /// Write a segment of bytes to the end of the buffer + /// Buffer containing the data to append + /// Slice that maps the interned data using the writer's buffer. + /// If you do not need the resulting Slice, you should call instead! + [Pure] + public Slice AppendBytes(ref Slice data) + { + data.EnsureSliceIsValid(); + + int count = data.Count; + if (count == 0) return Slice.Empty; + + int p = this.Position; + var buffer = EnsureBytes(count); + UnsafeHelpers.CopyUnsafe(buffer, p, data.Array, data.Offset, count); + this.Position = checked(p + count); + return new Slice(buffer, p, count); + } + + /// Append a segment of bytes to the end of the buffer + /// Pointer to the start of the data to append + /// Number of bytes to append + /// Slice that maps to the section of buffer that contains the appended data + /// If you do not need the resulting Slice, you should call instead! 
		[Pure]
		public unsafe Slice AppendBytes(byte* data, uint count)
		{
			if (count == 0) return Slice.Empty;
			if (data == null) throw ThrowHelper.ArgumentNullException(nameof(data));

			var buffer = EnsureBytes(count);
			int p = this.Position;
			Contract.Assert(buffer != null && p >= 0 && p + count <= buffer.Length);

			int q = checked((int)(p + count));
			UnsafeHelpers.CopyUnsafe(buffer, p, data, count);
			this.Position = q;
			return new Slice(buffer, p, q - p);
		}

		#endregion

		#region Fixed, Little-Endian

		/// <summary>Writes a 16-bit signed integer, using little-endian encoding</summary>
		/// <remarks>Advances the cursor by 2 bytes</remarks>
		public void WriteFixed16(short value)
		{
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &EnsureBytes(2)[p])
				{
					UnsafeHelpers.StoreInt16LE(ptr, value);
				}
			}
			this.Position = p + 2;
		}

		/// <summary>Writes a 16-bit unsigned integer, using little-endian encoding</summary>
		/// <remarks>Advances the cursor by 2 bytes</remarks>
		public void WriteFixed16(ushort value)
		{
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &EnsureBytes(2)[p])
				{
					UnsafeHelpers.StoreUInt16LE(ptr, value);
				}
			}
			this.Position = p + 2;
		}

		/// <summary>Writes a 24-bit signed integer, using little-endian encoding</summary>
		/// <remarks>Advances the cursor by 3 bytes. Only the lowest 24 bits of <paramref name="value"/> are stored.</remarks>
		public void WriteFixed24(int value)
		{
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &EnsureBytes(3)[p])
				{
					UnsafeHelpers.StoreUInt24LE(ptr, (uint) value);
				}
			}
			this.Position = p + 3;
		}

		/// <summary>Writes a 24-bit unsigned integer, using little-endian encoding</summary>
		/// <remarks>Advances the cursor by 3 bytes. Only the lowest 24 bits of <paramref name="value"/> are stored.</remarks>
		public void WriteFixed24(uint value)
		{
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &EnsureBytes(3)[p])
				{
					UnsafeHelpers.StoreUInt24LE(ptr, value);
				}
			}
			this.Position = p + 3;
		}

		/// <summary>Writes a 32-bit signed integer, using little-endian encoding</summary>
		/// <remarks>Advances the cursor by 4 bytes</remarks>
		public void WriteFixed32(int value)
		{
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &EnsureBytes(4)[p])
				{
					UnsafeHelpers.WriteFixed32Unsafe(ptr, (uint) value);
				}
			}
			this.Position = p + 4;
		}

		/// <summary>Writes a 32-bit unsigned integer, using little-endian encoding</summary>
		/// <remarks>Advances the cursor by 4 bytes</remarks>
		public void WriteFixed32(uint value)
		{
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &EnsureBytes(4)[p])
				{
					UnsafeHelpers.WriteFixed32Unsafe(ptr, value);
				}
			}
			this.Position = p + 4;
		}

		/// <summary>Writes a 64-bit signed integer, using little-endian encoding</summary>
		/// <remarks>Advances the cursor by 8 bytes</remarks>
		public void WriteFixed64(long value)
		{
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &EnsureBytes(8)[p])
				{
					UnsafeHelpers.WriteFixed64Unsafe(ptr, (ulong) value);
				}
			}
			this.Position = p + 8;
		}

		/// <summary>Writes a 64-bit unsigned integer, using little-endian encoding</summary>
		/// <remarks>Advances the cursor by 8 bytes</remarks>
		public void WriteFixed64(ulong value)
		{
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &EnsureBytes(8)[p])
				{
					UnsafeHelpers.WriteFixed64Unsafe(ptr, value);
				}
			}
			this.Position = p + 8;
		}

		#endregion

		#region Fixed, Big-Endian

		/// <summary>Writes a 16-bit signed integer, using big-endian encoding</summary>
		/// <remarks>Advances the cursor by 2 bytes. Only the lowest 16 bits of <paramref name="value"/> are stored.</remarks>
		public void WriteFixed16BE(int value)
		{
			var buffer = EnsureBytes(2);
			int p = this.Position;
			buffer[p] = (byte)(value >> 8);
			buffer[p + 1] = (byte)value;
			this.Position = p + 2;
		}

		/// <summary>Writes a 16-bit unsigned integer, using big-endian encoding</summary>
		/// <remarks>Advances the cursor by 2 bytes. Only the lowest 16 bits of <paramref name="value"/> are stored.</remarks>
		public void WriteFixed16BE(uint value)
		{
			var buffer = EnsureBytes(2);
			int p = this.Position;
			buffer[p] = (byte)(value >> 8);
			buffer[p + 1] = (byte)value;
			this.Position = p + 2;
		}

		/// <summary>Writes a 24-bit signed integer, using big-endian encoding</summary>
		/// <remarks>Advances the cursor by 3 bytes</remarks>
		public void WriteFixed24BE(int value)
		{
			var buffer = EnsureBytes(3);
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &buffer[p])
				{
					UnsafeHelpers.StoreInt24BE(ptr, value);
				}
			}
			this.Position = p + 3;
		}

		/// <summary>Writes a 24-bit unsigned integer, using big-endian encoding</summary>
		/// <remarks>Advances the cursor by 3 bytes</remarks>
		public void WriteFixed24BE(uint value)
		{
			var buffer = EnsureBytes(3);
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &buffer[p])
				{
					UnsafeHelpers.StoreUInt24BE(ptr, value);
				}
			}
			this.Position = p + 3;
		}

		/// <summary>Writes a 32-bit signed integer, using big-endian encoding</summary>
		/// <remarks>Advances the cursor by 4 bytes</remarks>
		public void WriteFixed32BE(int value)
		{
			var buffer = EnsureBytes(4);
			int p = this.Position;
			buffer[p] = (byte)(value >> 24);
			buffer[p + 1] = (byte)(value >> 16);
			buffer[p + 2] = (byte)(value >> 8);
			buffer[p + 3] = (byte)(value);
			this.Position = p + 4;
		}

		/// <summary>Writes a 32-bit unsigned integer, using big-endian encoding</summary>
		/// <remarks>Advances the cursor by 4 bytes</remarks>
		public void WriteFixed32BE(uint value)
		{
			var buffer = EnsureBytes(4);
			int p = this.Position;
			buffer[p] = (byte)(value >> 24);
			buffer[p + 1] = (byte)(value >> 16);
			buffer[p + 2] = (byte)(value >> 8);
			buffer[p + 3] = (byte)(value);
			this.Position = p + 4;
		}

		/// <summary>Writes a 64-bit signed integer, using big-endian encoding</summary>
		/// <remarks>Advances the cursor by 8 bytes</remarks>
		public void WriteFixed64BE(long value)
		{
			var buffer = EnsureBytes(8);
			int p = this.Position;
			buffer[p] = (byte)(value >> 56);
			buffer[p + 1] = (byte)(value >> 48);
			buffer[p + 2] = (byte)(value >> 40);
			buffer[p + 3] = (byte)(value >> 32);
			buffer[p + 4] = (byte)(value >> 24);
			buffer[p + 5] = (byte)(value >> 16);
			buffer[p + 6] = (byte)(value >> 8);
			buffer[p + 7] = (byte)(value);
			this.Position = p + 8;
		}

		/// <summary>Writes a 64-bit unsigned integer, using big-endian encoding</summary>
		/// <remarks>Advances the cursor by 8 bytes</remarks>
		public void WriteFixed64BE(ulong value)
		{
			var buffer = EnsureBytes(8);
			int p = this.Position;
			buffer[p] = (byte)(value >> 56);
			buffer[p + 1] = (byte)(value >> 48);
			buffer[p + 2] = (byte)(value >> 40);
			buffer[p + 3] = (byte)(value >> 32);
			buffer[p + 4] = (byte)(value >> 24);
			buffer[p + 5] = (byte)(value >> 16);
			buffer[p + 6] = (byte)(value >> 8);
			buffer[p + 7] = (byte)(value);
			this.Position = p + 8;
		}

		#endregion

		#region Decimals...

		/// <summary>Writes the raw 32-bit representation of an IEEE 754 single precision number</summary>
		/// <remarks>Advances the cursor by 4 bytes. NOTE(review): the bits are stored in host byte order — presumably little-endian; confirm if portability matters.</remarks>
		public void WriteSingle(float value)
		{
			var buffer = EnsureBytes(4);
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &buffer[p])
				{
					*((int*)ptr) = *(int*)(&value);
				}
			}
			this.Position = p + 4;
		}

		/// <summary>Writes a prefix byte, followed by the raw 32-bit representation of an IEEE 754 single precision number</summary>
		/// <remarks>Advances the cursor by 5 bytes</remarks>
		public void WriteSingle(byte prefix, float value)
		{
			var buffer = EnsureBytes(5);
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &buffer[p])
				{
					ptr[0] = prefix;
					*((int*)(ptr + 1)) = *(int*)(&value);
				}
			}
			this.Position = p + 5;
		}

		/// <summary>Writes the raw 64-bit representation of an IEEE 754 double precision number</summary>
		/// <remarks>Advances the cursor by 8 bytes. NOTE(review): the bits are stored in host byte order — presumably little-endian; confirm if portability matters.</remarks>
		public void WriteDouble(double value)
		{
			var buffer = EnsureBytes(8);
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &buffer[p])
				{
					*((long*)ptr) = *(long*)(&value);
				}
			}
			this.Position = p + 8;
		}

		/// <summary>Writes a prefix byte, followed by the raw 64-bit representation of an IEEE 754 double precision number</summary>
		/// <remarks>Advances the cursor by 9 bytes</remarks>
		public void WriteDouble(byte prefix, double value)
		{
			var buffer = EnsureBytes(9);
			int p = this.Position;
			unsafe
			{
				fixed (byte* ptr = &buffer[p])
				{
					ptr[0] = prefix;
					*((long*)(ptr + 1)) = *(long*)(&value);
				}
			}
			this.Position = p + 9;
		}

		#endregion

		#region Variable size

		#region VarInts...
		/// <summary>Writes a 7-bit encoded unsigned int (aka 'Varint16') at the end, and advances the cursor</summary>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public void WriteVarInt16(ushort value)
		{
			if (value < (1 << 7))
			{ // single byte fast path
				WriteByte((byte)value);
			}
			else
			{
				WriteVarInt16Slow(value);
			}
		}

		private void WriteVarInt16Slow(ushort value)
		{
			const uint MASK = 128;
			//note: value is known to be >= 128
			if (value < (1 << 14))
			{
				WriteBytes(
					(byte)(value | MASK),
					(byte)(value >> 7)
				);
			}
			else
			{
				WriteBytes(
					(byte)(value | MASK),
					(byte)((value >> 7) | MASK),
					(byte)(value >> 14)
				);
			}
		}

		/// <summary>Writes a 7-bit encoded unsigned int (aka 'Varint32') at the end, and advances the cursor</summary>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public void WriteVarInt32(uint value)
		{
			if (value < (1 << 7))
			{ // single byte fast path
				WriteByte((byte) value);
			}
			else
			{
				WriteVarInt32Slow(value);
			}
		}

		private void WriteVarInt32Slow(uint value)
		{
			const uint MASK = 128;
			//note: value is known to be >= 128
			if (value < (1 << 14))
			{
				WriteBytes(
					(byte)(value | MASK),
					(byte)(value >> 7)
				);
			}
			else if (value < (1 << 21))
			{
				WriteBytes(
					(byte)(value | MASK),
					(byte)((value >> 7) | MASK),
					(byte)(value >> 14)
				);
			}
			else if (value < (1 << 28))
			{
				WriteBytes(
					(byte)(value | MASK),
					(byte)((value >> 7) | MASK),
					(byte)((value >> 14) | MASK),
					(byte)(value >> 21)
				);
			}
			else
			{
				WriteBytes(
					(byte)(value | MASK),
					(byte)((value >> 7) | MASK),
					(byte)((value >> 14) | MASK),
					(byte)((value >> 21) | MASK),
					(byte)(value >> 28)
				);
			}
		}

		/// <summary>Writes a 7-bit encoded unsigned long (aka 'Varint64') at the end, and advances the cursor</summary>
		public void WriteVarInt64(ulong value)
		{
			//note: if the size is 64-bits, we probably expect values to always be way above 128 so no need to optimize for this case here

			const uint MASK = 128;
			// max encoded size is 10 bytes
			var buffer = EnsureBytes(UnsafeHelpers.SizeOfVarInt(value));
			int p = this.Position;
			while (value >= MASK)
			{
				buffer[p++] = (byte) ((value & (MASK - 1)) | MASK);
				value >>= 7;
			}
			buffer[p++] = (byte) value;
			this.Position = p;
		}

		#endregion

		#region VarBytes...

		/// <summary>Writes a length-prefixed byte array, and advances the cursor</summary>
		public void WriteVarBytes(Slice value)
		{
			//REVIEW: what should we do for Slice.Nil ?

			value.EnsureSliceIsValid();
			int n = value.Count;
			if (n >= 128)
			{
				WriteVarBytesSlow(value);
				return;
			}

			var buffer = EnsureBytes(n + 1);
			int p = this.Position;
			// write the count (single byte)
			buffer[p] = (byte)n;
			// write the bytes
			if (n > 0) UnsafeHelpers.CopyUnsafe(buffer, p + 1, value.Array, value.Offset, n);
			this.Position = checked(p + n + 1);
		}

#if ENABLE_SPAN
		/// <summary>Writes a length-prefixed byte array, and advances the cursor</summary>
		public void WriteVarBytes(ReadOnlySpan<byte> value)
		{
			int n = value.Length;
			if (n >= 128)
			{
				WriteVarBytesSlow(value);
				return;
			}

			var buffer = EnsureBytes(n + 1);
			int p = this.Position;
			// write the count (single byte)
			buffer[p] = (byte)n;
			// write the bytes
			if (n > 0)
			{
				UnsafeHelpers.CopyUnsafe(buffer, p + 1, value);
			}
			this.Position = checked(p + n + 1);
		}
#endif

#if ENABLE_SPAN
		private void WriteVarBytesSlow(ReadOnlySpan<byte> value)
		{
			int n = value.Length;
			// 32-bit varint may take up to 5 bytes
			EnsureBytes(checked(n + 5));
			// write the count
			WriteVarInt32((uint) n);
			// write the bytes
			int p = this.Position;
			UnsafeHelpers.CopyUnsafe(this.Buffer, p, value);
			this.Position = checked(p + n);
		}
#else
		private void WriteVarBytesSlow(Slice value)
		{
			int n = value.Count;
			// 32-bit varint may take up to 5 bytes
			EnsureBytes(checked(n + 5));
			// write the count
			WriteVarInt32((uint) n);
			// write the bytes
			int p = this.Position;
			UnsafeHelpers.CopyUnsafe(this.Buffer, p, value.Array, value.Offset, n);
			this.Position = checked(p + n);
		}
#endif

		/// <summary>Writes a length-prefixed byte array, and advances the cursor</summary>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public void WriteVarBytes([NotNull] byte[] bytes)
		{
			Contract.Requires(bytes != null);
			WriteVarBytes(bytes.AsSlice());
		}

		/// <summary>Writes a length-prefixed byte array, and advances the cursor</summary>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public void WriteVarBytes([NotNull] byte[] bytes, int offset, int count)
		{
			Contract.Requires(count == 0 || bytes != null);
			WriteVarBytes(bytes.AsSlice(offset, count));
		}

		/// <summary>Writes a length-prefixed byte array, and advances the cursor</summary>
		public unsafe void WriteVarBytes(byte* data, uint count)
		{
			if (count >= 128)
			{
				WriteVarBytesSlow(data, count);
				return;
			}

			var buffer = EnsureBytes(count + 1);
			int p = this.Position;
			// write the count (single byte)
			buffer[p] = (byte) count;
			// write the bytes
			if (count > 0)
			{
				Contract.Assert(data != null);
				UnsafeHelpers.CopyUnsafe(buffer, p + 1, data, count);
			}
			this.Position = checked(p + (int) count + 1);
		}

		private unsafe void WriteVarBytesSlow(byte* data, uint n)
		{
			Contract.Assert(data != null);

			// 32-bit varint may take up to 5 bytes
			EnsureBytes(n + 5);

			// write the count
			WriteVarInt32(n);
			// write the bytes
			int p = this.Position;
			UnsafeHelpers.CopyUnsafe(this.Buffer, p, data, n);
			this.Position = checked((int)(p + n));
		}

		#endregion

		#region VarString...

		// all VarStrings are encoded as a VarInt that contains the number of following encoded bytes
		// => caller MUST KNOW the encoding! (usually UTF-8)
		// => the string's length is NOT stored!
+ + /// Write a variabe-sized string, using the specified encoding + /// + /// + public void WriteVarString(string value, Encoding encoding = null) + { + if (encoding == null) + { + WriteVarStringUtf8(value); + return; + } + int byteCount = encoding.GetByteCount(value); + if (byteCount == 0) + { + WriteByte(0); + return; + } + WriteVarInt32((uint) byteCount); + int p = this.Position; + int n = encoding.GetBytes(s: value, charIndex: 0, charCount: value.Length, bytes: this.Buffer, byteIndex: p); + this.Position = checked(p + n); + } + + /// Write a variable-sized string, encoded using UTF-8 + /// String to append + /// The null and empty string will be stored the same way. Caller must use a different technique if they must be stored differently. + public void WriteVarStringUtf8(string value) + { + // Format: + // - VarInt Number of following bytes + // - Byte[] UTF-8 encoded bytes + // Examples: + // - "" => { 0x00 } + // - "ABC" => { 0x03 'A' 'B' 'C' } + // - "Héllo" => { 0x06 'h' 0xC3 0xA9 'l' 'l' 'o' } + + // We need to know the encoded size beforehand, because we need to write the size first! + int byteCount = Encoding.UTF8.GetByteCount(value); + if (byteCount == 0) + { // nul or empty string + WriteByte(0); + } + else if (byteCount == value.Length) + { // ASCII! 
+ WriteVarAsciiInternal(value); + } + else + { // contains non-ASCII characters, we will need to encode + WriteVarStringUtf8Internal(value, byteCount); + } + } + + private void WriteVarStringUtf8Internal(string value, int byteCount) + { + Contract.Assert(value != null && byteCount > 0 && byteCount >= value.Length); + EnsureBytes(byteCount + UnsafeHelpers.SizeOfVarBytes(byteCount)); + WriteVarInt32((uint)byteCount); + int p = this.Position; + int n = Encoding.UTF8.GetBytes(s: value, charIndex: 0, charCount: value.Length, bytes: this.Buffer, byteIndex: p); + this.Position = checked(p + n); + } + + /// Write a variable-sized string, which is known to only contain ASCII characters (0..127) + /// This is faster than when the caller KNOWS that the string is ASCII only. This should only be used with keywords and constants, NOT with user input! + /// If the string contains characters above 127 + public void WriteVarStringAscii(string value) + { + if (string.IsNullOrEmpty(value)) + { + WriteByte(0); + } + else + { + WriteVarAsciiInternal(value); + } + } + + /// Write a variable string that is known to only contain ASCII characters + private unsafe void WriteVarAsciiInternal(string value) + { + // Caller must ensure that string is ASCII only! (otherwise it will be corrupted) + Contract.Requires(!string.IsNullOrEmpty(value)); + + int len = value.Length; + var buffer = EnsureBytes(len + UnsafeHelpers.SizeOfVarBytes(len)); + int p = this.Position; + + fixed (byte* bytes = &buffer[p]) + fixed (char* chars = value) + { + var outp = UnsafeHelpers.WriteVarInt32Unsafe(bytes, (uint) value.Length); + p += (int) (outp - bytes); + int mask = 0; + for (int i = 0; i < len; i++) + { + var c = chars[i]; + mask |= c; + outp[i] = (byte)c; + } + if (mask >= 128) throw ThrowHelper.ArgumentException(nameof(value), "The specified string must only contain ASCII characters."); + } + this.Position = checked(p + value.Length); + } + + #endregion + + #endregion + + #region UUIDs... 
+ + /// Write a 128-bit UUID, and advances the cursor + public void WriteUuid128(Uuid128 value) + { + var buffer = EnsureBytes(16); + int p = this.Position; + unsafe + { + fixed (byte* ptr = &buffer[p]) + { + value.WriteToUnsafe(ptr); + } + } + this.Position = p + 16; + } + + /// Write a 128-bit UUID, and advances the cursor + public void UnsafeWriteUuid128(Uuid128 value) + { + Contract.Requires(this.Buffer != null && this.Position + 15 < this.Buffer.Length); + int p = this.Position; + unsafe + { + fixed (byte* ptr = &this.Buffer[p]) + { + value.WriteToUnsafe(ptr); + } + } + this.Position = p + 16; + } + + /// Write a 128-bit UUID, and advances the cursor + public void WriteUuid64(Uuid64 value) + { + var buffer = EnsureBytes(8); + int p = this.Position; + unsafe + { + fixed (byte* ptr = &buffer[p]) + { + value.WriteToUnsafe(ptr); + } + } + this.Position = p + 8; + } + + /// Write a 128-bit UUID, and advances the cursor + public void UnsafeWriteUuid64(Uuid64 value) + { + Contract.Requires(this.Buffer != null && this.Position + 7 < this.Buffer.Length); + int p = this.Position; + unsafe + { + fixed (byte* ptr = &this.Buffer[p]) + { + value.WriteToUnsafe(ptr); + } + } + this.Position = p + 8; + } + + #endregion + + #region Fixed-Size Text + + /// Write a string using UTF-8 + /// Text to write + /// Number of bytes written + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public int WriteString(string value) + { + return WriteStringUtf8(value); + } + +#if ENABLE_SPAN + /// Write a string using UTF-8 + /// Text to write + /// Number of bytes written + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public int WriteString(ReadOnlySpan value) + { + return WriteStringUtf8(value); + } +#endif + + /// Write a string using the specified encoding + /// Text to write + /// Encoding used to convert the text to bytes + /// Number of bytes written + public int WriteString(string value, Encoding encoding) + { + if (string.IsNullOrEmpty(value)) return 0; + + // In order to 
estimate the required capacity, we try to guess for very small strings, but compute the actual value for larger strings, + // so that we don't waste to much memory (up to 6x the string length in the worst case scenario) + var buffer = EnsureBytes(value.Length > 128 ? encoding.GetByteCount(value) : encoding.GetMaxByteCount(value.Length)); + + int p = this.Position; + int n = encoding.GetBytes(value, 0, value.Length, buffer, p); + this.Position = p + n; + return n; + } + + /// Write a string using UTF-8 + /// Text to write + /// Number of bytes written + public int WriteStringUtf8(string value) + { + if (string.IsNullOrEmpty(value)) return 0; + + // In order to estimate the required capacity, we try to guess for very small strings, but compute the actual value for larger strings, + // so that we don't waste to much memory (up to 6x the string length in the worst case scenario) + var buffer = EnsureBytes(value.Length > 128 + ? Encoding.UTF8.GetByteCount(value) + : Encoding.UTF8.GetMaxByteCount(value.Length)); + + int p = this.Position; + int n = Encoding.UTF8.GetBytes(s: value, charIndex: 0, charCount: value.Length, bytes: buffer, byteIndex: p); + this.Position = checked(p + n); + return n; + } + +#if ENABLE_SPAN + /// Write a string using UTF-8 + /// Number of bytes written + public int WriteStringUtf8(char[] chars, int offset, int count) + { + return WriteStringUtf8(new ReadOnlySpan(chars, offset, count)); + } + + /// Write a string using UTF-8 + /// Number of bytes written + public int WriteStringUtf8(ReadOnlySpan chars) + { + int count = chars.Length; + if (count == 0) return 0; + + unsafe + { + fixed (char* inp = &MemoryMarshal.GetReference(chars)) + { + // pour estimer la capacité, on fait une estimation a la louche pour des petites strings, mais on va calculer la bonne valeur pour des string plus grandes, + // afin d'éviter de gaspiller trop de mémoire (potentiellement jusqu'a 6 fois la taille) + var buffer = EnsureBytes(count > 128 + ? 
Encoding.UTF8.GetByteCount(inp, count) + : Encoding.UTF8.GetMaxByteCount(count)); + + int p = this.Position; + fixed (byte* outp = &buffer[p]) + { + int n = Encoding.UTF8.GetBytes(chars: inp, charCount: count, bytes: outp, byteCount: buffer.Length - p); + this.Position = checked(p + n); + return n; + } + } + } + } +#endif + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static Exception FailInvalidUtf8CodePoint() + { + return new DecoderFallbackException("Failed to encode invalid Unicode CodePoint into UTF-8"); + } + + /// Write a string that only contains ASCII + /// String with characters only in the 0..127 range + /// Faster than when writing Magic Strings or ascii keywords + /// Number of bytes written + public int WriteStringAscii(string value) + { + Contract.Requires(value != null); + + var buffer = EnsureBytes(value.Length); + int p = this.Position; + foreach (var c in value) + { + buffer[p++] = (byte) c; + } + this.Position = p; + return value.Length; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteBase10(int value) + { + if ((uint) value <= 9) + { + WriteByte('0' + value); + } + else + { + WriteBase10Slow(value); + } + } + + public void WriteBase10(long value) + { + if ((ulong) value <= 9) + { + WriteByte('0' + (int) value); + } + else if (value <= int.MaxValue) + { + WriteBase10Slow((int) value); + } + else + { + WriteBase10Slower(value); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteBase10(uint value) + { + if (value <= 9) + { + WriteByte('0' + (int) value); + } + else + { + WriteBase10Slow(value); + } + } + + public void WriteBase10(ulong value) + { + if (value <= 9) + { + WriteByte('0' + (int) value); + } + else if (value <= uint.MaxValue) + { + WriteBase10Slow((uint) value); + } + else + { + WriteBase10Slower(value); + } + } + + private void WriteBase10Slow(int value) + { + if (value < 0) + { // negative numbers + if (value == int.MinValue) + { // cannot do 
Abs(MinValue), so special case for this one + WriteStringAscii("-2147483648"); + return; + } + WriteByte('-'); + value = -value; + } + + if (value < 10) + { + WriteByte((byte) ('0' + value)); + } + else if (value < 100) + { + WriteBytes( + (byte) ('0' + (value / 10)), + (byte) ('0' + (value % 10)) + ); + } + else if (value < 1000) + { + WriteBytes( + (byte) ('0' + (value / 100)), + (byte) ('0' + (value / 10) % 10), + (byte) ('0' + (value % 10)) + ); + } + else if (value < 10 * 1000) + { + WriteBytes( + (byte) ('0' + (value / 1000)), + (byte) ('0' + (value / 100) % 10), + (byte) ('0' + (value / 10) % 10), + (byte) ('0' + (value % 10)) + ); + } + else if (value < 100 * 1000) + { + WriteBytes( + (byte) ('0' + (value / 10000)), + (byte) ('0' + (value / 1000) % 10), + (byte) ('0' + (value / 100) % 10), + (byte) ('0' + (value / 10) % 10), + (byte) ('0' + (value % 10)) + ); + } + else + { + WriteBase10Slower(value); + } + } + + private void WriteBase10Slower(long value) + { + //TODO: OPTIMIZE: sans allocations? 
+ WriteStringAscii(value.ToString(CultureInfo.InvariantCulture)); + } + + private void WriteBase10Slow(uint value) + { + if (value < 10) + { + WriteByte((byte) ('0' + value)); + } + else if (value < 100) + { + WriteBytes( + (byte) ('0' + (value / 10)), + (byte) ('0' + (value % 10)) + ); + } + else if (value < 1000) + { + WriteBytes( + (byte) ('0' + (value / 100)), + (byte) ('0' + (value / 10) % 10), + (byte) ('0' + (value % 10)) + ); + } + else if (value < 10 * 1000) + { + WriteBytes( + (byte) ('0' + (value / 1000)), + (byte) ('0' + (value / 100) % 10), + (byte) ('0' + (value / 10) % 10), + (byte) ('0' + (value % 10)) + ); + } + else if (value < 100 * 1000) + { + WriteBytes( + (byte) ('0' + (value / 10000)), + (byte) ('0' + (value / 1000) % 10), + (byte) ('0' + (value / 100) % 10), + (byte) ('0' + (value / 10) % 10), + (byte) ('0' + (value % 10)) + ); + } + else + { + WriteBase10Slower(value); + } + } + + private void WriteBase10Slower(ulong value) + { + //TODO: OPTIMIZE: sans allocations? + WriteStringAscii(value.ToString(CultureInfo.InvariantCulture)); + } + + #endregion + + #region Patching + + #region 8-bits... + + /// Overwrite a section of the buffer that was already written, with the specified data + /// Offset from the start of the buffer where to start replacing + /// Data that will overwrite the buffer at the specified + /// You must ensure that replaced section does not overlap with the current position! + public void PatchBytes(int index, Slice data) + { + if (index + data.Count > this.Position) throw ThrowHelper.IndexOutOfRangeException(); + data.CopyTo(this.Buffer, index); + } + + /// Overwrite a section of the buffer that was already written, with the specified data + /// You must ensure that replaced section does not overlap with the current position! 
		public void PatchBytes(int index, byte[] buffer, int offset, int count)
		{
			if (index + count > this.Position) throw ThrowHelper.IndexOutOfRangeException();
			System.Buffer.BlockCopy(buffer, offset, this.Buffer, index, count);
		}

		/// <summary>Overwrite a byte of the buffer that was already written</summary>
		/// <remarks>You must ensure that replaced byte is before the current position!</remarks>
		public void PatchByte(int index, byte value)
		{
			if ((uint) index >= this.Position) throw ThrowHelper.IndexOutOfRangeException();
			this.Buffer[index] = value;
		}

		/// <summary>Overwrite a byte of the buffer that was already written</summary>
		/// <remarks>You must ensure that replaced byte is before the current position!</remarks>
		public void PatchByte(int index, int value)
		{
			//note: convenience method, because the C# compiler likes to produce 'int' when combining bits together
			if ((uint) index >= this.Position) throw ThrowHelper.IndexOutOfRangeException();
			this.Buffer[index] = (byte) value;
		}

		#endregion

		#region 16-bits...

		/// <summary>Overwrite a word of the buffer that was already written (little-endian)</summary>
		/// <remarks>You must ensure that replaced word is before the current position!</remarks>
		public void PatchInt16(int index, short value)
		{
			if (index + 2 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed16Unsafe(ptr, (ushort) value);
				}
			}
		}

		/// <summary>Overwrite a word of the buffer that was already written (little-endian)</summary>
		/// <remarks>You must ensure that replaced word is before the current position!</remarks>
		public void PatchUInt16(int index, ushort value)
		{
			if (index + 2 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed16Unsafe(ptr, value);
				}
			}
		}

		/// <summary>Overwrite a word of the buffer that was already written (big-endian)</summary>
		/// <remarks>You must ensure that replaced word is before the current position!</remarks>
		public void PatchInt16BE(int index, short value)
		{
			if (index + 2 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed16BEUnsafe(ptr, (ushort) value);
				}
			}
		}

		/// <summary>Overwrite a word of the buffer that was already written (big-endian)</summary>
		/// <remarks>You must ensure that replaced word is before the current position!</remarks>
		public void PatchUInt16BE(int index, ushort value)
		{
			if (index + 2 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed16BEUnsafe(ptr, value);
				}
			}
		}

		#endregion

		#region 32-bits...

		/// <summary>Overwrite a dword of the buffer that was already written (little-endian)</summary>
		/// <remarks>You must ensure that replaced dword is before the current position!</remarks>
		public void PatchInt32(int index, int value)
		{
			if (index + 4 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed32Unsafe(ptr, (uint) value);
				}
			}
		}

		/// <summary>Overwrite a dword of the buffer that was already written (little-endian)</summary>
		/// <remarks>You must ensure that replaced dword is before the current position!</remarks>
		public void PatchUInt32(int index, uint value)
		{
			if (index + 4 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed32Unsafe(ptr, value);
				}
			}
		}

		/// <summary>Overwrite a dword of the buffer that was already written (big-endian)</summary>
		/// <remarks>You must ensure that replaced dword is before the current position!</remarks>
		public void PatchInt32BE(int index, int value)
		{
			if (index + 4 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed32BEUnsafe(ptr, (uint) value);
				}
			}
		}

		/// <summary>Overwrite a dword of the buffer that was already written (big-endian)</summary>
		/// <remarks>You must ensure that replaced dword is before the current position!</remarks>
		public void PatchUInt32BE(int index, uint value)
		{
			if (index + 4 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed32BEUnsafe(ptr, value);
				}
			}
		}

		#endregion

		#region 64-bits...

		/// <summary>Overwrite a qword of the buffer that was already written (little-endian)</summary>
		/// <remarks>You must ensure that replaced qword is before the current position!</remarks>
		public void PatchInt64(int index, long value)
		{
			if (index + 8 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed64Unsafe(ptr, (ulong) value);
				}
			}
		}

		/// <summary>Overwrite a qword of the buffer that was already written (little-endian)</summary>
		/// <remarks>You must ensure that replaced qword is before the current position!</remarks>
		public void PatchUInt64(int index, ulong value)
		{
			if (index + 8 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException();
			unsafe
			{
				fixed (byte* ptr = &this.Buffer[index])
				{
					UnsafeHelpers.WriteFixed64Unsafe(ptr, value);
				}
			}
		}

		/// <summary>Overwrite a qword of the buffer that was already written (big-endian)</summary>
		/// <remarks>You must ensure that replaced qword is before the current position!</remarks>
+ public void PatchInt64BE(int index, long value) + { + if (index + 8 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException(); + unsafe + { + fixed (byte* ptr = &this.Buffer[index]) + { + UnsafeHelpers.WriteFixed64BEUnsafe(ptr, (ulong) value); + } + } + } + + /// Overwrite a qword of the buffer that was already written + /// You must ensure that replaced qword is before the current position! + public void PatchUInt64BE(int index, ulong value) + { + if (index + 8 > this.Position) ThrowHelper.ThrowIndexOutOfRangeException(); + unsafe + { + fixed (byte* ptr = &this.Buffer[index]) + { + UnsafeHelpers.WriteFixed64BEUnsafe(ptr, value); + } + } + } + + #endregion + + #endregion + + /// Return the remaining capacity in the current underlying buffer + public int RemainingCapacity + { + get + { + var buffer = this.Buffer; + if (buffer == null || this.Position >= buffer.Length) return 0; + return buffer.Length - this.Position; + } + } + + /// Ensures that we can fit the specified amount of data at the end of the buffer + /// Number of bytes that will be written + /// If the buffer is too small, it will be resized, and all previously written data will be copied + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public byte[] EnsureBytes(int count) + { + //REVIEW: en C#7 on pourrait retourner le tuple (buffer, pos) ! 
+ + Contract.Requires(count >= 0); + var buffer = this.Buffer; + if (buffer == null || this.Position + count > buffer.Length) + { + buffer = GrowBuffer(ref this.Buffer, this.Position + count); + Contract.Ensures(buffer != null && buffer.Length >= this.Position + count); + } + return buffer; + } + +#if ENABLE_ARRAY_POOL + + /// Ensures that we can fit the specified amount of data at the end of the buffer + /// Number of bytes that will be written + /// + /// If the buffer is too small, it will be resized, and all previously written data will be copied + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public byte[] EnsureBytes(int count, ArrayPool pool) + { + //REVIEW: en C#7 on pourrait retourner le tuple (buffer, pos) ! + + Contract.Requires(count >= 0); + var buffer = this.Buffer; + if (buffer == null || this.Position + count > buffer.Length) + { + buffer = GrowBuffer(ref this.Buffer, this.Position + count, pool); + Contract.Ensures(buffer != null && buffer.Length >= this.Position + count); + } + return buffer; + } + +#endif + + /// Ensures that we can fit the specified amount of data at the end of the buffer + /// Number of bytes that will be written + /// If the buffer is too small, it will be resized, and all previously written data will be copied + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public byte[] EnsureBytes(uint count) + { + return EnsureBytes(checked((int) count)); + } + + /// Ensures that we can fit data at a specifc offset in the buffer + /// Offset into the buffer (from the start) + /// Number of bytes that will be written at this offset + /// If the buffer is too small, it will be resized, and all previously written data will be copied + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void EnsureOffsetAndSize(int offset, int count) + { + Contract.Requires(offset >= 0 && count >= 0); + if (this.Buffer == null || offset + count > this.Buffer.Length) + { + GrowBuffer(ref this.Buffer, offset + count); + } 
+ } + + /// Resize a buffer by doubling its capacity + /// Reference to the variable holding the buffer to create/resize. If null, a new buffer will be allocated. If not, the content of the buffer will be copied into the new buffer. + /// Mininum guaranteed buffer size after resizing. + /// The buffer will be resized to the maximum between the previous size multiplied by 2, and . The capacity will always be rounded to a multiple of 16 to reduce memory fragmentation + [NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static byte[] GrowBuffer( + ref byte[] buffer, + int minimumCapacity = 0 +#if ENABLE_ARRAY_POOL + , ArrayPool pool = null +#endif + ) + { + Contract.Requires(minimumCapacity >= 0); + + // double the size of the buffer, or use the minimum required + long newSize = Math.Max(buffer == null ? 0 : (((long) buffer.Length) << 1), minimumCapacity); + + // .NET (as of 4.5) cannot allocate an array with more than 2^31 - 1 items... + if (newSize > 0x7fffffffL) throw FailCannotGrowBuffer(); + + // round up to 16 bytes, to reduce fragmentation + int size = BitHelpers.AlignPowerOfTwo((int) newSize, 16); + +#if ENABLE_ARRAY_POOL + if (pool == null) + { + Array.Resize(ref buffer, size); + } + else + { // use the pool to resize the buffer + pool.Resize(ref buffer, size); + } +#else + Array.Resize(ref buffer, size); +#endif + return buffer; + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static Exception FailCannotGrowBuffer() + { +#if DEBUG + // If you breakpoint here, that means that you probably have an uncheked maximum buffer size, or a runaway while(..) { append(..) } code in your layer code ! + // => you should ALWAYS ensure a reasonable maximum size of your allocations ! 
+ if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break(); +#endif + // note: some methods in the BCL do throw an OutOfMemoryException when attempting to allocated more than 2^31 + return new OutOfMemoryException("Buffer cannot be resized, because it would exceed the maximum allowed size"); + } + + [UsedImplicitly(ImplicitUseTargetFlags.WithMembers)] + private sealed class DebugView + { + + public DebugView(SliceWriter writer) + { + this.Data = new Slice(writer.Buffer, 0, writer.Position); + this.Position = writer.Position; + this.Capacity = writer.Buffer.Length; + } + + public Slice Data { get; } + + public int Position { get; } + + public int Capacity { get; } + + } + + } + +} diff --git a/FoundationDB.Client/Shared/Memory/UnsafeHelpers.cs b/FoundationDB.Client/Shared/Memory/UnsafeHelpers.cs new file mode 100644 index 000000000..1ae85413a --- /dev/null +++ b/FoundationDB.Client/Shared/Memory/UnsafeHelpers.cs @@ -0,0 +1,3589 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +// If defined, means that the host process will ALWAYS run in a Little Endian context, and we can use some optimizations to speed up encoding and decoding values to and from memory buffers. +// If undefined, then fallback to architecture-agnostic way of handling bit and little endian values +// note: when enabled, the code assumes that the CPU supports unaligned stores and loads +#define EXPECT_LITTLE_ENDIAN_HOST + +// Enable the use of Span and ReadOnlySpan +//#define ENABLE_SPAN + +//note: we would like to use Vector from System.Numerics.Vectors (which is converted to SIMD by the JIT), but this is not really practical just yet: +// - v4.0 of the assembly does NOT have Vector, which was removed between beta, and only came back in 4.1-beta +// - the ctor Vector(byte* ptr, int offset) is currently private, which means that we cannot use it with unsafe pointers yet +// - there does not seem to be any SIMD way to implement memcmp with the current Vector API, unless doing some trickery with substracting and looking for 0s + +namespace Doxense.Memory +{ + using System; + using System.Diagnostics; + using System.IO; + using System.Runtime.CompilerServices; + using System.Runtime.ConstrainedExecution; + using System.Runtime.InteropServices; + using System.Security; + using JetBrains.Annotations; + using Doxense.Diagnostics.Contracts; + + /// Helper methods for dealing with unmanaged memory. HANDLE WITH CARE! + /// Use of this class is unsafe. YOU HAVE BEEN WARNED! 
+ [DebuggerNonUserCode] // <-- remove this when debugging the class itself! + public static unsafe class UnsafeHelpers + { + +#if EXPECT_LITTLE_ENDIAN_HOST + private const bool IsLittleEndian = true; +#else + //note: should be optimized as a const by the JIT! + private static readonly bool IsLittleEndian = BitConverter.IsLittleEndian; +#endif + + /// Validates that and represent a valid location in + /// If is 0, then is allowed to be null + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void EnsureBufferIsValid(byte[] array, int offset, int count) + { + // note: same test has for a Slice + if (count != 0 && (array == null || (uint) offset > (uint) array.Length || (uint) count > (uint) (array.Length - offset))) + { + throw Errors.MalformedBuffer(array, offset, count); + } + } + + /// Validates that and represent a valid location in + /// If is 0, then is allowed to be null + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void EnsureBufferIsValid(byte[] array, uint offset, uint count) + { + // note: same test has for a Slice + if (count != 0 && (array == null || (long) count > (long) array.Length - offset)) + { + throw Errors.MalformedBuffer(array, offset, count); + } + } + + /// Validates that and represent a valid location in + /// is not allowed to be null, even if is 0. + [ContractAnnotation("array:null => halt"), MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void EnsureBufferIsValidNotNull(byte[] array, int offset, int count) + { + // note: same test has for a Slice + if (array == null || (uint) offset > (uint) array.Length || (uint) count > (uint) (array.Length - offset)) + { + throw Errors.MalformedBuffer(array, offset, count); + } + } + + /// Validates that and represent a valid location in + /// is not allowed to be null, even if is 0. 
+ [ContractAnnotation("array:null => halt"), MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void EnsureBufferIsValidNotNull(byte[] array, uint offset, uint count)
+ {
+ // note: same test as for a Slice
+ if (array == null || (long) count > (long) array.Length - offset)
+ {
+ throw Errors.MalformedBuffer(array, offset, count);
+ }
+ }
+
+ /// Validates that an unmanaged buffer represents a valid memory location
+ /// If is 0, then is allowed to be null
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void EnsureBufferIsValid(byte* bytes, long count)
+ {
+ if (count != 0 && (bytes == null || count < 0))
+ {
+ throw Errors.MalformedBuffer(bytes, count);
+ }
+ }
+
+ /// Validates that an unmanaged buffer represents a valid memory location
+ /// is not allowed to be null, even if is 0.
+ [ContractAnnotation("bytes:null => halt")]
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void EnsureBufferIsValidNotNull(byte* bytes, long count)
+ {
+ if (bytes == null || count < 0)
+ {
+ throw Errors.MalformedBuffer(bytes, count);
+ }
+ }
+
+ /// Compare two byte segments for equality
+ /// Left buffer
+ /// Start offset in left buffer
+ /// Right buffer
+ /// Start offset in right buffer
+ /// Number of bytes to compare
+ /// true if all bytes are the same in both segments
+ [Pure]
+ public static bool SameBytes(byte[] left, int leftOffset, byte[] right, int rightOffset, int count)
+ {
+ EnsureBufferIsValid(left, leftOffset, count);
+ EnsureBufferIsValid(right, rightOffset, count);
+
+ if (left == null || right == null) return left == right;
+ return SameBytesUnsafe(left, leftOffset, right, rightOffset, count);
+ }
+
+#if ENABLE_SPAN
+ /// Compare two spans for equality
+ /// Left buffer
+ /// Right buffer
+ /// true if all bytes are the same in both segments
+ public static bool SameBytes(ReadOnlySpan left, ReadOnlySpan right)
+ {
+ if (left.Length != right.Length) return false;
+ //REVIEW: is there a more direct way to
compare two spans ?? (did not find anything in ReadOnlySpan, MemoryExtensions nor MemoryMarshal ... ?) + fixed (byte* pLeft = &MemoryMarshal.GetReference(left)) + fixed (byte* pRight = &MemoryMarshal.GetReference(right)) + { + //TODO: version of comapre that is optimized for equality checks! + return 0 == CompareUnsafe(pLeft, pRight, (uint) left.Length); + } + } +#endif + + /// Compare two byte segments for equality, without validating the arguments + /// Left buffer + /// Start offset in left buffer + /// Right buffer + /// Start offset in right buffer + /// Number of bytes to compare + /// true if all bytes are the same in both segments + [Pure] + public static bool SameBytesUnsafe([NotNull] byte[] left, int leftOffset, [NotNull] byte[] right, int rightOffset, int count) + { + Contract.Requires(left != null && leftOffset >= 0 && right != null && rightOffset >= 0 && count >= 0); + + if (count == 0 || (object.ReferenceEquals(left, right) && leftOffset == rightOffset)) + { // empty, or same segment of the same buffer + return true; + } + + fixed (byte* pLeft = &left[leftOffset]) + fixed (byte* pRight = &right[rightOffset]) + { + //TODO: version of comapre that is optimized for equality checks! 
+ return 0 == CompareUnsafe(pLeft, pRight, checked((uint)count)); + } + } + + /// Compare two byte buffers lexicographically + /// Left buffer + /// Right buffer + /// Returns zero if both buffers are identical (same bytes), a negative value if left is lexicographically less than right, or a positive value if left is lexicographically greater than right + /// The comparison algorithm respect the following: + /// * "A" < "B" + /// * "A" < "AA" + /// * "AA" < "B" + /// + [Pure] + public static int Compare([NotNull] byte[] left, [NotNull] byte[] right) + { + Contract.NotNull(left, nameof(left)); + Contract.NotNull(right, nameof(right)); + return CompareUnsafe(left, 0, left.Length, right, 0, right.Length); + } + + /// Compare two byte segments lexicographically + /// Left buffer + /// Start offset in left buffer + /// Number of bytes in left buffer + /// Right buffer + /// Start offset in right buffer + /// Number of bytes in right buffer + /// Returns zero if segments are identical (same bytes), a negative value if left is lexicographically less than right, or a positive value if left is lexicographically greater than right + /// The comparison algorithm respect the following: + /// * "A" < "B" + /// * "A" < "AA" + /// * "AA" < "B" + /// + [Pure] + public static int Compare([NotNull] byte[] left, int leftOffset, int leftCount, [NotNull] byte[] right, int rightOffset, int rightCount) + { + EnsureBufferIsValidNotNull(left, leftOffset, leftCount); + EnsureBufferIsValidNotNull(right, rightOffset, rightCount); + + return CompareUnsafe(left, leftOffset, leftCount, right, rightOffset, rightCount); + } + + /// Compare two byte segments lexicographically, without validating the arguments + /// Left buffer + /// Start offset in left buffer + /// Number of bytes in left buffer + /// Right buffer + /// Start offset in right buffer + /// Number of bytes in right buffer + /// Returns zero if segments are identical (same bytes), a negative value if left is lexicographically less 
than right, or a positive value if left is lexicographically greater than right + /// The comparison algorithm respect the following: + /// * "A" < "B" + /// * "A" < "AA" + /// * "AA" < "B" + /// + [Pure] + public static int CompareUnsafe([NotNull] byte[] left, int leftOffset, int leftCount, [NotNull] byte[] right, int rightOffset, int rightCount) + { + Contract.Requires(left != null && right != null && leftOffset >= 0 && leftCount >= 0 && rightOffset >= 0 && rightCount >= 0); + + if (object.ReferenceEquals(left, right) && leftCount == rightCount && leftOffset == rightOffset) + { // same segment in the same buffer + return 0; + } + + fixed (byte* pLeft = &left[leftOffset]) + fixed (byte* pRight = &right[rightOffset]) + { + return CompareUnsafe(pLeft, (uint) leftCount, pRight, (uint) rightCount); + } + } + + /// Ensure that the specified temporary buffer is large enough + /// Pointer to a temporary scratch buffer (previous data will not be maintained) + /// Minimum expected capacity + /// Same buffer if it was large enough, or a new allocated buffer with length greater than or equal to + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte[] EnsureCapacity(ref byte[] buffer, int minCapacity) + { + if (buffer == null || buffer.Length < minCapacity) + { + buffer = AllocateAligned(minCapacity); + } + return buffer; + } + + /// Ensure that the specified temporary buffer is large enough + /// Pointer to a temporary scratch buffer (previous data will not be maintained) + /// Minimum expected capacity + /// Same buffer if it was large enough, or a new allocated buffer with length greater than or equal to + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte[] EnsureCapacity(ref byte[] buffer, uint minCapacity) + { + if (minCapacity > int.MaxValue) throw FailBufferTooLarge(minCapacity); + if (buffer == null || buffer.Length < (int) minCapacity) + { + buffer = AllocateAligned((int) minCapacity); + } + 
return buffer; + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static byte[] AllocateAligned(int minCapacity) + { + if (minCapacity < 0) throw FailBufferTooLarge(minCapacity); //note: probably an integer overlofw (unsigned -> signed) + return new byte[BitHelpers.AlignPowerOfTwo(minCapacity, 8)]; + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static Exception FailBufferTooLarge(long minCapacity) + { + return new ArgumentOutOfRangeException(nameof(minCapacity), minCapacity, "Cannot allocate buffer larger than 2GB."); + } + + /// Copy the content of a byte segment into another. CAUTION: The arguments are NOT in the same order as Buffer.BlockCopy() or Array.Copy() ! + /// Destination buffer + /// Offset in destination buffer + /// Source buffer + /// Offset in source buffer + /// Number of bytes to copy + /// CAUTION: THE ARGUMENTS ARE REVERSED! They are in the same order as memcpy() and memmove(), with destination first, and source second! + [DebuggerStepThrough] + public static void Copy(byte[] dst, int dstOffset, byte[] src, int srcOffset, int count) + { + if (count > 0) + { + EnsureBufferIsValidNotNull(dst, dstOffset, count); + EnsureBufferIsValidNotNull(src, srcOffset, count); + + fixed (byte* pDst = &dst[dstOffset]) // throw if dst == null or dstOffset outside of the array + fixed (byte* pSrc = &src[srcOffset]) // throw if src == null or srcOffset outside of the array + { + Buffer.MemoryCopy(pSrc, pDst, dst.Length - dstOffset, count); + } + } + } + + /// Copy the content of a byte segment into another. CAUTION: The arguments are NOT in the same order as Buffer.BlockCopy() or Array.Copy() ! + /// Destination buffer + /// Offset in destination buffer + /// Source buffer + /// Offset in source buffer + /// Number of bytes to copy + /// CAUTION: THE ARGUMENTS ARE REVERSED! They are in the same order as memcpy() and memmove(), with destination first, and source second! 
+ [DebuggerStepThrough] + public static void Copy(byte[] dst, uint dstOffset, byte[] src, uint srcOffset, uint count) + { + if (count > 0) + { + EnsureBufferIsValidNotNull(dst, dstOffset, count); + EnsureBufferIsValidNotNull(src, srcOffset, count); + + fixed (byte* pDst = &dst[dstOffset]) // throw if dst == null or dstOffset outside of the array + fixed (byte* pSrc = &src[srcOffset]) // throw if src == null or srcOffset outside of the array + { + Buffer.MemoryCopy(pSrc, pDst, dst.Length - dstOffset, count); + } + } + } + +#if ENABLE_SPAN + public static void Copy(Span destination, byte[] src, int srcOffset, int count) + { + if (count > 0) + { + new ReadOnlySpan(src, srcOffset, count).CopyTo(destination); + } + } + + public static void Copy(Span destination, Slice source) + { + if (source.Count > 0) + { + new ReadOnlySpan(source.Array, source.Offset, source.Count).CopyTo(destination); + } + } + + public static void Copy(byte[] dst, int dstOffset, ReadOnlySpan source) + { + if (source.Length > 0) + { + source.CopyTo(new Span(dst).Slice(dstOffset)); + } + } + + public static void Copy(Slice destination, ReadOnlySpan source) + { + if (source.Length > 0) + { + source.CopyTo(new Span(destination.Array, destination.Offset, destination.Count)); + } + } +#endif + + /// Copy the content of a byte segment into another, without validating the arguments. CAUTION: The arguments are NOT in the same order as Buffer.BlockCopy() or Array.Copy() ! + /// Destination buffer + /// Offset in destination buffer + /// Source buffer + /// Offset in source buffer + /// Number of bytes to copy + /// CAUTION: THE ARGUMENTS ARE REVERSED! They are in the same order as memcpy() and memmove(), with destination first, and source second! 
+ [DebuggerStepThrough] + public static void CopyUnsafe([NotNull] byte[] dst, int dstOffset, [NotNull] byte[] src, int srcOffset, int count) + { + //Contract.Requires(count >= 0); + if (count > 0) + { + //Contract.Requires(dst != null && dstOffset >= 0 && src != null && srcOffset >= 0); + + fixed (byte* pDst = &dst[dstOffset]) + fixed (byte* pSrc = &src[srcOffset]) + { + Buffer.MemoryCopy(pSrc, pDst, count, count); + } + } + } + +#if ENABLE_SPAN + /// Copy the content of a native byte segment into a managed segment, without validating the arguments. + /// Destination buffer + /// Offset in destination buffer + /// Point to the source buffer + /// Number of bytes to copy + /// CAUTION: THE ARGUMENTS ARE REVERSED! They are in the same order as memcpy() and memmove(), with destination first, and source second! + [DebuggerStepThrough] + public static void CopyUnsafe([NotNull] byte[] dst, int dstOffset, ReadOnlySpan src) + { + //Contract.Requires(dst != null && dstOffset >= 0 && src.Length >= 0); + + fixed (byte* pDst = &dst[dstOffset]) + fixed (byte* pSrc = &MemoryMarshal.GetReference(src)) + { + Buffer.MemoryCopy(pSrc, pDst, src.Length, src.Length); + } + } +#endif + + /// Copy the content of a native byte segment into a managed segment, without validating the arguments. + /// Destination buffer + /// Offset in destination buffer + /// Point to the source buffer + /// Number of bytes to copy + /// CAUTION: THE ARGUMENTS ARE REVERSED! They are in the same order as memcpy() and memmove(), with destination first, and source second! + [DebuggerStepThrough] + public static void CopyUnsafe([NotNull] byte[] dst, int dstOffset, byte* src, int count) + { + //Contract.Requires(dst != null && src != null && dstOffset >= 0 && count >= 0); + + fixed (byte* pDst = &dst[dstOffset]) + { + Buffer.MemoryCopy(src, pDst, count, count); + } + } + + /// Copy the content of a native byte segment into a managed segment, without validating the arguments. 
+ /// Destination buffer + /// Offset in destination buffer + /// Point to the source buffer + /// Number of bytes to copy + /// CAUTION: THE ARGUMENTS ARE REVERSED! They are in the same order as memcpy() and memmove(), with destination first, and source second! + [DebuggerStepThrough] + public static void CopyUnsafe([NotNull] byte[] dst, int dstOffset, byte* src, uint count) + { + //Contact.Requires(dst != null && src != null && dstOffset >= 0); + + fixed (byte* pDst = &dst[dstOffset]) + { + Buffer.MemoryCopy(src, pDst, count, count); + } + } + + /// Copy a managed slice to the specified memory location + /// Where to copy the bytes + /// Reference to the first byte to copy + /// Number of bytes to copy + [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CopyUnsafe(byte* dest, ref byte src, int count) + { + if (count > 0) + { + Contract.Requires(dest != null); + fixed (byte* ptr = &src) + { + Buffer.MemoryCopy(ptr, dest, count, count); + } + } + } + + /// Copy a managed slice to the specified memory location + /// Where to copy the bytes + /// Slice of managed memory that will be copied to the destination + [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CopyUnsafe(byte* dest, Slice src) + { + int count = src.Count; + if (count > 0) + { + Contract.Requires(dest != null && src.Array != null && src.Offset >= 0 && src.Count >= 0); + fixed (byte* ptr = &src.DangerousGetPinnableReference()) + { + Buffer.MemoryCopy(ptr, dest, count, count); + } + } + } + + [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CopyUnsafe(Slice dest, byte* src, uint count) + { + if (count > 0) + { + Contract.Requires(dest.Array != null && dest.Offset >= 0 && 
dest.Count >= 0 && src != null); + fixed (byte* ptr = &dest.DangerousGetPinnableReference()) + { + Buffer.MemoryCopy(src, ptr, dest.Count, count); + } + } + } + + /// Dangerously copy native memory from one location to another + /// Where to copy the bytes + /// Where to read the bytes + /// Number of bytes to copy + [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CopyUnsafe([NotNull] byte* dest, [NotNull] byte* src, uint count) + { + Contract.Requires(dest != null && src != null); + Buffer.MemoryCopy(src, dest, count, count); + } + + /// Compare two buffers in memory, using the lexicographical order, without checking the arguments + /// Pointer to the first buffer + /// Size (in bytes) of the first buffer + /// Pointer to the second buffer + /// Size (in bytes) of the second buffer + /// The returned value will be < 0 if is "before" , 0 if is the same as , and < 0 if is "after" right. + [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int CompareUnsafe(byte* left, uint leftCount, byte* right, uint rightCount) + { + Contract.Requires((left != null || leftCount == 0) && (right != null || rightCount == 0)); + + int c = CompareUnsafe(left, right, Math.Min(leftCount, rightCount)); + return c != 0 ? 
c : (int) (leftCount - rightCount); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int CompareUnsafe(byte* left, byte* right, uint count) + { + // the most frequent case is to compare keys that are natural or GUIDs, + // in which case there is a very high probability that the first byte is different already + // => we check for that case immediately + if (count != 0 && *left != *right) return *left - *right; + //REVIEW: we could special case count==4 or count==8 because they are probably frequent (FreeSpace map uses 4, indexes may use 8, ...) + return CompareUnsafeInternal(left, right, count); + } + + /// Compare two buffers in memory, using the lexicographical order, without checking the arguments + /// Pointer to the first buffer + /// Pointer to the second buffer + /// Size (in bytes) of both buffers + /// The returned value will be < 0 if is "before" , 0 if is the same as , and < 0 if is "after" right. + [Pure, MethodImpl(MethodImplOptions.NoInlining)] + private static int CompareUnsafeInternal(byte* left, byte* right, uint count) + { + Contract.Requires(count == 0 || (left != null && right != null)); + + // We would like to always use memcmp (fastest), but the overhead of PInvoke makes it slower for small keys (<= 256) + // For these, we will use a custom implementation which is a bit slower than memcmp but faster than the overhead of PInvoke. + + if (count == 0) return 0; + + // the minimum size to amortize the cost of P/Invoke seems to be over 256 bytes, On My Machine(tm) + if (count > 256) + { + return _memcmp(left, right, count); + } + + // we will scan the strings by XORing together segments of 8 bytes (then 4, then 2, ...) 
looking for the first segment that contains at least one difference (ie: at least one bit set after XORing) + // then, if we find a difference, we will "fine tune" the pointers to locate the first byte that is different + // then, we will return the difference between the bytes at this location + + // Sample scenario: + // __ cursor ___ first difference is at byte (cursor + 4) + // v v + // LEFT : .. AA AA AA AA AA AA AA AA .. + // RIGHT: .. AA AA AA AA BB AA AA AA .. + // XOR : ( 00 00 00 00 11 00 00 00 ) + // + // The result of the XOR is 0x11000000 and is not equal to 0, so the first difference is within these 8 bytes + // The first 4 bytes of the result are 0, which means that the difference is at offset 4 (ie: we needed to SHR 8 the result 4 times before having at least one bit set in 0..7 + // + // L XOR R: 00 00 00 00 11 00 00 00 + // offset : +0 +1 +2 +3 +4 +5 +6 +7 + // ^^__ first non-zero byte + + // number of 16-bytes segments to scan + long x; + if (count >= 16) + { + long y; + byte* end = left + (count & ~0xF); + while (left < end) + { + // parallelize the reads + x = *(long*) left ^ *(long*) right; + y = *(long*) (left + 8) ^ *(long*) (right + 8); + if (x != 0) + { + goto fine_tune_8; + } + if (y != 0) + { + x = y; + goto fine_tune_8_with_offset; + } + left += 16; + right += 16; + } + + if ((count & 0xF) == 0) + { // size is multiple of 16 with no differences => equal + return 0; // fast path for Guid keys + } + } + + // use the last 4 bits in the count to parse the tail + + if ((count & 8) != 0) + { // at least 8 bytes remaining + x = *(long*) left ^ *(long*) right; + if (x != 0) goto fine_tune_8; + if ((count & 7) == 0) return 0; // fast path for long keys + left += 8; + right += 8; + } + if ((count & 4) != 0) + { // at least 4 bytes remaining + x = *(int*) left ^ *(int*) right; + if (x != 0) goto fine_tune_4; + if ((count & 3) == 0) return 0; // fast path for int keys + left += 4; + right += 4; + } + if ((count & 2) != 0) + { // at least 2 bytes 
remaining + x = *(short*) left ^ *(short*) right; + if (x != 0) goto fine_tune_2; + left += 2; + right += 2; + } + if ((count & 1) != 0) + { // at least one byte remaining + return left[0] - right[0]; + } + // both strings are equal + return 0; + + fine_tune_8_with_offset: + // adjust the pointers (we were looking at the upper 8 bytes in a 16-bytes segment + left += 8; + right += 8; + + fine_tune_8: + // the difference is somewhere in the last 8 bytes + if ((uint)x == 0) + { // it is not in the first 4 bytes + x >>= 32; + left += 4; + right += 4; + } + fine_tune_4: + // the difference is somewhere in the last 4 bytes + if ((ushort) x == 0) + { // if is not in the first 2 bytes + // the difference is either at +2 or +3 + return (x & 0xFF0000) == 0 + ? left[3] - right[3] + : left[2] - right[2]; + } + + fine_tune_2: + // the difference is somewhere in the last 2 bytes + return (x & 0xFF) == 0 + ? left[1] - right[1] + : left[0] - right[0]; + } + + [Pure, MethodImpl(MethodImplOptions.NoInlining)] + private static int _memcmp([NotNull] byte* left, byte* right, uint count) + { + return NativeMethods.memcmp(left, right, (UIntPtr) count); + } + + /// Fill the content of a managed segment with zeroes + public static void Clear([NotNull] byte[] bytes, int offset, int count) + { + if (count > 0) + { + EnsureBufferIsValidNotNull(bytes, offset, count); + fixed (byte* ptr = &bytes[offset]) + { + ClearUnsafe(ptr, (uint) count); + } + } + } + + /// Fill the content of a managed segment with zeroes + public static void Clear([NotNull] byte[] bytes, uint offset, uint count) + { + if (count > 0) + { + EnsureBufferIsValidNotNull(bytes, offset, count); + fixed (byte* ptr = &bytes[offset]) + { + ClearUnsafe(ptr, count); + } + } + } + + /// Fill the content of a managed slice with zeroes + public static void Clear(Slice buffer) + { + Clear(buffer.Array, buffer.Offset, buffer.Count); + } + + /// Fill the content of an unmanaged buffer with zeroes, without checking the arguments + /// 
WARNING: invalid use of this method WILL corrupt the heap! + [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] + public static void ClearUnsafe([NotNull] byte* ptr, uint length) + { + Contract.Requires(ptr != null); + switch (length) + { + case 0: + return; + case 1: + *ptr = 0; + return; + case 2: + *(short*) ptr = 0; + return; + case 3: + *(short*) ptr = 0; + *(ptr + 2) = 0; + return; + case 4: + *(int*) ptr = 0; + return; + case 5: + ((int*) ptr)[0] = 0; + *(ptr + 4) = 0; + return; + case 6: + *(int*) ptr = 0; + *(short*) (ptr + 4) = 0; + return; + case 7: + *(int*)ptr = 0; + *(short*)(ptr + 4) = 0; + *(ptr + 6) = 0; + return; + case 8: + *(long*)ptr = 0; + return; + } + + if (length >= 512) + { // PInvoke into the native memset + _memset(ptr, 0, length); + return; + } + + while (length >= 16) + { + ((long*) ptr)[0] = 0; + ((long*) ptr)[1] = 0; + ptr += 16; + length -= 16; + } + if ((length & 8) != 0) + { + ((long*)ptr)[0] = 0; + ptr += 8; + } + if ((length & 4) != 0) + { + ((uint*) ptr)[0] = 0; + ptr += 4; + } + if ((length & 2) != 0) + { + ((short*)ptr)[0] = 0; + ptr += 2; + } + if ((length & 1) != 0) + { + *ptr = 0; + } + } + + /// Fill the content of an unmanaged buffer with zeroes, without checking the arguments + /// WARNING: invalid use of this method WILL corrupt the heap! 
+ public static void ClearUnsafe([NotNull] byte* ptr, ulong length)
+ {
+ //pre-check in case of uint overflow
+ if (length >= 512)
+ {
+ Contract.Requires(ptr != null);
+ _memset(ptr, 0, length);
+ }
+ else
+ {
+ ClearUnsafe(ptr, (uint) length);
+ }
+ }
+
+ /// Fill the content of an unmanaged array with zeroes, without checking the arguments
+ /// Pointer to the start of the array
+ /// Number of items to clear
+ /// Size (in bytes) of one item
+ /// Will clear * elements in the array
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void ClearUnsafe([NotNull] void* ptr, [Positive] int count, uint sizeOfItem)
+ {
+ ClearUnsafe((byte*) ptr, checked((uint) count * sizeOfItem));
+ }
+
+ /// Fill the content of a managed segment with the same byte repeated
+ public static void Fill([NotNull] byte[] bytes, int offset, int count, byte filler)
+ {
+ if (count > 0)
+ {
+ EnsureBufferIsValidNotNull(bytes, offset, count);
+ fixed (byte* ptr = &bytes[offset])
+ {
+ if (filler == 0)
+ {
+ ClearUnsafe(ptr, (uint)count);
+ }
+ else
+ {
+ _memset(ptr, filler, (uint)count);
+ }
+ }
+ }
+ }
+
+ [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void FillUnsafe([NotNull] byte* ptr, uint count, byte filler)
+ {
+ if (count != 0)
+ {
+ Contract.Requires(ptr != null);
+ _memset(ptr, filler, count);
+ }
+ }
+
+ public static void FillUnsafe([NotNull] byte* ptr, ulong count, byte filler)
+ {
+ if (count != 0)
+ {
+ Contract.Requires(ptr != null);
+ _memset(ptr, filler, count);
+ }
+ }
+
+ [SecurityCritical]
+ [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private static void _memset([NotNull] byte* ptr, byte filler, uint count)
+ {
+ NativeMethods.memset(ptr, filler, (UIntPtr) count);
+ }
+
+ [SecurityCritical]
+ [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
+ 
[MethodImpl(MethodImplOptions.NoInlining)] + private static void _memset([NotNull] byte* ptr, byte filler, ulong count) + { + NativeMethods.memset(ptr, filler, (UIntPtr) count); + } + + /// Add padding bytes to the end of buffer if it is not aligned to a specific value, and advance the cursor + /// Start of a buffer that may need padding + /// Size of the buffer + /// Required alignement of the buffer size, which MUST be a power of two. If the buffer is not aligned, additional 0 bytes are added at the end. + /// Address of the next byte after the buffer, with padding included + [NotNull] + public static byte* PadBuffer([NotNull] byte* buffer, uint size, uint alignment) + { + Contract.PointerNotNull(buffer, nameof(buffer)); + Contract.PowerOfTwo(alignment, nameof(alignment)); + uint pad = size % (alignment - 1); + byte* ptr = buffer + size; + if (pad != 0) + { + ClearUnsafe(ptr, pad); + ptr += alignment - pad; + } + return ptr; + } + + /// Compute the hash code of a byte segment + /// Buffer + /// Offset of the start of the segment in the buffer + /// Number of bytes in the segment + /// A 32-bit signed hash code calculated from all the bytes in the segment. + /// This should only be used for dictionaries or hashset that reside in memory only! The hashcode could change at any time in future versions. + public static int ComputeHashCode(byte[] bytes, int offset, int count) + { + if (count == 0) return unchecked((int) 2166136261); + EnsureBufferIsValidNotNull(bytes, offset, count); + fixed (byte* ptr = &bytes[offset]) + { + return ComputeHashCodeUnsafe(ptr, (uint) count); + } + } + + /// Compute the hash code of a byte buffer + /// This should only be used for dictionaries or hashset that reside in memory only! The hashcode could change at any time in future versions. 
+ public static int ComputeHashCode(byte* bytes, uint count) + { + if (count == 0) return unchecked((int) 2166136261); + EnsureBufferIsValidNotNull(bytes, count); + return ComputeHashCodeUnsafe(bytes, count); + } + + /// Compute the hash code of a byte buffer + /// Array that contains the byte buffer (ignored if count == 0) + /// Offset of the first byte in the buffer (ignored if count == 0) + /// Number of bytes in the buffer + /// A 32-bit signed hash code calculated from all the bytes in the segment. + /// + /// If count == 0, then the value of is ignored. + /// This should only be used for dictionaries or hashset that reside in memory only! The hashcode could change at any time in future versions. + /// + internal static int ComputeHashCodeUnsafe([NotNull] byte[] bytes, int offset, int count) + { + if (count == 0) return unchecked((int) 2166136261); + fixed (byte* ptr = &bytes[offset]) + { + return ComputeHashCodeUnsafe(ptr, (uint) count); + } + } + + /// Compute the hash code of a byte buffer + /// Pointer to the first byte of the buffer (ignored if count == 0) + /// Number of bytes in the buffer + /// A 32-bit signed hash code calculated from all the bytes in the segment. + /// This should only be used for dictionaries or hashset that reside in memory only! The hashcode could change at any time in future versions. + internal static int ComputeHashCodeUnsafe([NotNull] byte* bytes, uint count) + { + //note: callers should have handled the case where bytes == null, but they can call us with count == 0 + Contract.Requires(bytes != null); + + //TODO: use a better hash algorithm? (xxHash, CityHash, SipHash, ...?) + // => will be called a lot when Slices are used as keys in an hash-based dictionary (like Dictionary) + // => won't matter much for *ordered* dictionary that will probably use IComparer.Compare(..) 
instead of the IEqalityComparer.GetHashCode()/Equals() combo + // => we don't need a cryptographic hash, just something fast and suitable for use with hashtables... + // => probably best to select an algorithm that works on 32-bit or 64-bit chunks + + // : unoptimized 32 bits FNV-1a implementation + uint h = 2166136261; // FNV1 32 bits offset basis + uint n = count; + while (n > 0) + { + h = unchecked ((h ^ *bytes++) * 16777619); // FNV1 32 prime + --n; + } + return unchecked((int) h); + // + } + + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteBytesUnsafe([NotNull] byte* cursor, [NotNull] byte* data, uint count) + { + Contract.Requires(cursor != null && data != null); + if (count > 0) System.Buffer.MemoryCopy(data, cursor, count, count); + return cursor + count; + } + + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteBytes([NotNull] byte* cursor, [NotNull] byte* stop, [NotNull] byte* data, uint count) + { + Contract.Requires(cursor != null && stop != null && data != null); + if (count > 0) + { + if (cursor + count > stop) throw Errors.BufferOutOfBound(); + System.Buffer.MemoryCopy(data, cursor, count, count); + } + return cursor + count; + } + + #region VarInt Encoding... + + // VarInt encoding uses 7-bit per byte for the value, and uses the 8th bit as a "continue" (1) or "stop" (0) bit. + // The values is stored in Little Endian, ie: first the 7 lowest bits, then the next 7 lowest bits, until the 7 highest bits. + // + // ex: 0xxxxxxx = 1 byte (<= 127) + // 1xxxxxxx 0xxxxxxx = 2 bytes (<= 16383) + // 1xxxxxxx 1xxxxxxx 0xxxxxxx = 3 bytes (<= 2097151) + // + // The number of bytes required to store uint.MaxValue is 5 bytes, and for ulong.MaxValue is 9 bytes. 
+ + /// Return the size (in bytes) that a 32-bit number would need when encoded as a VarInt + /// Number that needs to be encoded + /// Number of bytes needed (1-5) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint SizeOfVarInt(uint value) + { + return value < (1U << 7) ? 1 : SizeOfVarIntSlow(value); + } + + private static uint SizeOfVarIntSlow(uint value) + { + // count is already known to be >= 128 + if (value < (1U << 14)) return 2; + if (value < (1U << 21)) return 3; + if (value < (1U << 28)) return 4; + return 5; + } + + /// Return the size (in bytes) that a 64-bit number would need when encoded as a VarInt + /// Number that needs to be encoded + /// Number of bytes needed (1-10) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint SizeOfVarInt(ulong value) + { + return value < (1UL << 7) ? 1 : SizeOfVarIntSlow(value); + } + + private static uint SizeOfVarIntSlow(ulong value) + { + // value is already known to be >= 128 + if (value < (1UL << 14)) return 2; + if (value < (1UL << 21)) return 3; + if (value < (1UL << 28)) return 4; + if (value < (1UL << 35)) return 5; + if (value < (1UL << 42)) return 6; + if (value < (1UL << 49)) return 7; + if (value < (1UL << 56)) return 8; + if (value < (1UL << 63)) return 9; + return 10; + } + + /// Return the size (in bytes) that a variable-size array of bytes would need when encoded as a VarBytes + /// Size (in bytes) of the array + /// Number of bytes needed to encoded the size of the array, and the array itself (1 + N <= size <= 5 + N) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint SizeOfVarBytes(uint size) + { + return checked(size + SizeOfVarInt(size)); + } + /// Return the size (in bytes) that a variable-size array of bytes would need when encoded as a VarBytes + /// Size (in bytes) of the array + /// Number of bytes needed to encoded the size of the array, and the array itself (1 + N <= size <= 5 + N) + [Pure, 
MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int SizeOfVarBytes(int size) + { + return checked(size + (int) SizeOfVarInt((uint) size)); + } + + /// Append a variable sized number to the output buffer + /// Pointer to the next free byte in the buffer + /// Value of the number to output + /// Pointer updated with the number of bytes written + /// Will write between 1 and 3 bytes + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteVarInt16Unsafe([NotNull] byte* cursor, uint value) + { + Contract.Requires(cursor != null); + //note: use of '&' is intentional (prevent a branch in the generated code) + if (value < 0x80) + { + *cursor = (byte) value; + return cursor + 1; + } + return WriteVarInt32UnsafeSlow(cursor, value); + } + + /// Append a variable sized number to the output buffer + /// Pointer to the next free byte in the buffer + /// + /// Value of the number to output + /// Pointer updated with the number of bytes written + /// Will write between 1 and 3 bytes + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteVarInt16([NotNull] byte* cursor, [NotNull] byte* stop, ushort value) + { + Contract.Requires(cursor != null && stop != null); + //note: use of '&' is intentional (prevent a branch in the generated code) + if (cursor < stop & value < 0x80) + { + *cursor = (byte) value; + return cursor + 1; + } + return WriteVarInt32Slow(cursor, stop, value); + } + + /// Reads a 7-bit encoded unsigned int (aka 'Varint16') from the buffer, and advances the cursor + /// Can read up to 3 bytes from the input + [NotNull] + public static byte* ReadVarint16([NotNull] byte* cursor, [NotNull] byte* stop, out ushort value) + { + Contract.Requires(cursor != null && stop != null); + if (cursor < stop && (value = *cursor) < 0x80) + { + return cursor + 1; + } + return ReadVarint16Slow(cursor, stop, out value); + } + + /// Reads a 7-bit encoded unsigned int (aka 'Varint32') from the buffer, and 
advances the cursor + /// Can read up to 5 bytes from the input + [NotNull] + private static byte* ReadVarint16Slow([NotNull] byte* cursor, [NotNull] byte* stop, out ushort value) + { + uint n; + + // unless cursor >= stop, we already know that the first byte has the MSB set + if (cursor >= stop) goto overflow; + uint b = cursor[0]; + Contract.Assert(b >= 0x80); + uint res = b & 0x7F; + + if (cursor + 1 >= stop) goto overflow; + b = cursor[1]; + res |= (b & 0x7F) << 7; + if (b < 0x80) + { + n = 2; + goto done; + } + + if (cursor + 2 >= stop) goto overflow; + b = cursor[2]; + // third should only have 2 bits worth of data + if (b >= 0x04) throw Errors.VarIntOverflow(); + res |= (b & 0x3) << 14; + n = 3; + //TODO: check overflow bits? + + done: + value = (ushort) res; + return cursor + n; + + overflow: + value = 0; + throw Errors.VarIntTruncated(); + } + + /// Reads a 7-bit encoded unsigned int (aka 'Varint16') from the buffer, and advances the cursor + /// Can read up to 3 bytes from the input + [NotNull] + public static byte* ReadVarint16Unsafe([NotNull] byte* cursor, out ushort value) + { + Contract.Requires(cursor != null); + uint n = 1; + + //TODO: we expect most values to be small (count or array length), so we should optimize for single byte varints where byte[0] <= 127 should be inlined, and defer to a slower method if >= 128. 
+ + uint b = cursor[0]; + uint res = b & 0x7F; + if (b < 0x80) + { + goto done; + } + + b = cursor[1]; + res |= (b & 0x7F) << 7; + if (b < 0x80) + { + n = 2; + goto done; + } + + b = cursor[2]; + // third should only have 2 bits worth of data + if (b >= 0x04) throw Errors.VarIntOverflow(); + res |= (b & 0x3) << 14; + n = 3; + + done: + value = (ushort) res; + return cursor + n; + } + + /// Append a variable sized number to the output buffer + /// Pointer to the next free byte in the buffer + /// Value of the number to output + /// Pointer updated with the number of bytes written + /// Will write between 1 and 5 bytes + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteVarInt32Unsafe([NotNull] byte* cursor, uint value) + { + Contract.Requires(cursor != null); + if (value < 0x80) + { + *cursor = (byte) value; + return cursor + 1; + } + return WriteVarInt32UnsafeSlow(cursor, value); + } + + /// Append a variable sized number to the output buffer + /// Pointer to the next free byte in the buffer + /// Value of the number to output + /// Pointer updated with the number of bytes written + /// Will write between 1 and 5 bytes + [NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static byte* WriteVarInt32UnsafeSlow([NotNull] byte* cursor, uint value) + { + byte* ptr = cursor; + while (value >= 0x80) + { + *ptr = (byte)(value | 0x80); + value >>= 7; + ++ptr; + } + *ptr = (byte)value; + return ptr + 1; + } + + /// Append a variable sized number to the output buffer + /// Pointer to the next free byte in the buffer + /// + /// Value of the number to output + /// Pointer updated with the number of bytes written + /// Will write between 1 and 5 bytes + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteVarInt32([NotNull] byte* cursor, [NotNull] byte* stop, uint value) + { + Contract.Requires(cursor != null && stop != null); + //note: use of '&' is intentional (prevent a branch in the generated 
code) + if (cursor < stop & value < 0x80) + { + *cursor = (byte)value; + return cursor + 1; + } + return WriteVarInt32Slow(cursor, stop, value); + } + + [NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static byte* WriteVarInt32Slow([NotNull] byte* cursor, [NotNull] byte* stop, uint value) + { + //note: we know that value >= 128 (or that cursor is >= stop, in which case we will immediately fail below) + byte* ptr = cursor; + do + { + if (ptr >= stop) throw Errors.BufferOutOfBound(); + *ptr = (byte) (value | 0x80); + value >>= 7; + ++ptr; + } while (value >= 0x80); + + if (ptr >= stop) throw Errors.BufferOutOfBound(); + *ptr = (byte) value; + return ptr + 1; + } + + /// Reads a 7-bit encoded unsigned int (aka 'Varint32') from the buffer, and advances the cursor + /// Can read up to 5 bytes from the input + [NotNull] + public static byte* ReadVarint32Unsafe([NotNull] byte* cursor, out uint value) + { + Contract.Requires(cursor != null); + uint n = 1; + + //TODO: we expect most values to be small (count or array length), so we should optimize for single byte varints where byte[0] <= 127 should be inlined, and defer to a slower method if >= 128. 
+ + uint b = cursor[0]; + uint res = b & 0x7F; + if (b < 0x80) + { + goto done; + } + + b = cursor[1]; + res |= (b & 0x7F) << 7; + if (b < 0x80) + { + n = 2; + goto done; + } + + b = cursor[2]; + res |= (b & 0x7F) << 14; + if (b < 0x80) + { + n = 3; + goto done; + } + + b = cursor[3]; + res |= (b & 0x7F) << 21; + if (b < 0x80) + { + n = 4; + goto done; + } + + // the fifth byte should only have 4 bits worth of data + b = cursor[4]; + if (b >= 0x20) throw Errors.VarIntOverflow(); + res |= (b & 0x1F) << 28; + n = 5; + + done: + value = res; + return cursor + n; + } + + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* ReadVarint32([NotNull] byte* cursor, [NotNull] byte* stop, out uint value) + { + Contract.Requires(cursor != null && stop != null); + if (cursor < stop && (value = *cursor) < 0x80) + { + return cursor + 1; + } + return ReadVarint32Slow(cursor, stop, out value); + } + + /// Reads a 7-bit encoded unsigned int (aka 'Varint32') from the buffer, and advances the cursor + /// Can read up to 5 bytes from the input + [NotNull] + private static byte* ReadVarint32Slow([NotNull] byte* cursor, [NotNull] byte* stop, out uint value) + { + uint n; + + // unless cursor >= stop, we already know that the first byte has the MSB set + if (cursor >= stop) goto overflow; + uint b = cursor[0]; + Contract.Assert(b >= 0x80); + uint res = b & 0x7F; + + if (cursor + 1 >= stop) goto overflow; + b = cursor[1]; + res |= (b & 0x7F) << 7; + if (b < 0x80) + { + n = 2; + goto done; + } + + if (cursor + 2 >= stop) goto overflow; + b = cursor[2]; + res |= (b & 0x7F) << 14; + if (b < 0x80) + { + n = 3; + goto done; + } + + if (cursor + 3 >= stop) goto overflow; + b = cursor[3]; + res |= (b & 0x7F) << 21; + if (b < 0x80) + { + n = 4; + goto done; + } + + // the fifth byte should only have 4 bits worth of data + if (cursor + 4 >= stop) goto overflow; + b = cursor[4]; + if (b >= 0x20) throw Errors.VarIntOverflow(); + res |= (b & 0x1F) << 28; + n = 5; + + done: 
+ value = res; + return cursor + n; + + overflow: + value = 0; + throw Errors.VarIntTruncated(); + } + + /// Append a variable sized number to the output buffer + /// Pointer to the next free byte in the buffer + /// Value of the number to output + /// Pointer updated with the number of bytes written + /// Will write between 1 and 10 bytes + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteVarInt64Unsafe([NotNull] byte* cursor, ulong value) + { + Contract.Requires(cursor != null); + if (value < 0x80) + { + *cursor = (byte)value; + return cursor + 1; + } + return WriteVarInt64UnsafeSlow(cursor, value); + } + + [NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static byte* WriteVarInt64UnsafeSlow([NotNull] byte* cursor, ulong value) + { + //note: we know that value >= 128 + byte* ptr = cursor; + do + { + *ptr = (byte) (value | 0x80); + value >>= 7; + ++ptr; + } while (value >= 0x80); + *ptr = (byte)value; + return ptr + 1; + } + + /// Append a variable sized number to the output buffer + /// Pointer to the next free byte in the buffer + /// Stop address (to prevent overflow) + /// Value of the number to output + /// Pointer updated with the number of bytes written + /// Will write between 1 and 10 bytes + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteVarInt64([NotNull] byte* cursor, byte* stop, ulong value) + { + Contract.Requires(cursor != null && stop != null); + //note: use of '&' is intentional (prevent a branch in the generated code) + if (cursor < stop & value < 0x80) + { + *cursor = (byte) value; + return cursor + 1; + } + return WriteVarInt64Slow(cursor, stop, value); + } + + [NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static byte* WriteVarInt64Slow([NotNull] byte* cursor, byte* stop, ulong value) + { + //note: we know that value >= 128 (or that cursor is >= stop, in which case we will immediately fail below) + byte* ptr = cursor; + do + { + if (ptr >= 
stop) throw Errors.BufferOutOfBound(); + *ptr = (byte) (value | 0x80); + value >>= 7; + ++ptr; + } while (value >= 0x80); + + if (ptr >= stop) throw Errors.BufferOutOfBound(); + *ptr = (byte)value; + return ptr + 1; + } + + /// Reads a 7-bit encoded unsigned long (aka 'Varint32') from the buffer, and advances the cursor + /// Can read up to 10 bytes from the input + [NotNull] + public static byte* ReadVarint64Unsafe([NotNull] byte* cursor, out ulong value) + { + Contract.Requires(cursor != null); + uint n = 1; + + //note: we expect the value to be large (most frequent use it to decode a Sequence Number), so there is no point in optimizing for single byte varints... + + ulong b = cursor[0]; + ulong res = b & 0x7F; + if (b < 0x80) + { + goto done; + } + + b = cursor[1]; + res |= (b & 0x7F) << 7; + if (b < 0x80) + { + n = 2; + goto done; + } + + b = cursor[2]; + res |= (b & 0x7F) << 14; + if (b < 0x80) + { + n = 3; + goto done; + } + + b = cursor[3]; + res |= (b & 0x7F) << 21; + if (b < 0x80) + { + n = 4; + goto done; + } + + b = cursor[4]; + res |= (b & 0x7F) << 28; + if (b < 0x80) + { + n = 5; + goto done; + } + + b = cursor[5]; + res |= (b & 0x7F) << 35; + if (b < 0x80) + { + n = 6; + goto done; + } + + b = cursor[6]; + res |= (b & 0x7F) << 42; + if (b < 0x80) + { + n = 7; + goto done; + } + + b = cursor[7]; + res |= (b & 0x7F) << 49; + if (b < 0x80) + { + n = 8; + goto done; + } + + b = cursor[8]; + res |= (b & 0x7F) << 56; + if (b < 0x80) + { + n = 9; + goto done; + } + + // the tenth byte should only have 1 bit worth of data + b = cursor[9]; + if (b > 1) throw Errors.VarIntOverflow(); + res |= (b & 0x1) << 63; + n = 10; + + done: + value = res; + return cursor + n; + } + + /// Reads a 7-bit encoded unsigned long (aka 'Varint32') from the buffer, and advances the cursor + /// Can read up to 10 bytes from the input + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* ReadVarint64([NotNull] byte* cursor, [NotNull] byte* stop, out 
ulong value) + { + Contract.Requires(cursor != null && stop != null); + if (cursor < stop && (value = *cursor) < 0x80) + { + return cursor + 1; + } + else + { + return ReadVarint64Slow(cursor, stop, out value); + } + } + + [NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static byte* ReadVarint64Slow([NotNull] byte* cursor, [NotNull] byte* stop, out ulong value) + { + uint n; + + // unless cursor >= stop, we already know that the first byte has the MSB set + if (cursor >= stop) goto overflow; + ulong b = cursor[0]; + Contract.Assert(b >= 0x80); + ulong res = b & 0x7F; + + if (cursor >= stop) goto overflow; + b = cursor[1]; + res |= (b & 0x7F) << 7; + if (b < 0x80) + { + n = 2; + goto done; + } + + if (cursor >= stop) goto overflow; + b = cursor[2]; + res |= (b & 0x7F) << 14; + if (b < 0x80) + { + n = 3; + goto done; + } + + if (cursor >= stop) goto overflow; + b = cursor[3]; + res |= (b & 0x7F) << 21; + if (b < 0x80) + { + n = 4; + goto done; + } + + if (cursor >= stop) goto overflow; + b = cursor[4]; + res |= (b & 0x7F) << 28; + if (b < 0x80) + { + n = 5; + goto done; + } + + if (cursor >= stop) goto overflow; + b = cursor[5]; + res |= (b & 0x7F) << 35; + if (b < 0x80) + { + n = 6; + goto done; + } + + if (cursor >= stop) goto overflow; + b = cursor[6]; + res |= (b & 0x7F) << 42; + if (b < 0x80) + { + n = 7; + goto done; + } + + if (cursor >= stop) goto overflow; + b = cursor[7]; + res |= (b & 0x7F) << 49; + if (b < 0x80) + { + n = 8; + goto done; + } + + if (cursor >= stop) goto overflow; + b = cursor[8]; + res |= (b & 0x7F) << 56; + if (b < 0x80) + { + n = 9; + goto done; + } + + // the tenth byte should only have 1 bit worth of data + if (cursor >= stop) goto overflow; + b = cursor[9]; + if (b > 1) throw Errors.VarIntOverflow(); + res |= (b & 0x1) << 63; + n = 10; + + done: + value = res; + return cursor + n; + + overflow: + value = 0; + throw Errors.VarIntTruncated(); + } + + /// Append a variable size byte sequence, using the VarInt encoding + 
/// This method performs bound checking. + [NotNull] + public static byte* WriteVarBytes([NotNull] byte* ptr, [NotNull] byte* stop, byte* data, uint count) + { + if (count == 0) + { // "Nil" + if (ptr >= stop) throw Errors.BufferOutOfBound(); + *ptr = 0; + return ptr + 1; + } + var cursor = WriteVarInt32(ptr, stop, count); + return WriteBytes(cursor, stop, data, count); + } + + /// Append a variable size byte sequence with an extra 0 at the end, using the VarInt encoding + /// This method performs bound checking. + [NotNull] + public static byte* WriteZeroTerminatedVarBytes([NotNull] byte* ptr, [NotNull] byte* stop, byte* data, uint count) + { + var cursor = WriteVarInt32(ptr, stop, count + 1); + cursor = WriteBytes(cursor, stop, data, count); + if (cursor >= stop) throw Errors.BufferOutOfBound(); + *cursor = 0; + return cursor + 1; + } + + #endregion + + #region Endianness... + +#if EXPECT_LITTLE_ENDIAN_HOST + // ReSharper disable ConditionIsAlwaysTrueOrFalse + // ReSharper disable UnreachableCode +#pragma warning disable 162 +#endif + + #region 16-bits + + /// Swap the order of the bytes in a 16-bit word + /// 0x0123 + /// 0x2301 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ushort ByteSwap16(ushort value) + { + return (ushort) ((value << 8) | (value >> 8)); + } + + /// Swap the order of the bytes in a 16-bit word + /// 0x0123 + /// 0x2301 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static short ByteSwap16(short value) + { + //note: masking is required to get rid of the sign bit + return (short) ((value << 8) | ((value >> 8) & 0xFF)); + } + + /// Load a 16-bit integer from an in-memory buffer that holds a value in Little-Endian ordering (also known as Host Order) + /// Memory address of a 2-byte location + /// Logical value in host order + /// ([ 0x34, 0x12) => 0x1234 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static short LoadInt16LE([NotNull] void* ptr) + { + return IsLittleEndian ? 
*(short*)ptr : ByteSwap16(*(short*)ptr); + } + + /// Load a 16-bit integer from an in-memory buffer that holds a value in Little-Endian ordering (also known as Host Order) + /// Memory address of a 2-byte location + /// Logical value in host order + /// ([ 0x34, 0x12) => 0x1234 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ushort LoadUInt16LE([NotNull] void* ptr) + { + return IsLittleEndian ? *(ushort*) ptr : ByteSwap16(*(ushort*) ptr); + } + + /// Store a 16-bit integer in an in-memory buffer that must hold a value in Little-Endian ordering (also known as Host Order) + /// Memory address of a 2-byte location + /// Logical value to store in the buffer + /// (ptr, 0x1234) => ptr[0] == 0x34, ptr[1] == 0x12 + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void StoreInt16LE([NotNull] void* ptr, short value) + { + *(short*)ptr = IsLittleEndian ? value : ByteSwap16(value); + } + + /// Store a 16-bit integer in an in-memory buffer that must hold a value in Little-Endian ordering (also known as Host Order) + /// Memory address of a 2-byte location + /// Logical value to store in the buffer + /// (ptr, 0x1234) => ptr[0] == 0x34, ptr[1] == 0x12 + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void StoreUInt16LE([NotNull] void* ptr, ushort value) + { + *(ushort*) ptr = IsLittleEndian ? value : ByteSwap16(value); + } + + /// Load a 16-bit integer from an in-memory buffer that holds a value in Little-Endian ordering (also known as Host Order) + /// Memory address of a 2-byte location + /// Logical value in host order + /// ([ 0x34, 0x12) => 0x1234 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static short LoadInt16BE([NotNull] void* ptr) + { + return IsLittleEndian ? 
ByteSwap16(*(short*) ptr) : *(short*) ptr; + } + + /// Load a 16-bit integer from an in-memory buffer that holds a value in Big-Endian ordering (also known as Network Order) + /// Memory address of a 2-byte location + /// Logical value in host order + /// ([ 0x12, 0x34) => 0x1234 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ushort LoadUInt16BE([NotNull] void* ptr) + { + return IsLittleEndian ? ByteSwap16(*(ushort*) ptr) : *(ushort*) ptr; + } + + /// Store a 16-bit integer in an in-memory buffer that must hold a value in Big-Endian ordering (also known as Network Order) + /// Memory address of a 2-byte location + /// Logical value to store in the buffer + /// (ptr, 0x1234) => ptr[0] == 0x12, ptr[1] == 0x34 + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void StoreInt16BE([NotNull] void* ptr, short value) + { + *(short*) ptr = IsLittleEndian ? ByteSwap16(value) : value; + } + + /// Store a 16-bit integer in an in-memory buffer that must hold a value in Big-Endian ordering (also known as Network Order) + /// Memory address of a 2-byte location + /// Logical value to store in the buffer + /// (ptr, 0x1234) => ptr[0] == 0x12, ptr[1] == 0x34 + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void StoreUInt16BE([NotNull] void* ptr, ushort value) + { + *(ushort*) ptr = IsLittleEndian ? 
ByteSwap16(value) : value; + } + + #endregion + + #region 24-bits + + /// Swap the order of the bytes in a 24-bit word + /// 0x012345 + /// 0x452301 + /// Bits 24-31 are ignored + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint ByteSwap24(uint value) + { + return (value & 0xFF) << 16 | (value & 0x00FF00) | ((value & 0xFF0000) >> 16); + } + + /// Swap the order of the bytes in a 24-bit word + /// 0x0123 + /// 0x2301 + /// Bits 24-31 are ignored + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int ByteSwap24(int value) + { + //note: masking is required to get rid of the sign bit + return (value & 0xFF) << 16 | (value & 0x00FF00) | ((value & 0xFF0000) >> 16); + } + + /// Load a 24-bit integer from an in-memory buffer that holds a value in Little-Endian ordering (also known as Host Order) + /// Memory address of a 2-byte location + /// Logical value in host order + /// ([ 0x56, 0x34, 0x12 ]) => 0x123456 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LoadInt24LE([NotNull] void* ptr) + { + uint x = *(ushort*) ptr; + x |= (uint) ((byte*) ptr)[2] << 16; + return IsLittleEndian ? (int) x : (int) ByteSwap24(x); + } + + /// Load a 24-bit integer from an in-memory buffer that holds a value in Little-Endian ordering (also known as Host Order) + /// Memory address of a 2-byte location + /// Logical value in host order + /// ([ 0x56, 0x34, 0x12 ]) => 0x123456 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint LoadUInt24LE([NotNull] void* ptr) + { + uint x = *(ushort*)ptr; + x |= (uint) ((byte*) ptr)[2] << 16; + return IsLittleEndian ? x : ByteSwap24(x); + } + + /// Store a 24-bit integer in an in-memory buffer that must hold a value in Little-Endian ordering (also known as Host Order) + /// Memory address of a 3-byte location + /// Logical value to store in the buffer. 
Bits 24-31 are ignored + /// (ptr, 0x123456) => ptr[0] == 0x56, ptr[1] == 0x34, ptr[2] == 0x12 + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void StoreInt24LE([NotNull] void* ptr, int value) + { + int x = IsLittleEndian ? value : ByteSwap24(value); + *(short*) ptr = (short) x; + ((byte*) ptr)[2] = (byte) (x >> 16); + } + + /// Store a 24-bit integer in an in-memory buffer that must hold a value in Little-Endian ordering (also known as Host Order) + /// Memory address of a 3-byte location + /// Logical value to store in the buffer. Bits 24-31 are ignored + /// (ptr, 0x123456) => ptr[0] == 0x56, ptr[1] == 0x34, ptr[2] == 0x12 + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void StoreUInt24LE([NotNull] void* ptr, uint value) + { + uint x = IsLittleEndian ? value : ByteSwap24(value); + *(ushort*)ptr = (ushort)x; + ((byte*)ptr)[2] = (byte)(x >> 16); + } + + /// Load a 24-bit integer from an in-memory buffer that holds a value in Big-Endian ordering (also known as Network Order) + /// Memory address of a 3-byte location + /// Logical value in host order + /// ([ 0x12, 0x34, 0x56 ]) => 0x123456 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int LoadInt24BE([NotNull] void* ptr) + { + uint x = *(ushort*) ptr | ((uint) ((byte*) ptr)[2] << 16); + return IsLittleEndian ? ByteSwap24((int) x) : (int) x; + } + + /// Load a 24-bit integer from an in-memory buffer that holds a value in Big-Endian ordering (also known as Network Order) + /// Memory address of a 3-byte location + /// Logical value in host order + /// ([ 0x12, 0x34, 0x56 ]) => 0x123456 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint LoadUInt24BE([NotNull] void* ptr) + { + uint x = *(ushort*) ptr | ((uint) ((byte*) ptr)[2] << 16); + return IsLittleEndian ? 
ByteSwap24(x) : x;
		}

		/// <summary>Store a 24-bit integer in an in-memory buffer that must hold a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of a 3-byte location</param>
		/// <param name="value">Logical value to store in the buffer. Bits 24-31 are ignored</param>
		/// <example>StoreInt24BE(ptr, 0x123456) => ptr[0] == 0x12, ptr[1] == 0x34, ptr[2] == 0x56</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreInt24BE([NotNull] void* ptr, int value)
		{
			int x = IsLittleEndian ? ByteSwap24(value) : value;
			// write the two low bytes of the (possibly swapped) value, then the third byte
			*(short*) ptr = (short) x;
			((byte*) ptr)[2] = (byte) (x >> 16);
		}

		/// <summary>Store a 24-bit unsigned integer in an in-memory buffer that must hold a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of a 3-byte location</param>
		/// <param name="value">Logical value to store in the buffer. Bits 24-31 are ignored</param>
		/// <example>StoreUInt24BE(ptr, 0x123456) => ptr[0] == 0x12, ptr[1] == 0x34, ptr[2] == 0x56</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreUInt24BE([NotNull] void* ptr, uint value)
		{
			uint x = IsLittleEndian ? ByteSwap24(value) : value;
			*(ushort*) ptr = (ushort) x;
			((byte*) ptr)[2] = (byte) (x >> 16);
		}

		#endregion

		#region 32-bits

		/// <summary>Swap the order of the bytes in a 32-bit word</summary>
		/// <param name="value">0x01234567</param>
		/// <returns>0x67452301</returns>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static uint ByteSwap32(uint value)
		{
			const uint MASK1_HI = 0xFF00FF00;
			const uint MASK1_LO = 0x00FF00FF;
			//PERF: do not remove the local 'tmp' variable (reusing 'value' is 4X slower with RyuJit64 than introducing a tmp variable)
			uint tmp = ((value << 8) & MASK1_HI) | ((value >> 8) & MASK1_LO); // swap pairs of 1 byte
			return (tmp << 16) | (tmp >> 16); // swap pairs of 2 bytes
		}

		/// <summary>Swap the order of the bytes in a 32-bit word</summary>
		/// <param name="value">0x01234567</param>
		/// <returns>0x67452301</returns>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static int ByteSwap32(int value)
		{
			const int MASK1_HI = unchecked((int) 0xFF00FF00);
			const int MASK1_LO = 0x00FF00FF;
			//PERF: do not remove the local 'tmp' variable! Reusing 'value' is 4X slower with RyuJit64 than introducing a tmp variable
			int tmp = ((value << 8) & MASK1_HI) | ((value >> 8) & MASK1_LO); // swap pairs of 1 byte
			// the '& 0xFFFF' discards the bits duplicated by the arithmetic (sign-extending) right shift
			return (tmp << 16) | ((tmp >> 16) & 0xFFFF); // swap pairs of 2 bytes
		}

		/// <summary>Load a 32-bit integer from an in-memory buffer that holds a value in Little-Endian ordering (also known as Host Order)</summary>
		/// <param name="ptr">Memory address of a 4-byte location</param>
		/// <returns>Logical value in host order</returns>
		/// <example>[ 0x78, 0x56, 0x34, 0x12 ] => 0x12345678</example>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static int LoadInt32LE([NotNull] void* ptr)
		{
			return IsLittleEndian ? *(int*) ptr : ByteSwap32(*(int*) ptr);
		}

		/// <summary>Load a 32-bit unsigned integer from an in-memory buffer that holds a value in Little-Endian ordering (also known as Host Order)</summary>
		/// <param name="ptr">Memory address of a 4-byte location</param>
		/// <returns>Logical value in host order</returns>
		/// <example>[ 0x78, 0x56, 0x34, 0x12 ] => 0x12345678</example>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static uint LoadUInt32LE([NotNull] void* ptr)
		{
			return IsLittleEndian ? *(uint*) ptr : ByteSwap32(*(uint*) ptr);
		}

		/// <summary>Store a 32-bit integer in an in-memory buffer that must hold a value in Little-Endian ordering (also known as Host Order)</summary>
		/// <param name="ptr">Memory address of a 4-byte location</param>
		/// <param name="value">Logical value to store in the buffer</param>
		/// <example>StoreInt32LE(ptr, 0x12345678) => ptr[0] == 0x78, ptr[1] == 0x56, ptr[2] == 0x34, ptr[3] == 0x12</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreInt32LE([NotNull] void* ptr, int value)
		{
			*(int*) ptr = IsLittleEndian ? value : ByteSwap32(value);
		}

		/// <summary>Store a 32-bit unsigned integer in an in-memory buffer that must hold a value in Little-Endian ordering (also known as Host Order)</summary>
		/// <param name="ptr">Memory address of a 4-byte location</param>
		/// <param name="value">Logical value to store in the buffer</param>
		/// <example>StoreUInt32LE(ptr, 0x12345678) => ptr[0] == 0x78, ptr[1] == 0x56, ptr[2] == 0x34, ptr[3] == 0x12</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreUInt32LE([NotNull] void* ptr, uint value)
		{
			*(uint*) ptr = IsLittleEndian ? value : ByteSwap32(value);
		}

		/// <summary>Load a 32-bit integer from an in-memory buffer that holds a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of a 4-byte location</param>
		/// <returns>Logical value in host order</returns>
		/// <example>[ 0x12, 0x34, 0x56, 0x78 ] => 0x12345678</example>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static int LoadInt32BE([NotNull] void* ptr)
		{
			return IsLittleEndian ? ByteSwap32(*(int*) ptr) : *(int*) ptr;
		}

		/// <summary>Load a 32-bit unsigned integer from an in-memory buffer that holds a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of a 4-byte location</param>
		/// <returns>Logical value in host order</returns>
		/// <example>[ 0x12, 0x34, 0x56, 0x78 ] => 0x12345678</example>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static uint LoadUInt32BE([NotNull] void* ptr)
		{
			return IsLittleEndian ? ByteSwap32(*(uint*) ptr) : *(uint*) ptr;
		}

		/// <summary>Store a 32-bit integer in an in-memory buffer that must hold a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of a 4-byte location</param>
		/// <param name="value">Logical value to store in the buffer</param>
		/// <example>StoreInt32BE(ptr, 0x12345678) => ptr[0] == 0x12, ptr[1] == 0x34, ptr[2] == 0x56, ptr[3] == 0x78</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreInt32BE([NotNull] void* ptr, int value)
		{
			*(int*) ptr = IsLittleEndian ? ByteSwap32(value) : value;
		}

		/// <summary>Store a 32-bit unsigned integer in an in-memory buffer that must hold a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of a 4-byte location</param>
		/// <param name="value">Logical value to store in the buffer</param>
		/// <example>StoreUInt32BE(ptr, 0x12345678) => ptr[0] == 0x12, ptr[1] == 0x34, ptr[2] == 0x56, ptr[3] == 0x78</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreUInt32BE([NotNull] void* ptr, uint value)
		{
			*(uint*) ptr = IsLittleEndian ? ByteSwap32(value) : value;
		}

		#endregion

		#region 64-bits

		/// <summary>Swap the order of the bytes in a 64-bit word</summary>
		/// <param name="value">0x0123456789ABCDEF</param>
		/// <returns>0xEFCDAB8967452301</returns>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static ulong ByteSwap64(ulong value)
		{
			const ulong MASK1_HI = 0xFF00FF00FF00FF00UL;
			const ulong MASK1_LO = 0x00FF00FF00FF00FFUL;
			const ulong MASK2_HI = 0xFFFF0000FFFF0000UL;
			const ulong MASK2_LO = 0x0000FFFF0000FFFFUL;

			//PERF: do not remove the local 'tmp' variable! Reusing 'value' is 4X slower with RyuJit64 than introducing a tmp variable
			ulong tmp = ((value << 8) & MASK1_HI) | ((value >> 8) & MASK1_LO); // swap pairs of 1 byte
			tmp = ((tmp << 16) & MASK2_HI) | ((tmp >> 16) & MASK2_LO); // swap pairs of 2 bytes
			return (tmp << 32) | (tmp >> 32); // swap pairs of 4 bytes
		}

		/// <summary>Swap the order of the bytes in a 64-bit word</summary>
		/// <param name="value">0x0123456789ABCDEF</param>
		/// <returns>0xEFCDAB8967452301</returns>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static long ByteSwap64(long value)
		{
			const long MASK1_HI = unchecked((long) 0xFF00FF00FF00FF00L);
			const long MASK1_LO = 0x00FF00FF00FF00FFL;
			const long MASK2_HI = unchecked((long) 0xFFFF0000FFFF0000L);
			const long MASK2_LO = 0x0000FFFF0000FFFFL;

			//PERF: do not remove the local 'tmp' variable! Reusing 'value' is 4X slower with RyuJit64 than introducing a tmp variable
			long tmp = ((value << 8) & MASK1_HI) | ((value >> 8) & MASK1_LO); // swap pairs of 1 byte
			tmp = ((tmp << 16) & MASK2_HI) | ((tmp >> 16) & MASK2_LO); // swap pairs of 2 bytes
			// the '& 0xFFFFFFFFL' discards the bits duplicated by the arithmetic (sign-extending) right shift
			return (tmp << 32) | ((tmp >> 32) & 0xFFFFFFFFL); // swap pairs of 4 bytes
		}

		/// <summary>Load a 64-bit integer from an in-memory buffer that holds a value in Little-Endian ordering (also known as Host Order)</summary>
		/// <param name="ptr">Memory address of an 8-byte location</param>
		/// <returns>Logical value in host order</returns>
		/// <example>[ 0xEF, 0xCD, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01 ] => 0x0123456789ABCDEF</example>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static long LoadInt64LE([NotNull] void* ptr)
		{
			return IsLittleEndian ? *(long*) ptr : ByteSwap64(*(long*) ptr);
		}

		/// <summary>Load a 64-bit unsigned integer from an in-memory buffer that holds a value in Little-Endian ordering (also known as Host Order)</summary>
		/// <param name="ptr">Memory address of an 8-byte location</param>
		/// <returns>Logical value in host order</returns>
		/// <example>[ 0xEF, 0xCD, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01 ] => 0x0123456789ABCDEF</example>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static ulong LoadUInt64LE([NotNull] void* ptr)
		{
			return IsLittleEndian ? *(ulong*) ptr : ByteSwap64(*(ulong*) ptr);
		}

		/// <summary>Store a 64-bit integer in an in-memory buffer that must hold a value in Little-Endian ordering (also known as Host Order)</summary>
		/// <param name="ptr">Memory address of an 8-byte location</param>
		/// <param name="value">Logical value to store in the buffer</param>
		/// <example>StoreInt64LE(ptr, 0x0123456789ABCDEF) => ptr[0] == 0xEF, ptr[1] == 0xCD, ptr[2] == 0xAB, ptr[3] == 0x89, ..., ptr[7] == 0x01</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreInt64LE([NotNull] void* ptr, long value)
		{
			*(long*) ptr = IsLittleEndian ? value : ByteSwap64(value);
		}

		/// <summary>Store a 64-bit unsigned integer in an in-memory buffer that must hold a value in Little-Endian ordering (also known as Host Order)</summary>
		/// <param name="ptr">Memory address of an 8-byte location</param>
		/// <param name="value">Logical value to store in the buffer</param>
		/// <example>StoreUInt64LE(ptr, 0x0123456789ABCDEF) => ptr[0] == 0xEF, ptr[1] == 0xCD, ptr[2] == 0xAB, ptr[3] == 0x89, ..., ptr[7] == 0x01</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreUInt64LE([NotNull] void* ptr, ulong value)
		{
			*(ulong*) ptr = IsLittleEndian ? value : ByteSwap64(value);
		}

		/// <summary>Load a 64-bit integer from an in-memory buffer that holds a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of an 8-byte location</param>
		/// <returns>Logical value in host order</returns>
		/// <example>[ 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF ] => 0x0123456789ABCDEF</example>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static long LoadInt64BE([NotNull] void* ptr)
		{
			return IsLittleEndian ? ByteSwap64(*(long*) ptr) : *(long*) ptr;
		}

		/// <summary>Load a 64-bit unsigned integer from an in-memory buffer that holds a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of an 8-byte location</param>
		/// <returns>Logical value in host order</returns>
		/// <example>[ 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF ] => 0x0123456789ABCDEF</example>
		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static ulong LoadUInt64BE([NotNull] void* ptr)
		{
			return IsLittleEndian ? ByteSwap64(*(ulong*) ptr) : *(ulong*) ptr;
		}

		/// <summary>Store a 64-bit integer in an in-memory buffer that must hold a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of an 8-byte location</param>
		/// <param name="value">Logical value to store in the buffer</param>
		/// <example>StoreInt64BE(ptr, 0x0123456789ABCDEF) => ptr[0] == 0x01, ptr[1] == 0x23, ptr[2] == 0x45, ptr[3] == 0x67, ..., ptr[7] == 0xEF</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreInt64BE([NotNull] void* ptr, long value)
		{
			*(long*) ptr = IsLittleEndian ? ByteSwap64(value) : value;
		}

		/// <summary>Store a 64-bit unsigned integer in an in-memory buffer that must hold a value in Big-Endian ordering (also known as Network Order)</summary>
		/// <param name="ptr">Memory address of an 8-byte location</param>
		/// <param name="value">Logical value to store in the buffer</param>
		/// <example>StoreUInt64BE(ptr, 0x0123456789ABCDEF) => ptr[0] == 0x01, ptr[1] == 0x23, ptr[2] == 0x45, ptr[3] == 0x67, ..., ptr[7] == 0xEF</example>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static void StoreUInt64BE([NotNull] void* ptr, ulong value)
		{
			*(ulong*) ptr = IsLittleEndian ? ByteSwap64(value) : value;
		}

		#endregion

#if EXPECT_LITTLE_ENDIAN_HOST
		#pragma warning restore 162
		// ReSharper restore UnreachableCode
		// ReSharper restore ConditionIsAlwaysTrueOrFalse
#endif

		#endregion

		#region Fixed-Size Encoding

		// Plain old encoding where 32-bit values are stored using 4 bytes, 64-bit values are stored using 8 bytes, etc...
+ // Methods without suffix use Little-Endian, while methods with 'BE' suffix uses Big Endian. + + #region 16-bit + + /// Append a fixed size 16-bit number to the output buffer, using little-endian ordering + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed16Unsafe([NotNull] byte* cursor, ushort value) + { + Contract.Requires(cursor != null); + StoreUInt16LE((ushort*) cursor, value); + return cursor + 2; + } + + /// Append a fixed size 16-bit number to the output buffer, using little-endian ordering + /// This method DOES perform bound checking! Caller must ensure that the buffer has enough capacity + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed16([NotNull] byte* cursor, [NotNull] byte* stop, ushort value) + { + Contract.Requires(cursor != null & stop != null); + if (cursor + 2 > stop) throw Errors.BufferOutOfBound(); + StoreUInt16LE((ushort*) cursor, value); + return cursor + 2; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ushort ReadFixed16([NotNull] byte* p) + { + return LoadUInt16LE((ushort*) p); + } + + [NotNull, Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* ReadFixed16([NotNull] byte* p, out ushort value) + { + value = LoadUInt16LE((ushort*) p); + return p + 2; + } + + /// Append a fixed size 16-bit number to the output buffer, using little-endian ordering + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed16BEUnsafe([NotNull] byte* cursor, ushort value) + { + Contract.Requires(cursor != null); + StoreUInt16BE((ushort*) cursor, value); + return cursor + 2; + } + + /// Append a fixed size 16-bit number to the output buffer, using little-endian ordering + /// This method DOES perform bound checking! 
Caller must ensure that the buffer has enough capacity + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed16BE([NotNull] byte* cursor, [NotNull] byte* stop, ushort value) + { + Contract.Requires(cursor != null && stop != null); + if (cursor + 2 > stop) throw Errors.BufferOutOfBound(); + StoreUInt16BE((ushort*) cursor, value); + return cursor + 2; + } + + /// Write a 16-bit zero + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteZeroFixed16([NotNull] byte* cursor) + { + // this does not care about LE or BE + *((ushort*)cursor) = 0; + return cursor + 2; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ushort ReadFixed16BE([NotNull] byte* p) + { + return LoadUInt16BE((ushort*) p); + } + + [NotNull, Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* ReadFixed16BE([NotNull] byte* p, out ushort value) + { + value = LoadUInt16BE((ushort*) p); + return p + 2; + } + + #endregion + + #region 32-bits + + /// Append a fixed size 32-bit number to the output buffer, using little-endian ordering + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed32Unsafe([NotNull] byte* cursor, uint value) + { + Contract.Requires(cursor != null); + StoreUInt32LE((uint*) cursor, value); + return cursor + 4; + } + + /// Append a fixed size 32-bit number to the output buffer, using little-endian ordering + /// This method DOES perform bound checking! 
Caller must ensure that the buffer has enough capacity + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed32([NotNull] byte* cursor, [NotNull] byte* stop, uint value) + { + Contract.Requires(cursor != null && stop != null); + if (cursor + 4 > stop) throw Errors.BufferOutOfBound(); + StoreUInt32LE((uint*) cursor, value); + return cursor + 4; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint ReadFixed32([NotNull] byte* p) + { + return LoadUInt32LE((uint*) p); + } + + [NotNull, Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* ReadFixed32([NotNull] byte* p, out uint value) + { + value = LoadUInt32LE((uint*) p); + return p + 4; + } + + /// Append a fixed size 32-bit number to the output buffer, using little-endian ordering + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed32BEUnsafe([NotNull] byte* cursor, uint value) + { + Contract.Requires(cursor != null); + StoreUInt32BE((uint*) cursor, value); + return cursor + 4; + } + + /// Append a fixed size 32-bit number to the output buffer, using little-endian ordering + /// This method DOES perform bound checking! 
Caller must ensure that the buffer has enough capacity + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed32BE([NotNull] byte* cursor, [NotNull] byte* stop, uint value) + { + Contract.Requires(cursor != null && stop != null); + if (cursor + 4 > stop) throw Errors.BufferOutOfBound(); + StoreUInt32BE((uint*) cursor, value); + return cursor + 4; + } + + /// Write a 32-bit zero + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteZeroFixed32([NotNull] byte* cursor) + { + // this does not care about LE or BE + *((uint*)cursor) = 0; + return cursor + 4; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint ReadFixed32BE([NotNull] byte* p) + { + return LoadUInt32BE((uint*) p); + } + + [NotNull, Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* ReadFixed32BE([NotNull] byte* p, out uint value) + { + value = LoadUInt32BE((uint*) p); + return p + 4; + } + + #endregion + + #region 64-bits + + /// Append a fixed size 64-bit number to the output buffer, using little-endian ordering + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed64Unsafe([NotNull] byte* cursor, ulong value) + { + Contract.Requires(cursor != null); + StoreUInt64LE((ulong*) cursor, value); + return cursor + 8; + } + + /// Append a fixed size 64-bit number to the output buffer, using little-endian ordering + /// This method DOES perform bound checking! 
Caller must ensure that the buffer has enough capacity + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed64([NotNull] byte* cursor, [NotNull] byte* stop, ulong value) + { + Contract.Requires(cursor != null && stop != null); + if (cursor + 8 > stop) throw Errors.BufferOutOfBound(); + StoreUInt64LE((ulong*) cursor, value); + return cursor + 8; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ulong ReadFixed64([NotNull] byte* p) + { + return LoadUInt64LE((ulong*) p); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* ReadFixed64([NotNull] byte* p, out ulong value) + { + value = LoadUInt64LE((ulong*) p); + return p + 8; + } + + /// Append a fixed size 64-bit number to the output buffer, using little-endian ordering + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed64BEUnsafe([NotNull] byte* cursor, ulong value) + { + Contract.Requires(cursor != null); + StoreUInt64BE((ulong*) cursor, value); + return cursor + 8; + } + + /// Append a fixed size 64-bit number to the output buffer, using little-endian ordering + /// This method DOES perform bound checking! 
Caller must ensure that the buffer has enough capacity + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteFixed64BE([NotNull] byte* cursor, [NotNull] byte* stop, ulong value) + { + Contract.Requires(cursor != null && stop != null); + if (cursor + 8 > stop) throw Errors.BufferOutOfBound(); + StoreUInt64BE((ulong*) cursor, value); + return cursor + 8; + } + + /// Write a 64-bit zero + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteZeroFixed64([NotNull] byte* cursor) + { + // this does not care about LE or BE + *((ulong*)cursor) = 0; + return cursor + 8; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ulong ReadFixed64BE([NotNull] byte* p) + { + return LoadUInt64BE((ulong*) p); + } + + [NotNull, Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* ReadFixed64BE([NotNull] byte* p, out ulong value) + { + value = LoadUInt64BE((ulong*) p); + return p + 8; + } + + #endregion + + #endregion + + #region Compact Unordered Encoding... + + // Simple encoding where each integer is stored using the smallest number of bytes possible. + // The encoded result does preserve the value ordering, and the caller needs to remember the result size in order to decode the value from a stream. + // Values from 0 to 0xFF will use 1 byte, values from 0x100 for 0xFFFF will use two bytes, and so on. + + /// Return the minimum number of bytes that hold the bits set (1) in a 32-bit unsigned integer + /// Number that needs to be encoded + /// Number of bytes needed (1-4) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint SizeOfCompact16(ushort value) + { + return value <= 0xFF ? 
1U : 2U; + } + + /// Return the minimum number of bytes that hold the bits set (1) in a 32-bit unsigned integer + /// Number that needs to be encoded + /// Number of bytes needed (1-4) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint SizeOfCompact32(uint value) + { + return value <= 0xFF ? 1U : SizeOfCompact32Slow(value); + } + + [Pure, MethodImpl(MethodImplOptions.NoInlining)] + private static uint SizeOfCompact32Slow(uint value) + { + // value is already known to be >= 256 + if (value < (1U << 16)) return 2; + if (value < (1U << 24)) return 3; + return 4; + } + + /// Return the minimum number of bytes that hold the bits set (1) in a 64-bit unsigned integer + /// Number that needs to be encoded + /// Number of bytes needed (1-8) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint SizeOfCompact64(ulong value) + { + return value <= 0xFF ? 1U : SizeOfCompact64Slow(value); + } + + [Pure, MethodImpl(MethodImplOptions.NoInlining)] + private static uint SizeOfCompact64Slow(ulong value) + { + // value is already known to be >= 256 + if (value < (1UL << 16)) return 2; + if (value < (1UL << 24)) return 3; + if (value < (1UL << 32)) return 4; + if (value < (1UL << 40)) return 5; + if (value < (1UL << 48)) return 6; + if (value < (1UL << 56)) return 7; + return 8; + } + + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteCompact16Unsafe([NotNull] byte* ptr, ushort value) + { + Contract.Requires(ptr != null); + if (value <= 0xFF) + { + *ptr = (byte) value; + return ptr + 1; + } + + StoreUInt16LE((ushort*) ptr, value); + return ptr + 2; + } + + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteCompact16BEUnsafe([NotNull] byte* ptr, ushort value) + { + Contract.Requires(ptr != null); + if (value <= 0xFF) + { + *ptr = (byte) value; + return ptr + 1; + } + + StoreUInt16BE((ushort*) ptr, value); + return ptr + 2; + } + + [NotNull, 
MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteCompact32Unsafe([NotNull] byte* ptr, uint value) + { + Contract.Requires(ptr != null); + if (value <= 0xFF) + { + ptr[0] = (byte) value; + return ptr + 1; + } + return WriteCompact32UnsafeSlow(ptr, value); + } + + [NotNull] + private static byte* WriteCompact32UnsafeSlow([NotNull] byte* ptr, uint value) + { + if (value <= 0xFFFF) + { + StoreUInt16LE((ushort*) ptr, (ushort) value); + return ptr + 2; + } + + if (value <= 0xFFFFFF) + { + StoreUInt16LE((ushort*) ptr, (ushort) value); + ptr[2] = (byte) (value >> 16); + return ptr + 3; + } + + StoreUInt32LE((uint*) ptr, value); + return ptr + 4; + } + + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteCompact32BEUnsafe([NotNull] byte* ptr, uint value) + { + Contract.Requires(ptr != null); + if (value <= 0xFF) + { + ptr[0] = (byte) value; + return ptr + 1; + } + return WriteCompact32BEUnsafeSlow(ptr, value); + } + + [NotNull] + private static byte* WriteCompact32BEUnsafeSlow([NotNull] byte* ptr, uint value) + { + if (value <= 0xFFFF) + { + StoreUInt16BE((ushort*) ptr, (ushort) value); + return ptr + 2; + } + + if (value <= 0xFFFFFF) + { + ptr[0] = (byte) (value >> 16); + StoreUInt16BE((ushort*) (ptr + 1), (ushort) value); + return ptr + 3; + } + + StoreUInt32BE((uint*) ptr, value); + return ptr + 4; + } + + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteCompact64Unsafe([NotNull] byte* ptr, ulong value) + { + Contract.Requires(ptr != null); + if (value <= 0xFF) + { // 1 byte + ptr[0] = (byte) value; + return ptr + 1; + } + + if (value >= 0x100000000000000) + { // 8 bytes + StoreUInt64LE((ulong*) ptr, value); + return ptr + 8; + } + + return WriteCompact64UnsafeSlow(ptr, value); + } + + [NotNull] + private static byte* WriteCompact64UnsafeSlow([NotNull] byte* ptr, ulong value) + { + if (value <= 0xFFFFFFFF) + { // 2 .. 
4 bytes + + if (value >= 0x1000000) + { + // 4 bytes + StoreUInt32LE((uint*) ptr, (uint) value); + return ptr + 4; + } + + StoreUInt16LE((ushort*) ptr, (ushort) value); + + if (value <= 0xFFFF) + { // 2 bytes + return ptr + 2; + } + + // 3 bytes + ptr[2] = (byte) (value >> 16); + return ptr + 3; + } + else + { // 5 .. 7 bytes + StoreUInt32LE((uint*) ptr, (uint) value); + + if (value <= 0xFFFFFFFFFF) + { // 5 bytes + ptr[4] = (byte) (value >> 32); + return ptr + 5; + } + + if (value <= 0xFFFFFFFFFFFF) + { // 6 bytes + StoreUInt16LE((ushort*) (ptr + 4), (ushort) (value >> 32)); + return ptr + 6; + } + + // 7 bytes + Contract.Assert(value <= 0xFFFFFFFFFFFFFF); + StoreUInt16LE((ushort*) (ptr + 4), (ushort) (value >> 32)); + ptr[6] = (byte) (value >> 48); + return ptr + 7; + } + } + + [NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteCompact64BEUnsafe([NotNull] byte* ptr, ulong value) + { + Contract.Requires(ptr != null); + if (value <= 0xFF) + { // 1 byte + ptr[0] = (byte) value; + return ptr + 1; + } + + if (value >= 0x100000000000000) + { // 8 bytes + StoreUInt64BE((ulong*) ptr, value); + return ptr + 8; + } + + return WriteCompact64BEUnsafeSlow(ptr, value); + } + + [NotNull] + private static byte* WriteCompact64BEUnsafeSlow([NotNull] byte* ptr, ulong value) + { + if (value <= 0xFFFFFFFF) + { // 2 .. 4 bytes + + if (value >= 0x1000000) + { + // 4 bytes + StoreUInt32BE((uint*) ptr, (uint) value); + return ptr + 4; + } + + + if (value <= 0xFFFF) + { // 2 bytes + StoreUInt16BE((ushort*) ptr, (ushort) value); + return ptr + 2; + } + + // 3 bytes + StoreUInt16BE((ushort*) ptr, (ushort) (value >> 8)); + ptr[2] = (byte) value; + return ptr + 3; + } + else + { // 5 .. 
7 bytes + + if (value <= 0xFFFFFFFFFF) + { // 5 bytes + StoreUInt32BE((uint*) ptr, (uint) (value >> 8)); + ptr[4] = (byte) value; + return ptr + 5; + } + + if (value <= 0xFFFFFFFFFFFF) + { // 6 bytes + StoreUInt32BE((uint*) ptr, (uint) (value >> 16)); + StoreUInt16BE((ushort*) (ptr + 4), (ushort) value); + return ptr + 6; + } + + // 7 bytes + Contract.Assert(value <= 0xFFFFFFFFFFFFFF); + StoreUInt32BE((uint*) ptr, (uint) (value >> 24)); + StoreUInt16BE((ushort*) (ptr + 4), (ushort) (value >> 8)); + ptr[6] = (byte) value; + return ptr + 7; + } + } + + #endregion + + #region Compact Ordered Encoding... + + // Specialized encoding to store counters (integers) using as few bytes as possible, but with the ordering preserved when using lexicographical order, i.e: Encoded(-1) < Encoded(0) < Encoded(42) < Encoded(12345678) + // + // There are two variantes: Unsigned and Signed which encodes either positive values (ie: sizes, count, ...) or negatives/values (integers, deltas, coordinates, ...) + + #region Unsigned + + // The signed variant uses the 3 highest bits to encode the number of extra bytes needed to store the value. + // - The 5 lowest bits of the start byte are the 5 highest bits of the encoded value + // - Each additional byte stores the next 8 bits until the last byte that stores the lowest 8 bits. + // - To prevent multiple ways of encoding the same value (ex: 0 can be stored as '00' or '20 00' or '04 00 00'), and preserve the ordering guarantees, only the smallest form is legal + // - Only values between 0 and 2^61 -1 can be encoded that way! (values >= 2^60 are NOT SUPPORTED). + // - 4 bytes can encode up to 2^29-1 (~ sizes up to 512 MB), 8 bytes up to 2^61-1 (~ sizes up to 2 Exabytes) + // + // WIRE FORMAT: BBBNNNNN (NNNNNNNN ...) 
+ // + // MIN MAX SIZE WIRE FORMAT = VALUE + // 0 31 1 byte 000AAAAA = b_AAAAA (5 bits) + // 32 (1<<13)-1 2 bytes 001AAAAA BBBBBBBB = b_AAAAA_BBBBBBBB (13 bits) + // (1<<13) (1<<21)-1 3 bytes 010AAAAA BBBBBBBB CCCCCCCC = b_AAAAA_BBBBBBBB_CCCCCCCC (21 bits) + // ... + // (1<<53) (1<<61)-1 8 bytes 111AAAAA BBBBBBBB CCCCCCCC DDDDDDDD EEEEEEEE FFFFFFFF GGGGGGGG = b_AAAAA_BBBBBBBB_CCCCCCCC_DDDDDDDD_EEEEEEEE_FFFFFFFF_GGGGGGGG (61 bits) + // + // Examples: + // - 0 => b_000_00000 => (1) '00' + // - 1 => b_000_00001 => (1) '01' + // - 31 => b_000_11111 => (1) '1F' + // - 32 => b_001_00000_00100000 => (2) '20 20' + // - 123 => b_001_00000_01111011 => (2) '20 7B' + // - 1234 => b_001_00100_11010010 => (2) '24 D2' + // - 12345 => b_010_00000_00110000_00111001 => (3) '40 30 39' + // - 2^16-1 => b_010_00000_11111111_11111111 => (3) '40 FF FF' + // - 2^16 => b_010_00001_00000000_00000000 => (3) '41 00 00' + // - 2^21-1 => b_010_11111_11111111_11111111 => (3) '5F FF FF' + // - 2^21 => b_011_00000_00100000_00000000_00000000 => (4) '60 20 00 00' + // - 2^29-1 => b_011_11111_11111111_11111111_11111111 => (4) '7F FF FF FF' + // - 2^29 => b_100_00000_00100000_00000000_00000000_00000000 => (5) '80 20 00 00 00' + // - 2^31-1 => b_100_00000_01111111_11111111_11111111_11111111 => (5) '80 7F FF FF FF' + // - 2^32-1 => b_100_00000_11111111_11111111_11111111_11111111 => (5) '80 FF FF FF FF' + // - 2^32 => b_100_00001_00000000_00000000_00000000_00000000 => (5) '81 00 00 00 00' + // - 2^61-1 => b_111_11111_11111111_11111111_11111111_11111111_11111111_11111111_11111111 => (8) 'FF FF FF FF FF FF FF FF' + + private const int OCU_LEN0 = 0 << 5; + private const int OCU_LEN1 = 1 << 5; + private const int OCU_LEN2 = 2 << 5; + private const int OCU_LEN3 = 3 << 5; + private const int OCU_LEN4 = 4 << 5; + private const int OCU_LEN5 = 5 << 5; + private const int OCU_LEN6 = 6 << 5; + private const int OCU_LEN7 = 7 << 5; + private const int OCU_BITMAK = (1 << 5) - 1; + private const uint OCU_MAX0 = (1U << 
5) - 1; + private const uint OCU_MAX1 = (1U << (5 + 8)) - 1; + private const uint OCU_MAX2 = (1U << (5 + 8 * 2)) - 1; + private const uint OCU_MAX3 = (1U << (5 + 8 * 3)) - 1; + private const ulong OCU_MAX4 = (1UL << (5 + 8 * 4)) - 1; + private const ulong OCU_MAX5 = (1UL << (5 + 8 * 5)) - 1; + private const ulong OCU_MAX6 = (1UL << (5 + 8 * 6)) - 1; + private const ulong OCU_MAX7 = (1UL << (5 + 8 * 7)) - 1; + + + /// Return the size (in bytes) that a 32-bit counter value would need with the Compact Order Unsigned encoding + /// Number that needs to be encoded + /// Number of bytes needed (1-5) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int SizeOfOrderedUInt32(uint value) + { + return value <= OCU_MAX0 ? 1 + : value <= OCU_MAX1 ? 2 + : value <= OCU_MAX2 ? 3 + : value <= OCU_MAX3 ? 4 + : 5; + } + + /// Return the size (in bytes) that a 64-bit counter value would need with the Compact Order Unsigned encoding + /// Number that needs to be encoded, between 0 and 2^60-1 + /// Number of bytes needed (1-8), or 0 if the number would overflow (2^60 or greater) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int SizeOfOrderedUInt64(ulong value) + { + return value <= OCU_MAX0 ? 1 + : value <= OCU_MAX1 ? 2 + : value <= OCU_MAX2 ? 3 + : value <= OCU_MAX3 ? 4 + : value <= OCU_MAX4 ? 5 + : value <= OCU_MAX5 ? 6 + : value <= OCU_MAX6 ? 7 + : value <= OCU_MAX7 ? 8 + : 0; // this would throw! 
+ } + + /// Append an unsigned 32-bit counter value using a compact ordered encoding + /// Pointer to the next free byte in the buffer + /// Positive counter value + /// Pointer updated with the number of bytes written + /// Will write between 1 and 5 bytes + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteOrderedUInt32Unsafe([NotNull] byte* cursor, uint value) + { + if (value <= OCU_MAX0) + { // < 32 + *cursor = (byte) (OCU_LEN0 | value); + return cursor + 1; + } + if (value <= OCU_MAX1) + { // < 8 KB + cursor[0] = (byte) (OCU_LEN1 | (value >> 8)); + cursor[1] = (byte) (value); + return cursor + 2; + } + return WriteOrderedUInt32UnsafeSlow(cursor, value); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + private static byte* WriteOrderedUInt32UnsafeSlow([NotNull] byte* cursor, uint value) + { + if (value <= OCU_MAX2) + { // < 2 MB + cursor[0] = (byte)(OCU_LEN2 | (value >> 16)); + cursor[1] = (byte)(value >> 8); + cursor[2] = (byte)(value); + return cursor + 3; + } + if (value <= OCU_MAX3) + { // < 512 MB + cursor[0] = (byte)(OCU_LEN3 | (value >> 24)); + cursor[1] = (byte)(value >> 16); + cursor[2] = (byte)(value >> 8); + cursor[3] = (byte)(value); + return cursor + 4; + } + cursor[0] = OCU_LEN4; // we waste a byte for values >= 512MB, which is unfortunate... 
+ cursor[1] = (byte)(value >> 24); + cursor[2] = (byte)(value >> 16); + cursor[3] = (byte)(value >> 8); + cursor[4] = (byte)(value); + return cursor + 5; + } + + /// Append an unsigned 64-bit counter value (up to 2^61-1) using the Compact Ordered Unsigned encoding + /// Pointer to the next free byte in the buffer + /// Positive counter value that must be between 0 and 2^61 - 1 (2,305,843,009,213,693,951 or 0x1FFFFFFFFFFFFFFF) + /// Pointer updated with the number of bytes written + /// Will write between 1 and 8 bytes + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static byte* WriteOrderedUInt64Unsafe([NotNull] byte* cursor, ulong value) + { + return value <= uint.MaxValue ? WriteOrderedUInt32Unsafe(cursor, (uint) value) : WriteOrderedUInt64UnsafeSlow(cursor, value); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + private static byte* WriteOrderedUInt64UnsafeSlow([NotNull] byte* cursor, ulong value) + { + if (value <= OCU_MAX4) + { + cursor[0] = (byte)(OCU_LEN4 | (value >> 32)); + cursor[1] = (byte)(value >> 24); + cursor[2] = (byte)(value >> 16); + cursor[3] = (byte)(value >> 8); + cursor[4] = (byte)(value); + return cursor + 5; + } + if (value <= OCU_MAX5) + { + cursor[0] = (byte)(OCU_LEN5 | (value >> 40)); + cursor[1] = (byte)(value >> 32); + cursor[2] = (byte)(value >> 24); + cursor[3] = (byte)(value >> 16); + cursor[4] = (byte)(value >> 8); + cursor[5] = (byte)(value); + return cursor + 6; + } + if (value <= OCU_MAX6) + { + cursor[0] = (byte)(OCU_LEN6 | (value >> 48)); + cursor[1] = (byte)(value >> 40); + cursor[2] = (byte)(value >> 32); + cursor[3] = (byte)(value >> 24); + cursor[4] = (byte)(value >> 16); + cursor[5] = (byte)(value >> 8); + cursor[6] = (byte)(value); + return cursor + 7; + } + + if (value <= OCU_MAX7) + { + cursor[0] = (byte) (OCU_LEN7 | (value >> 56)); + cursor[1] = (byte) (value >> 48); + cursor[2] = (byte) (value >> 40); + cursor[3] = (byte) (value >> 32); + cursor[4] = (byte) (value >> 24); + cursor[5] = (byte) (value 
>> 16); + cursor[6] = (byte) (value >> 8); + cursor[7] = (byte) (value); + return cursor + 8; + } + + throw new ArgumentOutOfRangeException(nameof(value), value, "Value must be less then 2^60"); + } + + /// Read an unsigned 32-bit counter value encoded using the Compact Ordered Unsigned encoding + /// + /// + /// + public static byte* ReadOrderedUInt32Unsafe(byte* cursor, out uint value) + { + uint start = cursor[0]; + switch (start >> 5) + { + case 0: + value = (start & OCU_BITMAK); + return cursor + 1; + case 1: + value = ((start & OCU_BITMAK) << 8) | ((uint) cursor[1]); + return cursor + 2; + case 2: + value = ((start & OCU_BITMAK) << 16) | ((uint) cursor[1] << 8) | ((uint) cursor[2]); + return cursor + 3; + case 3: + value = ((start & OCU_BITMAK) << 24) | ((uint)cursor[1] << 16) | ((uint)cursor[2] << 8) | (uint)cursor[3]; + return cursor + 4; + case 4: + // start bits MUST be 0 (else, there is an overflow) + if ((start & OCU_BITMAK) != 0) throw new InvalidDataException(); //TODO: message? + value = ((uint)cursor[1] << 24) | ((uint)cursor[2] << 16) | ((uint)cursor[3] << 8) | (uint)cursor[4]; + return cursor + 5; + default: + // overflow? + throw new InvalidDataException(); //TODO: message? 
+ } + } + + /// Read an unsigned 64-bit counter value encoded using the Compact Ordered Unsigned encoding + /// + /// + /// + public static byte* ReadOrderedUInt64Unsafe(byte* cursor, out ulong value) + { + ulong start = cursor[0]; + switch (start >> 5) + { + case 0: + value = (start & OCU_BITMAK); + return cursor + 1; + case 1: + value = ((start & OCU_BITMAK) << 8) | ((ulong)cursor[1]); + return cursor + 2; + case 2: + value = ((start & OCU_BITMAK) << 16) | ((ulong)cursor[1] << 8) | ((ulong)cursor[2]); + return cursor + 3; + case 3: + value = ((start & OCU_BITMAK) << 24) | ((ulong)cursor[1] << 16) | ((ulong)cursor[2] << 8) | ((ulong)cursor[3]); + return cursor + 4; + case 4: + value = ((start & OCU_BITMAK) << 32) | ((ulong)cursor[1] << 24) | ((ulong)cursor[2] << 16) | ((ulong)cursor[3] << 8) | ((ulong)cursor[4]); + return cursor + 5; + case 5: + value = ((start & OCU_BITMAK) << 40) | ((ulong)cursor[1] << 32) | ((ulong)cursor[2] << 24) | ((ulong)cursor[3] << 16) | ((ulong)cursor[4] << 8) | ((ulong)cursor[5]); + return cursor + 6; + case 6: + value = ((start & OCU_BITMAK) << 48) | ((ulong)cursor[1] << 40) | ((ulong)cursor[2] << 32) | ((ulong)cursor[3] << 24) | ((ulong)cursor[4] << 16) | ((ulong)cursor[5] << 8) | ((ulong)cursor[6]); + return cursor + 7; + default: // 7 + value = ((start & OCU_BITMAK) << 56) | ((ulong)cursor[1] << 48) | ((ulong)cursor[2] << 40) | ((ulong)cursor[3] << 32) | ((ulong)cursor[4] << 24) | ((ulong)cursor[5] << 16) | ((ulong)cursor[6] << 8) | ((ulong)cursor[7]); + return cursor + 8; + } + } + + #endregion + + #region Signed + + // The signed variant is very similar, except that the start byte uses an additional "Sign" bit (inverted) + // - The hight bit (bit 7) of the start byte is 0 for negative numbers, and 1 for positive numbers + // - The next 3 bits (bits 6-4) of the start byte encode the number of extra bytes following + // - The last 4 bits (bit 3-0) contain the 4 highest bits of the encoded value + // - Each additional byte stores the 
next 8 bits until the last byte that stores the lowest 8 bits. + // - For negative values, the number of bytes required is computed by using Abs(X)-1, but the original negative value is used (after masking) + // i.e.: -1 becomes -(-1)-1 = 0 (which fits in 4 bits), and will be encoded as (-1) & 0xF = b_0_000_1111 = '0F', and 0 will be encoded as b_1_000_0000 = '10' (which is indeeded sorted after '0F') + // - Only values between -2^60 and 2^60-1 can be encoded that way! (values < -2^60 or >= 2^60 are NOT SUPPORTED) + + // WIRE FORMAT: SBBBNNNN (NNNNNNNN ...) + // - if S = 0, X is negative: BBB = 7 - exta bytes, NNN...N = 2's complement of X + // - if S = 1, X is positive: BBB = exta bytes, NNN...N = X + // + // MIN MAX SIZE WIRE FORMAT = VALUE + // -(1<<60) -(1<<52)-1 8 bytes 1111AAAA BBBBBBBB CCCCCCCC DDDDDDDD EEEEEEEE FFFFFFFF GGGGGGGG = b_AAAA_BBBBBBBB_CCCCCCCC_DDDDDDDD_EEEEEEEE_FFFFFFFF_GGGGGGGG (60 bits) + // ... + // -(1<<12) -17 2 bytes 1001AAAA BBBBBBBB = ~(b_AAAA_BBBBBBBB - 1) (12 bits) + // -16 -1 1 byte 0000AAAA = ~(b_AAAA - 1) (4 bits) + // 0 +15 1 byte 1000AAAA = b_AAAA (4 bits) + // +16 (1<<12)-1 2 bytes 1001AAAA BBBBBBBB = b_AAAA_BBBBBBBB (12 bits) + // ... 
+ // (1<<52) (1<<60)-1 8 bytes 1111AAAA BBBBBBBB CCCCCCCC DDDDDDDD EEEEEEEE FFFFFFFF GGGGGGGG = b_AAAA_BBBBBBBB_CCCCCCCC_DDDDDDDD_EEEEEEEE_FFFFFFFF_GGGGGGGG (60 bits) + // + // Examples: + // - 0 => b_1_000_0000 => (1) '80' + // - 1 => b_1_000_0001 => (1) '81' + // - 15 => b_1_000_1111 => (1) '8F' + // - 16 => b_1_001_0000_00010000 => (2) '90 10' + // - 123 => b_1_001_0000_01111011 => (2) '90 7B' + // - 1234 => b_1_001_0100_11010010 => (2) '94 D2' + // - 12345 => b_1_010_0000_00110000_00111001 => (3) 'A0 30 39' + // - 2^16-1 => b_1_010_0001_00000000_00000000 => (3) 'A1 00 00' + // - 2^20-1 => b_1_010_1111_11111111_11111111 => (3) 'AF FF FF' + // - 2^21 => b_1_011_0000_00100000_00000000_00000000 => (4) 'B0 20 00 00' + // - 2^28-1 => b_1_011_1111_11111111_11111111_11111111 => (4) 'BF FF FF FF' + // - 2^32-1 => b_1_100_0000_11111111_11111111_11111111_11111111 => (4) 'C0 FF FF FF FF' + // - 2^32 => b_1_100_0001_00000000_00000000_00000000_00000000 => (4) 'C1 00 00 00 00' + // - 2^60-1 => b_1_111_1111_11111111_11111111_11111111_11111111_11111111_11111111_11111111 => (8) 'FF FF FF FF FF FF FF FF' + + //TODO! + + #endregion + + #endregion + + /// Convert a char in range '0-9A-Fa-f' into a value between 0 and 15 + /// Result is unspecified if char is not in the valid range! 
+ [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int Nibble(char c) + { + // The lowest 4 bits almost give us the result we want: + // - '0'..'9': (c & 15) = 0..9; need to add 0 to get correct result + // - 'A'..'F': (c & 15) = 1..6; need to add 9 to get correct reuslt + // - 'a'..'f': (c & 15) = 1..6; need to add 9 to get correct reuslt + // We just need to tweak the value to have a bit that is different between digits and letters, and use that bit to compute the final offset of 0 or 9 + return (c & 15) + (((((c + 16) & ~64) >> 4) & 1) * 9); + } + + /// Convert values between 0 and 15 into a character from in range '0-9A-F' + /// Only the lower 4 bits are used, so the caller does not need to mask out the upper bits! + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static char Nibble(int x) + { + // We first tweak the value in order to have a bit that is different between 0-9 and 10-15. + // Then, we use that bit to compute the final offset that will end up adding +48 or +55 + // 0-9 : X + 54 + 1 - (1 x 7) = X + 48 = '0'-'9' + // 10-15 : X + 54 + 1 - (0 x 7) = X + 55 = 'A'-'F' + int tmp = ((x & 0xF) + 54); + return (char)(tmp + 1 - ((tmp & 32) >> 5) * 7); + //REVIEW: '* 7' could probably be replaced with some shift/add trickery... (but maybe the JIT will do it for us?) + } + + #region String Helpers... 
+ + /// Check if a string only contains characters between 0 and 127 (ASCII) + [Pure] + public static bool IsAsciiString([NotNull] string value) + { + Contract.Requires(value != null); + fixed (char* pChars = value) + { + return IsAsciiString(pChars, value.Length); + } + } + + /// Check if a section of a string only contains characters between 0 and 127 (ASCII) + [Pure] + public static bool IsAsciiString([NotNull] string value, int offset, int count) + { + Contract.Requires(value != null && offset >= 0 && count <= 0 && offset + count <= value.Length); + if (count == 0) return true; + fixed (char* pChars = value) + { + return IsAsciiString(pChars + offset, count); + } + } + +#if ENABLE_SPAN + /// Check if a section of a string only contains characters between 0 and 127 (ASCII) + [Pure] + public static bool IsAsciiString(ReadOnlySpan value) + { + if (value.Length == 0) return true; + fixed (char* pChars = &MemoryMarshal.GetReference(value)) + { + return IsAsciiString(pChars, value.Length); + } + } +#endif + + /// Check if a string only contains characters between 0 and 127 (ASCII) + [Pure] + public static bool IsAsciiString([NotNull] char* pChars, int numChars) + { + Contract.Requires(pChars != null); + // we test if each char has at least one bit set above bit 7, ie: (char & 0xFF80) != 0 + // to speed things up, we check multiple chars at a time + + #region Performance Notes... + /* + The following loop is optimized to produce the best x64 code with Deskop CLR RyuJitJIT (x64) that is currently in 4.6.2 (preview) + => if the JIT changes, we may need to revisit! 
+ + Currently, the x64 code generated for the main unrolled loop looks like this: + + MAIN_LOOP: + // rax = ptr + // rcx = end + (01) cmp rax,rcx // while (ptr < end) + (02) jae TAIL // => bypass for small strings <= 7 chars + + LOOP: + (03) mov r8,qword ptr [rax] // ulong x1 = *(ulong*) (ptr + 0); + (04) mov r9,qword ptr [rax+8] // ulong x2 = *(ulong*) (ptr + 8); + (05) mov r10,qword ptr [rax+10h] // ulong x3 = *(ulong*) (ptr + 8); + (06) mov r11,qword ptr [rax+18h] // ulong x4 = *(ulong*) (ptr + 12); + (07) mov rsi,0FF80FF80FF80FF80h + (08) and r8,rsi // x1 &= MASK4; + (09) and r9,rsi // x2 &= MASK4; + (10) and r10,rsi // x3 &= MASK4; + (11) and r11,rsi // x4 &= MASK4; + (12) add rax,20h // ptr += 16; + (13) or r8,r9 // (x1 != 0 || x2 != 0) + (14) mov r9,r10 + (15) or r9,r11 // (x3 != 0 || x4 != 0) + (16) or r8,r9 // (...) || (...) + (17) test r8,r8 // if (...) ... + (18) jne INVALID // ... goto INVALID; + (19) cmp rax,rcx // while (ptr < end) + (20) jb LOOP // ... (continue) + + TAIL: + // continue for size <= 7 + + Commentary: + - At 3 to 6 we parallelize the reads from memory into 4 register + - At 8 to 11 we perform the ANDs again in a way that can be //ized by the CPU + - At 12, we pre-increment the pointer, so that the value is ready at 19 + - At 13 to 16, the whole if expression is optimized into a 3 or in cascade. + - note: doing "(... || ...) || (... || ...)" is ~5% faster than "(... || ... || ... 
|| ...)" on my CPU + - At 18, we jump to the "INVALID" case, instead of doing "return false", because current JIT produce better code that way + - note: if we "return false" here, the JIT adds an additional JMP inside the loop, which if ~15% slower on my CPU + */ + #endregion + + const ulong MASK_4_CHARS = 0xFF80FF80FF80FF80UL; + const uint MASK_2_CHARS = 0xFF80FF80U; + const ushort MASK_1_CHAR = 0xFF80; + + char* ptr = pChars; + char* end = ptr + (numChars & ~15); + while (ptr < end) + { + ulong x1 = *(ulong*) (ptr + 0); + ulong x2 = *(ulong*) (ptr + 4); + ulong x3 = *(ulong*) (ptr + 8); + ulong x4 = *(ulong*) (ptr + 12); + // combine all the bits together in stages + x1 |= x2; + x3 |= x4; + x1 |= x3; + // drop the LS 7 bits + x1 &= MASK_4_CHARS; + ptr += 16; + if (x1 != 0) goto INVALID; + } + + if ((numChars & 8) != 0) + { + ulong x1 = *(ulong*) (ptr + 0); + ulong x2 = *(ulong*) (ptr + 4); + x1 = x1 | x2; + x1 &= MASK_4_CHARS; + ptr += 8; + if (x1 != 0) goto INVALID; + } + + if ((numChars & 4) != 0) + { + ulong x1 = *(ulong*) ptr & MASK_4_CHARS; + if (x1 != 0) goto INVALID; + ptr += 4; + } + if ((numChars & 2) != 0) + { + uint x1 = *(uint*) ptr & MASK_2_CHARS; + if (x1 != 0) goto INVALID; + ptr += 2; + } + // check the last character, if present + return (numChars & 1) == 0 || (*ptr & MASK_1_CHAR) == 0; + + INVALID: + // there is one character that is >= 0x80 in the string + return false; + } + + /// Check if a section of byte array only contains bytes between 0 and 127 (7-bit ASCII) + /// False if at least one byte has bit 7 set to 1; otherwise, True. + [Pure] + public static bool IsAsciiBytes([NotNull] byte[] array, int offset, int count) + { + Contract.Requires(array != null); + fixed (byte* pBytes = &array[offset]) + { + return IsAsciiBytes(pBytes, checked((uint) count)); + } + } + + /// Check if a memory region only contains bytes between 0 and 127 (7-bit ASCII) + /// False if at least one byte has bit 7 set to 1; otherwise, True. 
+ [Pure] + public static bool IsAsciiBytes([NotNull] byte* buffer, uint count) + { + Contract.Requires(buffer != null); + + // we test if each byte has at least one bit set above bit 7, ie: (byte & 0x80) != 0 + // to speed things up, we check multiple bytes at a time + + const ulong MASK_8 = 0x8080808080808080UL; + const uint MASK_4 = 0x80808080U; + const int MASK_2 = 0x8080; + const int MASK_1 = 0x80; + + byte* end = buffer + (count & ~31); + byte* ptr = buffer; + while (ptr < end) + { + ulong x1 = *((ulong*) ptr + 0); + ulong x2 = *((ulong*) ptr + 1); + ulong x3 = *((ulong*) ptr + 2); + ulong x4 = *((ulong*) ptr + 3); + x1 |= x2; + x3 |= x4; + x1 |= x3; + x1 &= MASK_8; + ptr += 32; + if (x1 != 0) goto INVALID; + } + + if ((count & 16) != 0) + { + ulong x1 = *((ulong*) ptr + 0); + ulong x2 = *((ulong*) ptr + 1); + x1 |= x2; + x1 &= MASK_8; + ptr += 16; + if (x1 != 0) goto INVALID; + } + if ((count & 8) != 0) + { + if ((*((ulong*) ptr) & MASK_8) != 0) goto INVALID; + ptr += 8; + } + if ((count & 4) != 0) + { + if ((*((uint*) ptr) & MASK_4) != 0) goto INVALID; + ptr += 4; + } + if ((count & 2) != 0) + { + if ((*((ushort*) ptr) & MASK_2) != 0) goto INVALID; + ptr += 2; + } + if ((count & 1) != 0) + { + return *ptr < MASK_1; + } + // there is one character that is >= 0x80 in the string + return true; + INVALID: + return false; + } + + /// Convert a byte stream into a .NET string by expanding each byte to 16 bits characters + /// Equivalent .NET string + /// + /// This is safe to use with 7-bit ASCII strings. + /// You should *NOT* use this if the buffer contains ANSI or UTF-8 encoded strings! + /// If the bufer contains bytes that are >= 0x80, they will be mapped to the equivalent Unicode code points (0x80..0xFF), WITHOUT converting them using current ANSI code page. + /// + /// + /// ConvertToByteString(new byte[] { 'A', 'B', 'C' }, 0, 3) => "ABC" + /// ConvertToByteString(new byte[] { 255, 'A', 'B', 'C' }, 0, 4) => "\xffABC" + /// ConvertToByteString(UTF8("é"), ...) 
=> "é" (len=2, 'C3 A9') + /// + [Pure, NotNull] + public static string ConvertToByteString([NotNull] byte[] array, int offset, int count) + { + Contract.Requires(array != null && offset >= 0 && count >= 0 && offset + count <= array.Length); + + // fast allocate a new empty string that will be mutated in-place. + //note: this calls String::CtorCharCount() which in turn calls FastAllocateString(..), but will not fill the buffer with 0s if 'char' == '\0' + string str = new string('\0', count); + + fixed (byte* ptr = &array[offset]) + fixed (char* pChars = str) + { + ConvertToByteStringUnsafe(pChars, ptr, (uint) count); + return str; + } + } + + /// Convert a byte stream into a .NET string by expanding each byte to 16 bits characters + /// Equivalent .NET string + /// + /// This is safe to use with 7-bit ASCII strings. + /// You should *NOT* use this if the buffer contains ANSI or UTF-8 encoded strings! + /// If the bufer contains bytes that are >= 0x80, they will be mapped to the equivalent Unicode code points (0x80..0xFF), WITHOUT converting them using current ANSI code page. + /// + [Pure, NotNull] + public static string ConvertToByteString(byte* pBytes, uint count) + { + Contract.Requires(pBytes != null); + + if (count == 0) return String.Empty; + + // fast allocate a new empty string that will be mutated in-place. 
+ //note: this calls String::CtorCharCount() which in turn calls FastAllocateString(..), but will not fill the buffer with 0s if 'char' == '\0' + string str = new string('\0', checked((int) count)); + fixed (char* pChars = str) + { + ConvertToByteStringUnsafe(pChars, pBytes, count); + return str; + } + } + + internal static void ConvertToByteStringUnsafe(char* pChars, byte* pBytes, uint count) + { + byte* inp = pBytes; + char* outp = pChars; + + // unroll 4 characters at a time + byte* inend = pBytes + (count & ~3); + while (inp < inend) + { + //this loop has been verified to produce the best x64 code I could get out from the DesktopCLR JIT (4.6.x) + long x = *(long*) inp; + // split + long y1 = x & 0xFF; + long y2 = x & 0xFF00; + long y3 = x & 0xFF0000; + long y4 = x & 0xFF000000; + // shift + y2 <<= 8; + y3 <<= 16; + y4 <<= 24; + // merge + y1 |= y2; + y3 |= y4; + y1 |= y3; + // output + *(long*) outp = y1; + inp += 4; + outp += 4; + } + // complete the tail + + if ((count & 2) != 0) + { // two chars + int x = *(ushort*) inp; + // split + int y1 = x & 0xFF; + int y2 = x & 0xFF00; + // shift + y2 <<= 8; + // merge + y2 |= y1; + // output + *(int*) outp = y2; + inp += 2; + outp += 2; + } + + if ((count & 1) != 0) + { // one char + *outp = (char) *inp; + } + } + #endregion + + [SuppressUnmanagedCodeSecurity] + [SecurityCritical] + internal static class NativeMethods + { + // C/C++ .NET + // --------------------------------- + // void* byte* (or IntPtr) + // size_t UIntPtr (or IntPtr) + // int int + // char byte + + /// Compare characters in two buffers. + /// First buffer. + /// Second buffer. + /// Number of bytes to compare. + /// The return value indicates the relationship between the buffers. 
+ [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl, SetLastError = false)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] + public static extern int memcmp(byte* buf1, byte* buf2, UIntPtr count); + + /// Moves one buffer to another. + /// Destination object. + /// Source object. + /// Number of bytes to copy. + /// The value of dest. + /// Copies count bytes from src to dest. If some regions of the source area and the destination overlap, both functions ensure that the original source bytes in the overlapping region are copied before being overwritten. + [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl, SetLastError = false)] + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] + public static extern byte* memmove(byte* dest, byte* src, UIntPtr count); + + /// Sets buffers to a specified character. + /// Pointer to destination + /// Character to set + /// Number of characters + /// memset returns the value of dest. + /// The memset function sets the first count bytes of dest to the character c. + [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl, SetLastError = false)] + public static extern byte* memset(byte* dest, int ch, UIntPtr count); + + } + + [DebuggerNonUserCode] + internal static class Errors + { + + /// Reject an invalid slice by throw an error with the appropriate diagnostic message. + /// If the corresponding slice is invalid (offset or count out of bounds, array is null, ...) + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception MalformedBuffer(byte* bytes, long count) + { + if (count < 0) return BufferCountNotNeg(); + if (count > 0) + { + if (bytes == null) return BufferArrayNotNull(); + } + // maybe it's Lupus ? + return BufferInvalid(); + } + + /// Reject an invalid slice by throw an error with the appropriate diagnostic message. 
+ /// If the corresponding slice is invalid (offset or count out of bounds, array is null, ...) + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception MalformedBuffer(byte[] array, long offset, long count) + { + if (offset < 0) return BufferOffsetNotNeg(); + if (count < 0) return BufferCountNotNeg(); + if (count > 0) + { + if (array == null) return BufferArrayNotNull(); + if (offset + count > array.Length) return BufferArrayToSmall(); + } + // maybe it's Lupus ? + return BufferInvalid(); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static OverflowException PowerOfTwoOverflow() + { + return new OverflowException("Cannot compute the next power of two because the value would overflow."); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static OverflowException PowerOfTwoNegative() + { + return new OverflowException("Cannot compute the next power of two for negative numbers."); + } + + /// Reject an attempt to write past the end of a buffer + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static InvalidOperationException BufferOutOfBound() + { + return new InvalidOperationException("Attempt to write outside of the buffer, or at a position that would overflow past the end."); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowOffsetOutsideSlice() + { + throw OffsetOutsideSlice(); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception OffsetOutsideSlice() + { + // ReSharper disable once NotResolvedInText + return ThrowHelper.ArgumentOutOfRangeException("offset", "Offset is outside the bounds of the slice."); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowIndexOutOfBound(int index) + { + throw IndexOutOfBound(index); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static 
IndexOutOfRangeException IndexOutOfBound(int index) + { + return new IndexOutOfRangeException("Index is outside the slice"); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static FormatException SliceOffsetNotNeg() + { + return new FormatException("The specified slice has a negative offset, which is not legal. This may be a side effect of memory corruption."); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowSliceCountNotNeg() + { + throw SliceCountNotNeg(); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static FormatException SliceCountNotNeg() + { + return new FormatException("The specified slice has a negative size, which is not legal. This may be a side effect of memory corruption."); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static FormatException SliceBufferNotNull() + { + return new FormatException("The specified slice is missing its underlying buffer."); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowSliceBufferTooSmall() + { + throw SliceBufferTooSmall(); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static FormatException SliceBufferTooSmall() + { + return new FormatException("The specified slice is larger than its underlying buffer."); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static FormatException SliceInvalid() + { + return new FormatException("The specified slice is invalid."); + } + + [ContractAnnotation("=>halt"), MethodImpl(MethodImplOptions.NoInlining)] + public static T ThrowSliceTooLargeForConversion(int size) + { + throw new FormatException($"Cannot convert slice to value of type {typeof(T).Name} because it is larger than {size} bytes."); + } + + [ContractAnnotation("=>halt"), MethodImpl(MethodImplOptions.NoInlining)] + public static T 
ThrowSliceSizeInvalidForConversion(int size) + { + throw new FormatException($"Cannot convert slice of size {size} to value of type {typeof(T).Name}."); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ArgumentException BufferOffsetNotNeg() + { + // ReSharper disable once NotResolvedInText + return new ArgumentException("The specified segment has a negative offset, which is not legal. This may be a side effect of memory corruption.", "offset"); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ArgumentException BufferCountNotNeg() + { + // ReSharper disable once NotResolvedInText + return new ArgumentException("The specified segment has a negative size, which is not legal. This may be a side effect of memory corruption.", "count"); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ArgumentException BufferArrayNotNull() + { + // ReSharper disable once NotResolvedInText + return new ArgumentException("The specified segment is missing its underlying buffer.", "array"); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowBufferArrayToSmall() + { + throw BufferArrayToSmall(); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ArgumentException BufferArrayToSmall() + { + // ReSharper disable once NotResolvedInText + return new ArgumentException("The specified segment is larger than its underlying buffer.", "count"); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ArgumentException BufferInvalid() + { + // ReSharper disable once NotResolvedInText + return new ArgumentException("The specified segment is invalid."); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static FormatException VarIntOverflow() + { + return new FormatException("Malformed Varint would overflow the expected range"); + } + + [Pure, NotNull, 
MethodImpl(MethodImplOptions.NoInlining)] + public static FormatException VarIntTruncated() + { + return new FormatException("Malformed Varint seems to be truncated"); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static FormatException VarBytesTruncated() + { + return new FormatException("Malformed VarBytes seems to be truncated"); + } + + } + } + +} diff --git a/FoundationDB.Client/Shared/README.MD b/FoundationDB.Client/Shared/README.MD new file mode 100644 index 000000000..2d9659951 --- /dev/null +++ b/FoundationDB.Client/Shared/README.MD @@ -0,0 +1 @@ +This folder contains all the shared framework libraries and tools. \ No newline at end of file diff --git a/FoundationDB.Client/Shared/StringConverters.cs b/FoundationDB.Client/Shared/StringConverters.cs new file mode 100644 index 000000000..101ea1379 --- /dev/null +++ b/FoundationDB.Client/Shared/StringConverters.cs @@ -0,0 +1,763 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#endregion

namespace Doxense.Serialization
{
	using System;
	using System.Globalization;
	using System.Runtime.CompilerServices;
	using Doxense.Diagnostics.Contracts;
	using JetBrains.Annotations;

	internal static class StringConverters
	{
		#region Numbers...

		//NOTE: these methods were imported from KTL/Sioux
		//REVIEW: not sure this is the best place for this code?

		/// <summary>Lookup table for the numbers 0 to 99, to avoid allocating a string for small values</summary>
		//note: since these are literals, they are automatically interned
		private static readonly string[] SmallNumbers = new string[100]
		{
			"0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
			"10", "11", "12", "13", "14", "15", "16", "17", "18", "19",
			"20", "21", "22", "23", "24", "25", "26", "27", "28", "29",
			"30", "31", "32", "33", "34", "35", "36", "37", "38", "39",
			"40", "41", "42", "43", "44", "45", "46", "47", "48", "49",
			"50", "51", "52", "53", "54", "55", "56", "57", "58", "59",
			"60", "61", "62", "63", "64", "65", "66", "67", "68", "69",
			"70", "71", "72", "73", "74", "75", "76", "77", "78", "79",
			"80", "81", "82", "83", "84", "85", "86", "87", "88", "89",
			"90", "91", "92", "93", "94", "95", "96", "97", "98", "99",
		};

		/// <summary>Convert an integer into a string, in an optimized way</summary>
		/// <param name="value">Integer value to convert</param>
		/// <returns>String version of the value</returns>
		/// <remarks>This method tries to avoid memory allocations as much as possible</remarks>
		[Pure, NotNull]
		public static string ToString(int value)
		{
			var cache = SmallNumbers;
			return value >= 0 && value < cache.Length ? cache[value] : value.ToString(NumberFormatInfo.InvariantInfo);
		}

		/// <summary>Convert an unsigned integer into a string, in an optimized way</summary>
		/// <param name="value">Integer value to convert</param>
		/// <returns>String version of the value</returns>
		/// <remarks>This method tries to avoid memory allocations as much as possible</remarks>
		[Pure, NotNull]
		public static string ToString(uint value)
		{
			var cache = SmallNumbers;
			return value < cache.Length ? cache[value] : value.ToString(NumberFormatInfo.InvariantInfo);
		}

		/// <summary>Convert a long integer into a string, in an optimized way</summary>
		/// <param name="value">Integer value to convert</param>
		/// <returns>String version of the value</returns>
		/// <remarks>This method tries to avoid memory allocations as much as possible</remarks>
		[Pure, NotNull]
		public static string ToString(long value)
		{
			var cache = SmallNumbers;
			return value >= 0 && value < cache.Length ? cache[(int) value] : value.ToString(NumberFormatInfo.InvariantInfo);
		}

		/// <summary>Convert an unsigned long integer into a string, in an optimized way</summary>
		/// <param name="value">Integer value to convert</param>
		/// <returns>String version of the value</returns>
		/// <remarks>This method tries to avoid memory allocations as much as possible</remarks>
		[Pure, NotNull]
		public static string ToString(ulong value)
		{
			var cache = SmallNumbers;
			return value < (ulong) cache.Length ? cache[(int) value] : value.ToString(NumberFormatInfo.InvariantInfo);
		}

		/// <summary>Convert a floating point number into a string, in an optimized way</summary>
		/// <param name="value">Decimal value to convert</param>
		/// <returns>String version of the value</returns>
		/// <remarks>This method tries to avoid memory allocations as much as possible</remarks>
		[Pure, NotNull]
		public static string ToString(float value)
		{
			long x = unchecked((long) value);
			// if the value round-trips through long, it is a small integral value and may be cached
			// ReSharper disable once CompareOfFloatsByEqualityOperator
			return x != value
				? value.ToString("R", CultureInfo.InvariantCulture)
				: (x >= 0 && x < SmallNumbers.Length ? SmallNumbers[(int) x] : x.ToString(NumberFormatInfo.InvariantInfo));
		}

		/// <summary>Convert a double precision number into a string, in an optimized way</summary>
		/// <param name="value">Decimal value to convert</param>
		/// <returns>String version of the value</returns>
		/// <remarks>This method tries to avoid memory allocations as much as possible</remarks>
		[Pure, NotNull]
		public static string ToString(double value)
		{
			long x = unchecked((long) value);
			// if the value round-trips through long, it is a small integral value and may be cached
			// ReSharper disable once CompareOfFloatsByEqualityOperator
			return x != value
				? value.ToString("R", CultureInfo.InvariantCulture)
				: (x >= 0 && x < SmallNumbers.Length ? SmallNumbers[(int) x] : x.ToString(NumberFormatInfo.InvariantInfo));
		}

		/// <summary>Convert a string into a boolean</summary>
		/// <param name="value">Text string (ex: "true")</param>
		/// <param name="dflt">Default value if empty or invalid</param>
		/// <returns>Corresponding boolean value (ex: true), or the default value</returns>
		/// <remarks>Values for true are "true", "yes", "on", "1".
		/// Values for false are "false", "no", "off", "0", and everything else.
		/// null and the empty string are considered false.
		/// </remarks>
		[Pure]
		public static bool ToBoolean(string value, bool dflt)
		{
			if (string.IsNullOrEmpty(value)) return dflt;
			switch (value[0])
			{
				case 't': case 'T': case 'y': case 'Y': case '1':
					return true;
				case 'f': case 'F': case 'n': case 'N': case '0':
					return false;
				case 'o': case 'O':
					if (value.Length > 1) { char c = value[1]; return c == 'n' || c == 'N'; }
					return dflt;
				default:
					return dflt;
			}
		}

		/// <summary>Convert a string into a boolean</summary>
		/// <param name="value">Text string (ex: "true")</param>
		/// <returns>Corresponding boolean value (ex: true), or null</returns>
		/// <remarks>Values for true are "true", "yes", "on", "1".
		/// Values for false are "false", "no", "off", "0".
		/// </remarks>
		[Pure]
		public static bool?
ToBoolean(string value) + { + if (string.IsNullOrEmpty(value)) return null; + char c = value[0]; + if (c == 't' || c == 'T') return true; + if (c == 'f' || c == 'F') return false; + if (c == 'y' || c == 'Y') return true; + if (c == 'n' || c == 'N') return false; + if ((c == 'o' || c == 'O') && value.Length > 1) { c = value[1]; return c == 'n' || c == 'N'; } + if (c == '1') return true; + if (c == '0') return false; + return null; + } + + /// Convertit un entier jusqu'au prochain sparateur (ou fin de buffer). A utilis pour simuler un Split + /// Buffer de caractres + /// Offset courant dans le buffer + /// + /// Sparateur attendu entre les ints + /// Valeur par dfaut retourne si erreur + /// Rcupre le rsultat de la conversion + /// Rcupre la nouvelle position (aprs le sparateur) + /// true si int charg, false si erreur (plus de place, incorrect, ...) + /// Si buffer est null + public static unsafe bool FastTryGetInt([NotNull] char* buffer, int offset, int length, char separator, int defaultValue, out int result, out int newpos) + { + Contract.PointerNotNull(buffer, nameof(buffer)); + result = defaultValue; + newpos = offset; + if (offset < 0 || offset >= length) return false; // deja a la fin !! + + char c = buffer[offset]; + if (c == separator) { newpos = offset + 1; return false; } // avance quand mme le curseur + if (!char.IsDigit(c)) + { // c'est pas un nombre, va jusqu'au prochain sparateur + while (offset < length) + { + c = buffer[offset++]; + if (c == separator) break; + } + newpos = offset; + return false; // deja le separateur, ou pas un digit == WARNING: le curseur ne sera pas avanc! 
+ } + int res = c - 48; + offset++; + // il y a au moins 1 digit, parcourt les suivants + while (offset < length) + { + c = buffer[offset++]; + if (c == separator) break; + if (!char.IsDigit(c)) + { // va jusqu'au prochain sparator + while (offset < length) + { + c = buffer[offset++]; + if (c == separator) break; + } + newpos = offset; + return false; + } + // accumule le digit + res = res * 10 + (c - 48); + } + + result = res; + newpos = offset; + return true; + } + + /// Convertit un entier jusqu'au prochain sparateur (ou fin de buffer). A utilis pour simuler un Split + /// Buffer de caractres + /// Offset courant dans le buffer + /// + /// Sparateur attendu entre les ints + /// Valeur par dfaut retourne si erreur + /// Rcupre le rsultat de la conversion + /// Rcupre la nouvelle position (aprs le sparateur) + /// true si int charg, false si erreur (plus de place, incorrect, ...) + /// Si buffer est null + public static unsafe bool FastTryGetLong([NotNull] char* buffer, int offset, int length, char separator, long defaultValue, out long result, out int newpos) + { + Contract.PointerNotNull(buffer, nameof(buffer)); + result = defaultValue; + newpos = offset; + if (offset < 0 || offset >= length) return false; // deja a la fin !! + + char c = buffer[offset]; + if (c == separator) { newpos = offset + 1; return false; } // avance quand mme le curseur + if (!char.IsDigit(c)) + { // c'est pas un nombre, va jusqu'au prochain sparateur + while (offset < length) + { + c = buffer[offset++]; + if (c == separator) break; + } + newpos = offset; + return false; // deja le separateur, ou pas un digit == WARNING: le curseur ne sera pas avanc! 
+ } + int res = c - 48; + offset++; + // il y a au moins 1 digit, parcourt les suivants + while (offset < length) + { + c = buffer[offset++]; + if (c == separator) break; + if (!char.IsDigit(c)) + { // va jusqu'au prochain sparator + while (offset < length) + { + c = buffer[offset++]; + if (c == separator) break; + } + newpos = offset; + return false; + } + // accumule le digit + res = res * 10 + (c - 48); + } + + result = res; + newpos = offset; + return true; + } + + /// Convertit une chane en entier (int) + /// Chane de caractre (ex: "1234") + /// Valeur par dfaut si vide ou invalide + /// Entier correspondant ou valeur par dfaut si pb (ex: 1234) + [Pure] + public static int ToInt32(string value, int defaultValue) + { + if (string.IsNullOrEmpty(value)) return defaultValue; + // optimisation: si premier carac pas chiffre, exit + char c = value[0]; + if (value.Length == 1) return char.IsDigit(c) ? c - 48 : defaultValue; + if (!char.IsDigit(c) && c != '-' && c != '+' && c != ' ') return defaultValue; + return int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out int res) ? res : defaultValue; + } + + /// Convertit une chane en entier (int) + /// Chane de caractre (ex: "1234") + /// Entier correspondant ou null si pb (ex: 1234) + [Pure] + public static int? ToInt32(string value) + { + if (string.IsNullOrEmpty(value)) return default(int?); + // optimisation: si premier carac pas chiffre, exit + char c = value[0]; + if (value.Length == 1) return char.IsDigit(c) ? (c - 48) : default(int?); + if (!char.IsDigit(c) && c != '-' && c != '+' && c != ' ') return default(int?); + return int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out int res) ? 
res : default(int?); + } + + /// Convertit une chane en entier (long) + /// Chane de caractre (ex: "1234") + /// Valeur par dfaut si vide ou invalide + /// Entier correspondant ou valeur par dfaut si pb (ex: 1234) + [Pure] + public static long ToInt64(string value, long defaultValue) + { + if (string.IsNullOrEmpty(value)) return defaultValue; + // optimisation: si premier carac pas chiffre, exit + char c = value[0]; + if (value.Length == 1) return char.IsDigit(c) ? ((long) c - 48) : defaultValue; + if (!char.IsDigit(c) && c != '-' && c != '+' && c != ' ') return defaultValue; + return long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out long res) ? res : defaultValue; + } + + /// Convertit une chane en entier (long) + /// Chane de caractre (ex: "1234") + /// Entier correspondant ou null si pb (ex: 1234) + [Pure] + public static long? ToInt64(string value) + { + if (string.IsNullOrEmpty(value)) return default(long?); + // optimisation: si premier carac pas chiffre, exit + char c = value[0]; + if (value.Length == 1) return char.IsDigit(c) ? ((long) c - 48) : default(long?); + if (!char.IsDigit(c) && c != '-' && c != '+' && c != ' ') return default(long?); + return long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out long res) ? res : default(long?); + } + + /// Convertit une chaine de caractre en double, quelque soit la langue locale (utilise le '.' comme sparateur dcimal) + /// Chaine (ex: "1.0", "123.456e7") + /// Valeur par dfaut si problme de conversion ou null + /// Culture (par dfaut InvariantCulture) + /// Double correspondant + [Pure] + public static double ToDouble(string value, double defaultValue, IFormatProvider culture = null) + { + if (string.IsNullOrEmpty(value)) return defaultValue; + char c = value[0]; + if (!char.IsDigit(c) && c != '+' && c != '-' && c != '.' 
&& c != ' ') return defaultValue; + if (culture == null) culture = CultureInfo.InvariantCulture; + if (culture == CultureInfo.InvariantCulture && value.IndexOf(',') >= 0) value = value.Replace(',', '.'); + return double.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, culture, out double result) ? result : defaultValue; + } + + [Pure] + public static double? ToDouble(string value, IFormatProvider culture = null) + { + if (value == null) return default(double?); + double result = ToDouble(value, double.NaN, culture); + return double.IsNaN(result) ? default(double?) : result; + } + + /// Convertit une chaine de caractre en float, quelque soit la langue locale (utilise le '.' comme sparateur dcimal) + /// Chaine (ex: "1.0", "123.456e7") + /// Valeur par dfaut si problme de conversion ou null + /// Culture (par dfaut InvariantCulture) + /// Float correspondant + [Pure] + public static float ToSingle(string value, float defaultValue, IFormatProvider culture = null) + { + if (string.IsNullOrEmpty(value)) return defaultValue; + char c = value[0]; + if (!char.IsDigit(c) && c != '+' && c != '-' && c != '.' && c != ' ') return defaultValue; + if (culture == null) culture = CultureInfo.InvariantCulture; + if (culture == CultureInfo.InvariantCulture && value.IndexOf(',') >= 0) value = value.Replace(',', '.'); + return float.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, culture, out float result) ? result : defaultValue; + } + + [Pure] + public static float? ToSingle(string value, IFormatProvider culture = null) + { + if (value == null) return default(float?); + float result = ToSingle(value, float.NaN, culture); + return double.IsNaN(result) ? default(float?) : result; + } + + /// Convertit une chaine de caractre en double, quelque soit la langue locale (utilise le '.' 
comme sparateur dcimal) + /// Chaine (ex: "1.0", "123.456e7") + /// Valeur par dfaut si problme de conversion ou null + /// Culture (par dfaut InvariantCulture) + /// Dcimal correspondant + [Pure] + public static decimal ToDecimal(string value, decimal defaultValue, IFormatProvider culture = null) + { + if (string.IsNullOrEmpty(value)) return defaultValue; + char c = value[0]; + if (!char.IsDigit(c) && c != '+' && c != '-' && c != '.' && c != ' ') return defaultValue; + if (culture == null) culture = CultureInfo.InvariantCulture; + if (culture == CultureInfo.InvariantCulture && value.IndexOf(',') >= 0) value = value.Replace(',', '.'); + return decimal.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, culture, out decimal result) ? result : defaultValue; + } + + [Pure] + public static decimal? ToDecimal(string value, IFormatProvider culture = null) + { + if (string.IsNullOrEmpty(value)) return default(decimal?); + char c = value[0]; + if (!char.IsDigit(c) && c != '+' && c != '-' && c != '.' && c != ' ') return default(decimal?); + if (culture == null) culture = CultureInfo.InvariantCulture; + if (culture == CultureInfo.InvariantCulture && value.IndexOf(',') >= 0) value = value.Replace(',', '.'); + return decimal.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, culture, out decimal result) ? result : default(decimal?); + } + + /// Convertit une chaine en DateTime + /// Date convertir + /// Valeur par dfaut + /// + /// Voir StringConverters.ParseDateTime() + [Pure] + public static DateTime ToDateTime(string value, DateTime defaultValue, CultureInfo culture = null) + { + return ParseDateTime(value, defaultValue, culture); + } + + /// Convertit une chaine en DateTime + /// Date convertir + /// + /// Voir StringConverters.ParseDateTime() + [Pure] + public static DateTime? 
ToDateTime(string value, CultureInfo culture = null) + { + if (string.IsNullOrEmpty(value)) return default(DateTime?); + DateTime result = ParseDateTime(value, DateTime.MaxValue, culture); + return result == DateTime.MaxValue ? default(DateTime?) : result; + } + + /// Convertit une chaine de caractres en GUID + /// Chaine (ex: "123456-789") + /// Valeur par dfaut si problme de conversion ou null + /// GUID correspondant + [Pure] + public static Guid ToGuid(string value, Guid defaultValue) + { + if (string.IsNullOrEmpty(value)) return defaultValue; + return Guid.TryParse(value, out Guid result) ? result : defaultValue; + } + + [Pure] + public static Guid? ToGuid(string value) + { + if (string.IsNullOrEmpty(value)) return default(Guid?); + return Guid.TryParse(value, out Guid result) ? result : default(Guid?); + } + + /// Convertit une chaine de caractres en Enum + /// Type de l'Enum + /// Chaine (ex: "Red", "2", ...) + /// Valeur par dfaut si problme de conversion ou null + /// Valeur de l'enum correspondante + /// Accepte les valeures sous forme textuelle ou numrique, case insensitive + [Pure] + public static TEnum ToEnum(string value, TEnum defaultValue) + where TEnum : struct, IComparable, IConvertible, IFormattable + { + if (string.IsNullOrEmpty(value)) return defaultValue; + return Enum.TryParse(value, true, out TEnum result) ? result : defaultValue; + } + + [Pure] + public static TEnum? ToEnum(string value) + where TEnum : struct, IComparable, IConvertible, IFormattable + { + if (string.IsNullOrEmpty(value)) return default(TEnum?); + return Enum.TryParse(value, true, out TEnum result) ? result : default(TEnum?); + } + + #endregion + + #region Dates... 
/// <summary>Formats a date as a 14-character "yyyyMMddHHmmss" string.</summary>
/// <param name="date">Date to format</param>
/// <returns>14-character string in YYYYMMDDHHMMSS format</returns>
[Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)]
public static string ToDateTimeString(DateTime date)
{
	//REVIEW: PERF: write an optimized version?
	return date.ToString("yyyyMMddHHmmss", CultureInfo.InvariantCulture);
}

/// <summary>Formats a date as an 8-character "yyyyMMdd" string.</summary>
/// <param name="date">Date to format</param>
/// <returns>8-character string in YYYYMMDD format</returns>
[Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)]
public static string ToDateString(DateTime date)
{
	//REVIEW: PERF: write an optimized version?
	return date.ToString("yyyyMMdd", CultureInfo.InvariantCulture);
}

/// <summary>Formats a time as a 6-character "HHmmss" string.</summary>
/// <param name="date">Date whose time part is formatted</param>
/// <returns>6-character string in HHMMSS format</returns>
[Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)]
public static string ToTimeString(DateTime date)
{
	//REVIEW: PERF: write an optimized version?
	return date.ToString("HHmmss", CultureInfo.InvariantCulture);
}

/// <summary>Formats a date as a "yyyy-MM-dd HH:mm:ss" string.</summary>
/// <param name="date">Date to format</param>
/// <returns>String in "yyyy-MM-dd HH:mm:ss" format</returns>
[Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)]
public static string FormatDateTime(DateTime date)
{
	//REVIEW: PERF: write an optimized version?
	return date.ToString("yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
}

/// <summary>Formats a date as a "yyyy-MM-dd" string.</summary>
/// <param name="date">Date to format</param>
/// <returns>String in "yyyy-MM-dd" format</returns>
[Pure, NotNull]
public static string FormatDate(DateTime date)
{
	//REVIEW: PERF: write an optimized version?
	return date.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture);
}

/// <summary>Formats a time as an "HH:mm:ss" string.</summary>
/// <param name="date">Date whose time part is formatted</param>
/// <returns>String in "HH:mm:ss" format</returns>
[Pure, NotNull]
public static string FormatTime(DateTime date)
{
	//REVIEW: PERF: write an optimized version?
	return date.ToString("HH:mm:ss", CultureInfo.InvariantCulture);
}

/// <summary>Parses a "YYYY", "YYYYMM", "YYYYMMDD" or "YYYYMMDDHHMMSS" string into a DateTime.</summary>
/// <param name="date">Text to parse</param>
/// <returns>Corresponding DateTime</returns>
/// <exception cref="System.ArgumentException">If the date is invalid</exception>
[Pure]
public static DateTime ParseDateTime(string date)
{
	return ParseDateTime(date, null);
}

/// <summary>Parses a "YYYY", "YYYYMM", "YYYYMMDD" or "YYYYMMDDHHMMSS" string into a DateTime.</summary>
/// <param name="date">Text to parse</param>
/// <param name="culture">Culture (for the expected format), or null</param>
/// <returns>Corresponding DateTime</returns>
/// <exception cref="System.ArgumentException">If the date is invalid</exception>
[Pure]
public static DateTime ParseDateTime(string date, CultureInfo culture)
{
	if (!TryParseDateTime(date, culture, out DateTime result, true)) throw FailInvalidDateFormat();
	return result;
}

[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
private static Exception FailInvalidDateFormat()
{
	// ReSharper disable once NotResolvedInText
	return new ArgumentException("Invalid date format", "date");
}

/// <summary>Parses a "YYYY", "YYYYMM", "YYYYMMDD" or "YYYYMMDDHHMMSS" string into a DateTime.</summary>
/// <param name="date">Text to parse</param>
/// <param name="dflt">Default value</param>
/// <returns>Corresponding DateTime, or <paramref name="dflt"/> if the text is null, empty or invalid</returns>
[Pure]
public static DateTime ParseDateTime(string date, DateTime dflt)
{
	if (string.IsNullOrEmpty(date)) return dflt;
	if (!TryParseDateTime(date, null, out DateTime result, false)) return dflt;
	return result;
}

/// <summary>Parses a "YYYY", "YYYYMM", "YYYYMMDD" or "YYYYMMDDHHMMSS" string into a DateTime.</summary>
/// <param name="date">Text to parse</param>
/// <param name="dflt">Default value</param>
/// <param name="culture">Culture (for the expected format), or null</param>
/// <returns>Corresponding DateTime, or <paramref name="dflt"/> if the text is null, empty or invalid</returns>
[Pure]
public static DateTime ParseDateTime(string date, DateTime dflt, CultureInfo culture)
{
	if (!TryParseDateTime(date, culture, out DateTime result, false)) return dflt;
	return result;
}

/// <summary>Parses a fixed-size run of ASCII digits; returns -1 on any non-digit character.</summary>
private static int ParseDateSegmentUnsafe(string source, int offset, int size)
{
	// note: the caller is expected to have already validated the arguments
	int sum = source[offset++] - '0';
	if (sum < 0 || sum >= 10) return -1; // invalid first digit
	while (--size > 0)
	{
		int d = source[offset++] - '0';
		if (d < 0 || d >= 10) return -1; // invalid digit!
		sum = (sum * 10) + d;
	}
	return sum;
}

/// <summary>Tries to parse a "YYYY", "YYYYMM", "YYYYMMDD", "YYYYMMDDHHMMSS" or "YYYYMMDDHHMMSSFFF" string into a DateTime.</summary>
/// <param name="date">Text to parse</param>
/// <param name="culture">Culture (for the expected format), or null for invariant</param>
/// <param name="result">Parsed date (DateTime.MinValue on failure)</param>
/// <param name="throwsFail">If true, let exceptions escape; if false, swallow them and return false</param>
/// <returns>True if the date was parsed successfully, false otherwise</returns>
[Pure]
public static bool TryParseDateTime(string date, CultureInfo culture, out DateTime result, bool throwsFail)
{
	result = DateTime.MinValue;

	if (date == null) { if (throwsFail) throw new ArgumentNullException(nameof(date)); else return false; }
	if (date.Length < 4) { if (throwsFail) throw new FormatException("Date '" + date + "' must be at least 4 characters long"); else return false; }
	try
	{
		if (char.IsDigit(date[0]))
		{ // starts with a digit: may be a compact timestamp
			switch (date.Length)
			{
				case 4:
				{ // YYYY -> YYYY/01/01 00:00:00.000
					int y = ParseDateSegmentUnsafe(date, 0, 4);
					if (y < 1 || y > 9999) break;
					result = new DateTime(y, 1, 1);
					return true;
				}
				case 6:
				{ // YYYYMM -> YYYY/MM/01 00:00:00.000
					int y = ParseDateSegmentUnsafe(date, 0, 4);
					if (y < 1 || y > 9999) break;
					int m = ParseDateSegmentUnsafe(date, 4, 2);
					if (m < 1 || m > 12) break;
					result = new DateTime(y, m, 1);
					return true;
				}
				case 8:
				{ // YYYYMMDD -> YYYY/MM/DD 00:00:00.000
					int y = ParseDateSegmentUnsafe(date, 0, 4);
					if (y < 1 || y > 9999) break;
					int m = ParseDateSegmentUnsafe(date, 4, 2);
					if (m < 1 || m > 12) break;
					int d = ParseDateSegmentUnsafe(date, 6, 2);
					if (d < 1 || d > 31) break;
					result = new DateTime(y, m, d);
					return true;
				}
				case 14:
				{ // YYYYMMDDHHMMSS -> YYYY/MM/DD HH:MM:SS.000
					int y = ParseDateSegmentUnsafe(date, 0, 4);
					if (y < 1 || y > 9999) break;
					int m = ParseDateSegmentUnsafe(date, 4, 2);
					if (m < 1 || m > 12) break;
					int d = ParseDateSegmentUnsafe(date, 6, 2);
					if (d < 1 || d > 31) break;
					int h = ParseDateSegmentUnsafe(date, 8, 2);
					if (h < 0 || h > 23) break;
					int n = ParseDateSegmentUnsafe(date, 10, 2);
					if (n < 0 || n > 59) break;
					int s = ParseDateSegmentUnsafe(date, 12, 2);
					if (s < 0 || s > 59) break;
					result = new DateTime(y, m, d, h, n, s);
					return true;
				}
				case 17:
				{ // YYYYMMDDHHMMSSFFF -> YYYY/MM/DD HH:MM:SS.FFF
					int y = ParseDateSegmentUnsafe(date, 0, 4);
					if (y < 1 || y > 9999) break;
					int m = ParseDateSegmentUnsafe(date, 4, 2);
					if (m < 1 || m > 12) break;
					int d = ParseDateSegmentUnsafe(date, 6, 2);
					if (d < 1 || d > 31) break;
					int h = ParseDateSegmentUnsafe(date, 8, 2);
					if (h < 0 || h > 23) break;
					int n = ParseDateSegmentUnsafe(date, 10, 2);
					if (n < 0 || n > 59) break;
					int s = ParseDateSegmentUnsafe(date, 12, 2);
					if (s < 0 || s > 59) break;
					int f = ParseDateSegmentUnsafe(date, 14, 3);
					// BUGFIX: the milliseconds segment was not validated; on non-digit input ParseDateSegmentUnsafe
					// returns -1, and new DateTime(..., -1) threw ArgumentOutOfRangeException instead of failing
					// the parse like every other segment
					if (f < 0) break;
					result = new DateTime(y, m, d, h, n, s, f);
					return true;
				}
			}
		}
		else if (char.IsLetter(date[0]))
		{ // starts with a letter: try exact parsing with the long/full date formats
			result = DateTime.ParseExact(date, new[] { "D", "F", "f" }, culture ?? CultureInfo.InvariantCulture, DateTimeStyles.None);
			return true;
		}

		// last chance: general-purpose parsing
		result = DateTime.Parse(date, culture ?? CultureInfo.InvariantCulture);
		return true;
	}
	catch (FormatException)
	{ // no luck this time...
		if (throwsFail) throw;
		return false;
	}
	catch (ArgumentOutOfRangeException)
	{ // DateTime rejects impossible dates (February 31st, ...)
		if (throwsFail) throw;
		return false;
	}
}

/// <summary>Parses a "human friendly" time into a DateTime: "11", "11h", "11h00", "11:00" -> {11:00:00.000}</summary>
/// <param name="time">Text containing the time to parse</param>
/// <returns>DateTime carrying the time; the date part is set to today</returns>
/// <remarks>NOTE(review): only hours and minutes are parsed; a "HH:mm:ss" input throws from Convert.ToInt16 — confirm intended.</remarks>
[Pure]
public static DateTime ParseTime([NotNull] string time)
{
	Contract.NotNullOrEmpty(time, nameof(time));

	time = time.ToLowerInvariant();

	int hour;
	int minute = 0;
	int second = 0;

	int p = time.IndexOf('h');
	if (p > 0)
	{
		hour = System.Convert.ToInt16(time.Substring(0, p));
		if (p + 1 >= time.Length)
			minute = 0;
		else
			minute = System.Convert.ToInt16(time.Substring(p + 1));
	}
	else
	{
		p = time.IndexOf(':');
		if (p > 0)
		{
			hour = System.Convert.ToInt16(time.Substring(0, p));
			if (p + 1 >= time.Length)
				minute = 0;
			else
				minute = System.Convert.ToInt16(time.Substring(p + 1));
		}
		else
		{
			hour = System.Convert.ToInt16(time);
		}
	}
	var d = DateTime.Today;
	return new DateTime(d.Year, d.Month, d.Day, hour, minute, second, 0);
}

#endregion
}
}

// NOTE(review): the original patch continued here with a second new file,
// FoundationDB.Client/Shared/ThrowHelper.cs, which follows below.
+#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Diagnostics.Contracts +{ + using JetBrains.Annotations; + using System; + using System.Diagnostics; + using System.Globalization; + using System.Reflection; + using System.Runtime.CompilerServices; + + [DebuggerNonUserCode] + internal static class ThrowHelper + { + + #region ArgumentNullException... 
+ + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception ArgumentNullException([InvokerParameterName] string paramName) + { + return new ArgumentNullException(paramName); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception ArgumentNullException([InvokerParameterName] string paramName, [NotNull] string message) + { + return new ArgumentNullException(paramName, message); + } + + [ContractAnnotation("=> halt")] + public static void ThrowArgumentNullException([InvokerParameterName] string paramName) + { + throw ArgumentNullException(paramName); + } + + [ContractAnnotation("=> halt")] + public static void ThrowArgumentNullException([InvokerParameterName] string paramName, [NotNull] string message) + { + throw ArgumentNullException(paramName, message); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.NoInlining)] + public static T ThrowArgumentNullException([InvokerParameterName] string paramName, string message = null) + { + throw message != null ? new ArgumentNullException(paramName, message) : new ArgumentNullException(paramName); + } + + #endregion + + #region ArgumentException... 
+ + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception ArgumentException([InvokerParameterName] string paramName, string message = null) + { + // oui, c'est inversé :) + return new ArgumentException(message, paramName); + } + + [Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)] + public static Exception ArgumentException([InvokerParameterName] string paramName, string message, object arg0) + { + // oui, c'est inversé :) + return new ArgumentException(string.Format(message, arg0), paramName); + } + + [Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)] + public static Exception ArgumentException([InvokerParameterName] string paramName, string message, object arg0, object arg1) + { + // oui, c'est inversé :) + return new ArgumentException(string.Format(message, arg0, arg1), paramName); + } + + [Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)] + public static Exception ArgumentException([InvokerParameterName] string paramName, string message, params object[] args) + { + // oui, c'est inversé :) + return new ArgumentException(string.Format(message, args), paramName); + } + + [ContractAnnotation("=> halt")] + public static void ThrowArgumentException([InvokerParameterName] string paramName, string message = null) + { + // oui, c'est inversé :) + throw ArgumentException(paramName, message); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.NoInlining)] + public static T ThrowArgumentException([InvokerParameterName] string paramName, string message = null) + { + // oui, c'est inversé :) + throw ArgumentException(paramName, message); + } + + #endregion + + #region ArgumentOutOfRangeException... 
+ + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception ArgumentOutOfRangeException([InvokerParameterName] string paramName, object actualValue, string message = null) + { + return new ArgumentOutOfRangeException(paramName, actualValue, message); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static Exception ArgumentOutOfRangeException([InvokerParameterName, NotNull] string paramName) + { + return new ArgumentOutOfRangeException(paramName); + } + + [ContractAnnotation("=> halt")] + public static void ThrowArgumentOutOfRangeException() + { + // ReSharper disable once NotResolvedInText + throw ArgumentOutOfRangeException("index", "Index was out of range. Must be non-negative and less than the size of the collection."); + } + + [ContractAnnotation("=> halt")] + public static void ThrowArgumentOutOfRangeException([InvokerParameterName] string paramName) + { + throw ArgumentOutOfRangeException(paramName); + } + + [ContractAnnotation("=> halt")] + public static void ThrowArgumentOutOfRangeException([InvokerParameterName] string paramName, string message) + { + throw ArgumentOutOfRangeException(paramName, message); + } + + [ContractAnnotation("=> halt")] + public static void ThrowArgumentOutOfRangeException([InvokerParameterName] string paramName, object actualValue, string message) + { + throw ArgumentOutOfRangeException(paramName, actualValue, message); + } + + #endregion + + #region ObjectDisposedException... 
+ + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ObjectDisposedException ObjectDisposedException(TDisposed disposed) + { + return new ObjectDisposedException(disposed.GetType().Name); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ObjectDisposedException ObjectDisposedException(Type type) + { + return new ObjectDisposedException(type.Name); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ObjectDisposedException ObjectDisposedException(Type type, string message) + { + return new ObjectDisposedException(type.Name, message); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ObjectDisposedException ObjectDisposedException(TDisposed disposed, string message) + { + return new ObjectDisposedException(disposed.GetType().Name, message); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ObjectDisposedException ObjectDisposedException(string message) + { + return new ObjectDisposedException(typeof(TDisposed).Name, message); + } + + [Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)] + public static ObjectDisposedException ObjectDisposedException(string message, object arg0) + { + return new ObjectDisposedException(typeof(TDisposed).Name, string.Format(CultureInfo.InvariantCulture, message, arg0)); + } + + [Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)] + public static ObjectDisposedException ObjectDisposedException(string message, params object[] args) + { + return new ObjectDisposedException(typeof(TDisposed).Name, string.Format(CultureInfo.InvariantCulture, message, args)); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + public static ObjectDisposedException ObjectDisposedException(string message, Exception innnerException) + { + return new ObjectDisposedException(message, innnerException); + } + + 
[ContractAnnotation("=> halt")] + public static void ThrowObjectDisposedException(Type type) + { + throw ObjectDisposedException(type); + } + + [ContractAnnotation("=> halt")] + public static void ThrowObjectDisposedException(string message, Exception innnerException) + { + throw ObjectDisposedException(message, innnerException); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.NoInlining)] //fix .NET < 4.5.2 + public static void ThrowObjectDisposedException(TDisposed disposed) + where TDisposed : IDisposable + { + throw ObjectDisposedException(disposed.GetType()); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.NoInlining)] //fix .NET < 4.5.2 + public static void ThrowObjectDisposedException(TDisposed disposed, string message) + where TDisposed : IDisposable + { + throw ObjectDisposedException(disposed.GetType(), message); + } + + [ContractAnnotation("=> halt"), StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)] //fix .NET < 4.5.2 + public static void ThrowObjectDisposedException(TDisposed disposed, string message, object arg0) + where TDisposed : IDisposable + { + throw ObjectDisposedException(disposed.GetType(), string.Format(CultureInfo.InvariantCulture, message, arg0)); + } + + [ContractAnnotation("=> halt"), StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)] //fix .NET < 4.5.2 + public static void ThrowObjectDisposedException(TDisposed disposed, string message, params object[] args) + where TDisposed : IDisposable + { + throw ObjectDisposedException(disposed.GetType(), string.Format(CultureInfo.InvariantCulture, message, args)); + } + + #endregion + + #region InvalidOperationException... 
/// <summary>Creates a new <see cref="System.InvalidOperationException"/> with the specified message.</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static InvalidOperationException InvalidOperationException(string message)
{
	return new InvalidOperationException(message);
}

/// <summary>Creates a new <see cref="System.InvalidOperationException"/> with a formatted message (invariant culture).</summary>
[Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)]
public static InvalidOperationException InvalidOperationException(string message, object arg0)
{
	return new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, message, arg0));
}

/// <summary>Creates a new <see cref="System.InvalidOperationException"/> with a formatted message (invariant culture).</summary>
[Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)]
public static InvalidOperationException InvalidOperationException(string message, object arg0, object arg1)
{
	return new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, message, arg0, arg1));
}

/// <summary>Creates a new <see cref="System.InvalidOperationException"/> with a formatted message (invariant culture).</summary>
[Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)]
public static InvalidOperationException InvalidOperationException(string message, params object[] args)
{
	return new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, message, args));
}

/// <summary>Throws an <see cref="System.InvalidOperationException"/> with the specified message.</summary>
[ContractAnnotation("=> halt")]
public static void ThrowInvalidOperationException(string message)
{
	throw InvalidOperationException(message);
}

/// <summary>Throws an <see cref="System.InvalidOperationException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowInvalidOperationException(string message, object arg0)
{
	throw InvalidOperationException(message, arg0);
}

/// <summary>Throws an <see cref="System.InvalidOperationException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowInvalidOperationException(string message, object arg0, object arg1)
{
	throw InvalidOperationException(message, arg0, arg1);
}

/// <summary>Throws an <see cref="System.InvalidOperationException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowInvalidOperationException(string message, object arg0, object arg1, object arg2)
{
	// there is no 3-argument factory overload: this binds to the 'params object[]' one
	throw InvalidOperationException(message, arg0, arg1, arg2);
}

/// <summary>Throws an <see cref="System.InvalidOperationException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowInvalidOperationException(string message, params object[] args)
{
	throw InvalidOperationException(message, args);
}

/// <summary>Throws an <see cref="System.InvalidOperationException"/>; the generic return type lets callers use this in expression position (e.g. <c>return ThrowInvalidOperationException&lt;T&gt;(...)</c>).</summary>
// note: removed [StringFormatMethod] from this overload: it takes no format arguments
[ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.NoInlining)]
public static T ThrowInvalidOperationException<T>(string message)
{
	throw InvalidOperationException(message);
}

#endregion

#region FormatException...

/// <summary>Creates a new <see cref="System.FormatException"/> with the specified message.</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static FormatException FormatException(string message)
{
	return new FormatException(message);
}

/// <summary>Creates a new <see cref="System.FormatException"/> with a formatted message (invariant culture).</summary>
// consistency: use 'string.Format' (lowercase keyword alias) like the other regions, not 'String.Format'
[Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)]
public static FormatException FormatException(string message, object arg0)
{
	return new FormatException(string.Format(CultureInfo.InvariantCulture, message, arg0));
}

/// <summary>Creates a new <see cref="System.FormatException"/> with a formatted message (invariant culture).</summary>
[Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)]
public static FormatException FormatException(string message, object arg0, object arg1)
{
	return new FormatException(string.Format(CultureInfo.InvariantCulture, message, arg0, arg1));
}

/// <summary>Creates a new <see cref="System.FormatException"/> with a formatted message (invariant culture).</summary>
[Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)]
public static FormatException FormatException(string message, params object[] args)
{
	return new FormatException(string.Format(CultureInfo.InvariantCulture, message, args));
}

/// <summary>Throws a <see cref="System.FormatException"/> with the specified message.</summary>
[ContractAnnotation("=> halt")]
public static void ThrowFormatException(string message)
{
	throw FormatException(message);
}

/// <summary>Throws a <see cref="System.FormatException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowFormatException(string message, object arg0)
{
	throw FormatException(message, arg0);
}

/// <summary>Throws a <see cref="System.FormatException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowFormatException(string message, object arg0, object arg1)
{
	throw FormatException(message, arg0, arg1);
}

/// <summary>Throws a <see cref="System.FormatException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowFormatException(string message, object arg0, object arg1, object arg2)
{
	// there is no 3-argument factory overload: this binds to the 'params object[]' one
	throw FormatException(message, arg0, arg1, arg2);
}

/// <summary>Throws a <see cref="System.FormatException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowFormatException(string message, params object[] args)
{
	throw FormatException(message, args);
}

#endregion

#region OperationCanceledException...

/// <summary>Creates a new <see cref="System.OperationCanceledException"/> with the specified message.</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static OperationCanceledException OperationCanceledException(string message)
{
	return new OperationCanceledException(message);
}

/// <summary>Creates a new <see cref="System.OperationCanceledException"/> with a formatted message (invariant culture).</summary>
// consistency: use 'string.Format' (lowercase keyword alias) like the other regions, not 'String.Format'
[Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)]
public static OperationCanceledException OperationCanceledException(string message, object arg0)
{
	return new OperationCanceledException(string.Format(CultureInfo.InvariantCulture, message, arg0));
}

/// <summary>Creates a new <see cref="System.OperationCanceledException"/> with a formatted message (invariant culture).</summary>
[Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)]
public static OperationCanceledException OperationCanceledException(string message, params object[] args)
{
	return new OperationCanceledException(string.Format(CultureInfo.InvariantCulture, message, args));
}

/// <summary>Throws an <see cref="System.OperationCanceledException"/> with the specified message.</summary>
[ContractAnnotation("=> halt")]
public static void ThrowOperationCanceledException(string message)
{
	throw OperationCanceledException(message);
}

/// <summary>Throws an <see cref="System.OperationCanceledException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowOperationCanceledException(string message, object arg0)
{
	throw OperationCanceledException(message, arg0);
}

/// <summary>Throws an <see cref="System.OperationCanceledException"/> with a formatted message (invariant culture).</summary>
[ContractAnnotation("=> halt"), StringFormatMethod("message")]
public static void ThrowOperationCanceledException(string message, params object[] args)
{
	throw OperationCanceledException(message, args);
}

#endregion

#region NotSupportedException...
/// <summary>Creates a new <see cref="System.NotSupportedException"/> with the specified message.</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static NotSupportedException NotSupportedException(string message)
{
	return new NotSupportedException(message);
}

/// <summary>Creates a new <see cref="System.NotSupportedException"/> with a formatted message (invariant culture).</summary>
// consistency: use 'string.Format' (lowercase keyword alias) like the other regions, not 'String.Format'
[Pure, NotNull, StringFormatMethod("message"), MethodImpl(MethodImplOptions.NoInlining)]
public static NotSupportedException NotSupportedException(string message, params object[] args)
{
	return new NotSupportedException(string.Format(CultureInfo.InvariantCulture, message, args));
}

#endregion

/// <summary>Instantiates one of the well-known simple exception types, or returns null if <paramref name="exceptionType"/> is not handled here.</summary>
/// <param name="exceptionType">Type of the exception to create.</param>
/// <param name="message">Message of the exception.</param>
/// <param name="paramName">Name of the offending argument (used by argument-related exception types).</param>
[CanBeNull, Pure, MethodImpl(MethodImplOptions.NoInlining)]
public static Exception TryMapToKnownException(Type exceptionType, string message, string paramName)
{
	// first, check if this is one of the "simple" well-known types
	if (exceptionType == typeof(ArgumentNullException))
	{
		return new ArgumentNullException(paramName, message);
	}
	if (exceptionType == typeof(InvalidOperationException))
	{
		return new InvalidOperationException(message);
	}
	if (exceptionType == typeof(ArgumentException))
	{
		return new ArgumentException(message, paramName);
	}
	if (exceptionType == typeof(ArgumentOutOfRangeException))
	{
		return new ArgumentOutOfRangeException(paramName, message);
	}
	if (exceptionType == typeof(ObjectDisposedException))
	{
		// note: the 2-string ctor is (objectName, message)
		return new ObjectDisposedException(paramName, message);
	}
	if (exceptionType == typeof(FormatException))
	{
		return new FormatException(message);
	}
	return null;
}

/// <summary>Instantiates an arbitrary exception type via reflection, looking for a suitable constructor, or returns null if none is found.</summary>
/// <param name="exceptionType">Type of the exception to create.</param>
/// <param name="message">Message of the exception.</param>
/// <param name="paramName">Optional name of the offending argument.</param>
[CanBeNull, MethodImpl(MethodImplOptions.NoInlining)]
public static Exception TryMapToComplexException(Type exceptionType, string message, string paramName)
{
	ConstructorInfo constructor;

	if (paramName != null)
	{ // try to find a constructor taking two strings, one of them named "paramName"
		constructor = exceptionType.GetConstructor(new[] { typeof(string), typeof(string) });
		if (constructor != null)
		{
			// hoisted: GetParameters() was previously called twice
			var parameters = constructor.GetParameters();
			if (parameters[0].Name == "paramName")
			{
				return constructor.Invoke(new object[] { paramName, message }) as Exception;
			}
			if (parameters[1].Name == "paramName")
			{
				return constructor.Invoke(new object[] { message, paramName }) as Exception;
			}
		}
	}

	// try to find a constructor taking a single string (the message)
	constructor = exceptionType.GetConstructor(new[] { typeof(string) });
	if (constructor != null)
	{
		return constructor.Invoke(new object[] { message }) as Exception;
	}

	// is this an error type with a parameterless constructor?
	constructor = exceptionType.GetConstructor(Type.EmptyTypes);
	if (constructor != null)
	{
		return constructor.Invoke(null) as Exception;
	}

	return null;
}

#region Collection Errors...

/// <summary>"Sequence contains no elements."</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static InvalidOperationException InvalidOperationNoElements()
{
	return new InvalidOperationException("Sequence contains no elements.");
}

/// <summary>"Sequence contains no matching element."</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static InvalidOperationException InvalidOperationNoMatchingElements()
{
	return new InvalidOperationException("Sequence contains no matching element.");
}

/// <summary>"Index was out of range. ..."</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static IndexOutOfRangeException IndexOutOfRangeException()
{
	return new IndexOutOfRangeException("Index was out of range. Must be non-negative and less than the size of the collection.");
}

/// <summary>Throws an <see cref="System.IndexOutOfRangeException"/>.</summary>
[ContractAnnotation("=> halt")]
public static void ThrowIndexOutOfRangeException()
{
	throw IndexOutOfRangeException();
}

/// <summary>Throws an <see cref="System.IndexOutOfRangeException"/>; the generic return type lets callers use this in expression position.</summary>
[ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.NoInlining)]
public static T ThrowIndexOutOfRangeException<T>()
{
	throw IndexOutOfRangeException();
}

/// <summary>Creates a new <see cref="System.ArgumentOutOfRangeException"/> for an out-of-range collection index.</summary>
/// <param name="index">The offending index value, captured as the exception's ActualValue.</param>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static ArgumentOutOfRangeException ArgumentOutOfRangeIndex(int index)
{
	// BCL resource: presumably ArgumentOutOfRange_Index (the original comment said NeedNonNegNum, which is a different message) — TODO confirm
	// ReSharper disable once UseNameofExpression
	return new ArgumentOutOfRangeException("index", index, "Index was out of range. Must be non-negative and less than the size of the collection.");
}

/// <summary>Throws an <see cref="System.ArgumentOutOfRangeException"/> for an out-of-range collection index.</summary>
[ContractAnnotation("=> halt")]
public static void ThrowArgumentOutOfRangeIndex(int index)
{
	throw ArgumentOutOfRangeIndex(index);
}

/// <summary>Throws an <see cref="System.ArgumentOutOfRangeException"/> for an out-of-range collection index; the generic return type lets callers use this in expression position.</summary>
[ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.NoInlining)]
public static T ThrowArgumentOutOfRangeIndex<T>(int index)
{
	// BUGFIX: previously threw IndexOutOfRangeException() and dropped 'index';
	// now consistent with the non-generic ThrowArgumentOutOfRangeIndex overload.
	throw ArgumentOutOfRangeIndex(index);
}

/// <summary>"Non-negative number required"</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static ArgumentOutOfRangeException ArgumentOutOfRangeNeedNonNegNum([InvokerParameterName] string paramName)
{
	// ArgumentOutOfRange_NeedNonNegNum
	return new ArgumentOutOfRangeException(paramName, "Non-negative number required");
}

/// <summary>"Offset and length were out of bounds ..."</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static ArgumentException ArgumentInvalidOffLen()
{
	// Argument_InvalidOffLen
	return new ArgumentException("Offset and length were out of bounds for the array or count is greater than the number of elements from index to the end of the source collection.");
}

/// <summary>"Collection is read-only."</summary>
[Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)]
public static NotSupportedException NotSupportedReadOnlyCollection()
{
	// NotSupported_ReadOnlyCollection
	return new NotSupportedException("Collection is read-only.");
}

/// <summary>Throws a <see cref="System.NotSupportedException"/> for a mutation attempt on a read-only collection.</summary>
[ContractAnnotation("=> halt")]
public static void ThrowNotSupportedReadOnlyCollection()
{
	// NotSupported_ReadOnlyCollection
	throw NotSupportedReadOnlyCollection();
}

#endregion

}

}
diff --git a/FoundationDB.Client/Layers/Tuples/Formatters/FdbAnonymousTupleFormatter.cs b/FoundationDB.Client/Shared/Tuples/Formatters/AnonymousTupleFormatter.cs
similarity index 74%
rename from FoundationDB.Client/Layers/Tuples/Formatters/FdbAnonymousTupleFormatter.cs
rename to FoundationDB.Client/Shared/Tuples/Formatters/AnonymousTupleFormatter.cs
index 806d8c228..c2f157c31 100644
--- a/FoundationDB.Client/Layers/Tuples/Formatters/FdbAnonymousTupleFormatter.cs
+++ b/FoundationDB.Client/Shared/Tuples/Formatters/AnonymousTupleFormatter.cs
@@ -1,5 +1,5 @@
 #region BSD Licence
-/* Copyright (c) 2013, Doxense SARL
+/* Copyright (c) 2013-2018, Doxense SAS
 All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
@@ -26,33 +26,35 @@ DISCLAIMED.
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { using System; + using JetBrains.Annotations; + using Doxense.Diagnostics.Contracts; /// Customer formatter that will called the provided lambda functions to convert to and from a tuple - internal sealed class FdbAnonymousTupleFormatter : ITupleFormatter + public sealed class AnonymousTupleFormatter : ITupleFormatter { - private readonly Func m_to; - private readonly Func m_from; + private readonly Func m_to; + private readonly Func m_from; - public FdbAnonymousTupleFormatter(Func to, Func from) + public AnonymousTupleFormatter([NotNull] Func to, [NotNull] Func from) { - if (to == null) throw new ArgumentNullException("to"); - if (from == null) throw new ArgumentNullException("from"); + Contract.NotNull(to, nameof(to)); + Contract.NotNull(from, nameof(from)); m_to = to; m_from = from; } - public IFdbTuple ToTuple(T key) + public ITuple ToTuple(T key) { return m_to(key); } - public T FromTuple(IFdbTuple tuple) + public T FromTuple(ITuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); + Contract.NotNull(tuple, nameof(tuple)); return m_from(tuple); } } diff --git a/FoundationDB.Client/Layers/Tuples/Formatters/FdbFormattableTupleFormatter.cs b/FoundationDB.Client/Shared/Tuples/Formatters/FormattableTupleFormatter.cs similarity index 85% rename from FoundationDB.Client/Layers/Tuples/Formatters/FdbFormattableTupleFormatter.cs rename to FoundationDB.Client/Shared/Tuples/Formatters/FormattableTupleFormatter.cs index fe5ed5b0d..bce76efc2 100644 --- a/FoundationDB.Client/Layers/Tuples/Formatters/FdbFormattableTupleFormatter.cs +++ b/FoundationDB.Client/Shared/Tuples/Formatters/FormattableTupleFormatter.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -26,23 +26,24 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { using System; + using Doxense.Diagnostics.Contracts; /// Specialized formatter for types that implement ITupleFormattable - internal sealed class FdbFormattableTupleFormatter : ITupleFormatter + public sealed class FormattableTupleFormatter : ITupleFormatter where T : ITupleFormattable, new() { - public IFdbTuple ToTuple(T key) + public ITuple ToTuple(T key) { if (key == null) return null; return key.ToTuple(); } - public T FromTuple(IFdbTuple tuple) + public T FromTuple(ITuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); + Contract.NotNull(tuple, nameof(tuple)); var key = new T(); key.FromTuple(tuple); return key; diff --git a/FoundationDB.Client/Layers/Tuples/Formatters/FdbGenericTupleFormatter.cs b/FoundationDB.Client/Shared/Tuples/Formatters/GenericTupleFormatter.cs similarity index 86% rename from FoundationDB.Client/Layers/Tuples/Formatters/FdbGenericTupleFormatter.cs rename to FoundationDB.Client/Shared/Tuples/Formatters/GenericTupleFormatter.cs index 29edfb904..2b87cd73c 100644 --- a/FoundationDB.Client/Layers/Tuples/Formatters/FdbGenericTupleFormatter.cs +++ b/FoundationDB.Client/Shared/Tuples/Formatters/GenericTupleFormatter.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,20 +26,20 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { using System; /// Simple key formatter that maps a value into a singleton tuple, and back - internal sealed class FdbGenericTupleFormatter : ITupleFormatter + public sealed class GenericTupleFormatter : ITupleFormatter { - public IFdbTuple ToTuple(T key) + public ITuple ToTuple(T key) { - return FdbTuple.Create(key); + return STuple.Create(key); } - public T FromTuple(IFdbTuple tuple) + public T FromTuple(ITuple tuple) { return tuple.OfSize(1).Get(0); } diff --git a/FoundationDB.Client/Layers/Tuples/Formatters/ITupleFormattable.cs b/FoundationDB.Client/Shared/Tuples/Formatters/ITupleFormattable.cs similarity index 84% rename from FoundationDB.Client/Layers/Tuples/Formatters/ITupleFormattable.cs rename to FoundationDB.Client/Shared/Tuples/Formatters/ITupleFormattable.cs index 1e1c86b6d..49df60f9b 100644 --- a/FoundationDB.Client/Layers/Tuples/Formatters/ITupleFormattable.cs +++ b/FoundationDB.Client/Shared/Tuples/Formatters/ITupleFormattable.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { using System; @@ -36,11 +36,14 @@ namespace FoundationDB.Layers.Tuples public interface ITupleFormattable { /// Return the tuple representation of this instance - IFdbTuple ToTuple(); + ITuple ToTuple(); //REVIEW: [NotNull] ? /// Load a tuple representation into a newly created instance /// - void FromTuple(IFdbTuple tuple); + [Obsolete("Use an ITupleFormater instead!")] + void FromTuple(ITuple tuple); + //REVIEW: REMOVE THIS! 
This does not work well with private ctors and readonly + // => use ITupleFormatter to serialize/deseralize stuff } } diff --git a/FoundationDB.Client/Layers/Tuples/Formatters/ITupleFormatter.cs b/FoundationDB.Client/Shared/Tuples/Formatters/ITupleFormatter.cs similarity index 91% rename from FoundationDB.Client/Layers/Tuples/Formatters/ITupleFormatter.cs rename to FoundationDB.Client/Shared/Tuples/Formatters/ITupleFormatter.cs index 6e3dea0f0..6ff3a2257 100644 --- a/FoundationDB.Client/Layers/Tuples/Formatters/ITupleFormatter.cs +++ b/FoundationDB.Client/Shared/Tuples/Formatters/ITupleFormatter.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { using System; @@ -39,12 +39,12 @@ public interface ITupleFormatter /// Convert a key into a tuple sequence /// Key to convert to a tuple /// Tuple that represent the key (can contain a single item for primitive keys, or several items for composite keys) - IFdbTuple ToTuple(TKey key); + ITuple ToTuple(TKey key); //REVIEW: [NotNull] result? /// Convert a tuple sequence into a key /// Tuple to convert back into a key /// Key that corresponds to the tuple - TKey FromTuple(IFdbTuple tuple); + TKey FromTuple(ITuple tuple); //REVIEW: [NotNull] tuple? 
} } diff --git a/FoundationDB.Client/Layers/Tuples/Formatters/FdbTupleFormatter.cs b/FoundationDB.Client/Shared/Tuples/Formatters/TupleFormatter.cs similarity index 78% rename from FoundationDB.Client/Layers/Tuples/Formatters/FdbTupleFormatter.cs rename to FoundationDB.Client/Shared/Tuples/Formatters/TupleFormatter.cs index 64dd5a875..78122f220 100644 --- a/FoundationDB.Client/Layers/Tuples/Formatters/FdbTupleFormatter.cs +++ b/FoundationDB.Client/Shared/Tuples/Formatters/TupleFormatter.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,16 +26,19 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { using System; + using JetBrains.Annotations; + using Doxense.Diagnostics.Contracts; /// Helper class to get or create tuple formatters - public static class FdbTupleFormatter + public static class TupleFormatter { private static ITupleFormatter s_default; /// Return the default tuple formatter for this type + [NotNull] public static ITupleFormatter Default { get @@ -54,22 +57,24 @@ public static ITupleFormatter Default /// Lambda that is called to convert a value into a tuple. It SHOULD NOT return null. /// Lambda that is called to convert a tuple back into a value. It CAN return null. 
/// Custom formatter - public static ITupleFormatter Create(Func from, Func to) + [NotNull] + public static ITupleFormatter Create([NotNull] Func from, [NotNull] Func to) { - return new FdbAnonymousTupleFormatter(from, to); + return new AnonymousTupleFormatter(from, to); } /// Create a formatter that just add or remove a prefix to values - public static ITupleFormatter CreateAppender(IFdbTuple prefix) + [NotNull] + public static ITupleFormatter CreateAppender([NotNull] ITuple prefix) { - if (prefix == null) throw new ArgumentNullException("prefix"); + Contract.NotNull(prefix, nameof(prefix)); - return new FdbAnonymousTupleFormatter( + return new AnonymousTupleFormatter( (value) => prefix.Append(value), (tuple) => { if (tuple.Count != prefix.Count + 1) throw new ArgumentException("Tuple size is invalid", "tuple"); - if (!FdbTuple.StartsWith(tuple, prefix)) throw new ArgumentException("Tuple does not start with the expected prefix", "tuple"); + if (!TupleHelpers.StartsWith(tuple, prefix)) throw new ArgumentException("Tuple does not start with the expected prefix", "tuple"); return tuple.Last(); } ); @@ -77,23 +82,24 @@ public static ITupleFormatter CreateAppender(IFdbTuple prefix) /// Creates and instance of a tuple formatter that is best suited for this type + [NotNull] private static ITupleFormatter CreateDefaultFormatter() { var type = typeof(T); - if (typeof(IFdbTuple).IsAssignableFrom(type)) + if (typeof(ITuple).IsAssignableFrom(type)) { - return new FdbAnonymousTupleFormatter((x) => (IFdbTuple)x, (x) => (T)x); + return new AnonymousTupleFormatter((x) => (ITuple)x, (x) => (T)x); } if (typeof(ITupleFormattable).IsAssignableFrom(type)) { // note: we cannot call directlty 'new FormattableFormatter()' because of the generic type constraints, so we have to use reflection... // => this WILL fail if someone implements 'ITupleFormattable' on a class that does not have public parameterless constructor ! 
- return (ITupleFormatter)Activator.CreateInstance(typeof(FdbFormattableTupleFormatter<>).MakeGenericType(type)); + return (ITupleFormatter)Activator.CreateInstance(typeof(FormattableTupleFormatter<>).MakeGenericType(type)); } - return new FdbGenericTupleFormatter(); + return new GenericTupleFormatter(); } } diff --git a/FoundationDB.Client/Layers/Tuples/IFdbTuple.cs b/FoundationDB.Client/Shared/Tuples/ITuple.cs similarity index 66% rename from FoundationDB.Client/Layers/Tuples/IFdbTuple.cs rename to FoundationDB.Client/Shared/Tuples/ITuple.cs index 28988b554..d36baffbd 100644 --- a/FoundationDB.Client/Layers/Tuples/IFdbTuple.cs +++ b/FoundationDB.Client/Shared/Tuples/ITuple.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,22 +26,17 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { - using FoundationDB.Client; using JetBrains.Annotations; using System; using System.Collections.Generic; using System.ComponentModel; - /// Represents a Tuple of N elements + /// Represents a Tuple of N elements of any type [ImmutableObject(true)] [CannotApplyEqualityOperator] - public interface IFdbTuple : IEnumerable, IEquatable, IReadOnlyCollection, IFdbKey -#if !NET_4_0 - , IReadOnlyList - , System.Collections.IStructuralEquatable -#endif + public interface ITuple : IEquatable, IReadOnlyList, System.Collections.IStructuralEquatable { // Tuples should, by default, behave as closely to Python's tuples as possible. 
See http://docs.python.org/2/tutorial/datastructures.html#tuples-and-sequences @@ -56,61 +51,43 @@ public interface IFdbTuple : IEnumerable, IEquatable, IReadOn // - Accessing the Count and Last item should be fast, if possible in O(1) // - Appending should also be fast, if possible O(1) // - Getting the substring of a tuple should as fast as possible, if possible O(1). For list-based tuples, it should return a view of the list (offset/count) and avoid copying the list - // - If an operation returns an empty tuple, then it should return the FdbTuple.Empty singleton instance - // - If an operation does not change the tuple (like Append(FdbTuple.Empty), or tuple.Substring(0)), then the tuple should return itself - // - If the same tuple will be packed frequently, it should be memoized (converted into a FdbMemoizedTuple) - -#if NET_4_0 - /// [DANGEROUS] Return an item of the tuple, given its position - /// Position of the item (if negative, means relative from the end) - /// Value of the item - /// The type of the returned value will be either null, string, byte[], Guid, long or ulong. You should use tuple.Get<T>(...) instead if you are working with non standard values! - /// If is outside the bounds of the tuple - /// - /// ("Hello", "World", 123,)[0] => "Hello" - /// ("Hello", "World", 123,)[-1] => 123L - /// - object this[int index] { get; } -#endif + // - If an operation returns an empty tuple, then it should return the STuple.Empty singleton instance + // - If an operation does not change the tuple (like Append(STuple.Empty), or tuple.Substring(0)), then the tuple should return itself + // - If the same tuple will be packed frequently, it should be memoized (converted into a MemoizedTuple) /// Return a section of the tuple /// Starting offset of the sub-tuple to return, or null to select from the start. Negative values means from the end /// Ending offset (excluded) of the sub-tuple to return or null to select until the end. Negative values means from the end. 
/// Tuple that include all items in the current tuple whose offset are greather than or equal to and strictly less than . The tuple may be smaller than expected if the range is larger than the parent tuple. If the range does not intersect with the tuple, the Empty tuple will be returned. - IFdbTuple this[int? fromIncluded, int? toExcluded] { [NotNull] get; } + ITuple this[int? fromIncluded, int? toExcluded] { [NotNull, Pure] get; } /// Return the typed value of an item of the tuple, given its position - /// Expected type of the item + /// Expected type of the item /// Position of the item (if negative, means relative from the end) - /// Value of the item at position , adapted into type . + /// Value of the item at position , adapted into type . /// If is outside the bounds of the tuple /// /// ("Hello", "World", 123,).Get<string>(0) => "Hello" /// ("Hello", "World", 123,).Get<int>(-1) => 123 /// ("Hello", "World", 123,).Get<string>(-1) => "123" /// - T Get(int index); - - /// Return the typed value of the last item in the tuple - /// Expected type of the item - /// Value of the last item of this tuple, adapted into type - /// Equivalent of tuple.Get<T>(-1) - T Last(); + [Pure] + TItem Get(int index); /// Create a new Tuple by appending a single new value at the end of this tuple - /// Type of the new value + /// Type of the new value /// Value that will be appended at the end /// New tuple with the new value /// ("Hello,").Append("World") => ("Hello", "World",) - /// If is an , then it will be appended as a single element. If you need to append the *items* of a tuple, you must call - [NotNull] - IFdbTuple Append(T value); + /// If is an , then it will be appended as a single element. 
If you need to append the *items* of a tuple, you must call + [Pure, NotNull] + ITuple Append(TItem value); /// Create a new Tuple by appending the items of another tuple at the end of this tuple /// Tuple whose items must be appended at the end of the current tuple /// New tuple with the new values, or the same instance if is empty. - [NotNull] - IFdbTuple Concat([NotNull] IFdbTuple tuple); + [Pure, NotNull] + ITuple Concat([NotNull] ITuple tuple); /// Copy all items of the tuple into an array at a specific location /// Destination array (must be big enough to contains all the items) @@ -121,16 +98,6 @@ public interface IFdbTuple : IEnumerable, IEquatable, IReadOn /// void CopyTo([NotNull] object[] array, int offset); - /// Appends the packed bytes of this instance to the end of a buffer - /// Buffer that will received the packed bytes of this instance - void PackTo(ref TupleWriter writer); - - /// Pack this instance into a Slice - /// - /// ("Hello", "World", 123).ToSlice() => '\x02Hello\x00\x02World\x00\x15\x7B' - /// - Slice ToSlice(); - } } diff --git a/FoundationDB.Client/Layers/Tuples/FdbJoinedTuple.cs b/FoundationDB.Client/Shared/Tuples/JoinedTuple.cs similarity index 67% rename from FoundationDB.Client/Layers/Tuples/FdbJoinedTuple.cs rename to FoundationDB.Client/Shared/Tuples/JoinedTuple.cs index 60e7bb181..7e4b42f2a 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbJoinedTuple.cs +++ b/FoundationDB.Client/Shared/Tuples/JoinedTuple.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,27 +26,28 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { - using FoundationDB.Client; - using FoundationDB.Client.Converters; using JetBrains.Annotations; using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Runtime.Converters; + using Doxense.Diagnostics.Contracts; /// Tuple that represents the concatenation of two tuples - [DebuggerDisplay("{ToString()}")] - public sealed class FdbJoinedTuple : IFdbTuple + [DebuggerDisplay("{ToString(),nq}")] + public sealed class JoinedTuple : ITuple { // Uses cases: joining a 'subspace' tuple (customerId, 'Users', ) with a 'key' tuple (userId, 'Contacts', 123, ) /// First tuple (first N items) - public readonly IFdbTuple Head; + public readonly ITuple Head; /// Second tuple (last M items) - public readonly IFdbTuple Tail; + public readonly ITuple Tail; /// Offset at which the Tail tuple starts. Items are in Head tuple if index < split. Items are in Tail tuple if index >= split. 
private readonly int m_split; @@ -54,10 +55,10 @@ public sealed class FdbJoinedTuple : IFdbTuple /// Total size of the tuple (sum of the size of the two inner tuples) private readonly int m_count; - public FdbJoinedTuple(IFdbTuple head, IFdbTuple tail) + public JoinedTuple(ITuple head, ITuple tail) { - if (head == null) throw new ArgumentNullException("head"); - if (tail == null) throw new ArgumentNullException("tail"); + Contract.NotNull(head, nameof(head)); + Contract.NotNull(tail, nameof(tail)); this.Head = head; this.Tail = tail; @@ -65,97 +66,65 @@ public FdbJoinedTuple(IFdbTuple head, IFdbTuple tail) m_count = m_split + tail.Count; } - public void PackTo(ref TupleWriter writer) - { - this.Head.PackTo(ref writer); - this.Tail.PackTo(ref writer); - } - - public Slice ToSlice() - { - var writer = new TupleWriter(); - PackTo(ref writer); - return writer.Output.ToSlice(); - } - - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } - public override string ToString() { - return FdbTuple.ToString(this); + return STuple.Formatter.ToString(this); } - public int Count - { - get { return m_count; } - } + public int Count => m_count; public object this[int index] { get { - index = FdbTuple.MapIndex(index, m_count); + index = TupleHelpers.MapIndex(index, m_count); return index < m_split ? this.Head[index] : this.Tail[index - m_split]; } } - public IFdbTuple this[int? fromIncluded, int? toExcluded] + public ITuple this[int? fromIncluded, int? toExcluded] { get { - int begin = fromIncluded.HasValue ? FdbTuple.MapIndexBounded(fromIncluded.Value, m_count) : 0; - int end = toExcluded.HasValue ? FdbTuple.MapIndexBounded(toExcluded.Value, m_count) : m_count; + int begin = fromIncluded.HasValue ? TupleHelpers.MapIndexBounded(fromIncluded.Value, m_count) : 0; + int end = toExcluded.HasValue ? 
TupleHelpers.MapIndexBounded(toExcluded.Value, m_count) : m_count; - if (end <= begin) return FdbTuple.Empty; + if (end <= begin) return STuple.Empty; int p = this.Head.Count; if (begin >= p) { // all selected items are in the tail return this.Tail[begin - p, end - p]; } - else if (end <= p) + if (end <= p) { // all selected items are in the head return this.Head[begin, end]; } - else - { // selected items are both in head and tail - return new FdbJoinedTuple(this.Head[begin, null], this.Tail[null, end - p]); - } + // selected items are both in head and tail + return new JoinedTuple(this.Head[begin, null], this.Tail[null, end - p]); } } public T Get(int index) { - index = FdbTuple.MapIndex(index, m_count); + index = TupleHelpers.MapIndex(index, m_count); return index < m_split ? this.Head.Get(index) : this.Tail.Get(index - m_split); } - public T Last() + ITuple ITuple.Append(T value) { - if (this.Tail.Count > 0) - return this.Tail.Last(); - else - return this.Head.Last(); - } - - IFdbTuple IFdbTuple.Append(T value) - { - return new FdbLinkedTuple(this, value); + return new LinkedTuple(this, value); } [NotNull] - public FdbLinkedTuple Append(T value) + public LinkedTuple Append(T value) { - return new FdbLinkedTuple(this, value); + return new LinkedTuple(this, value); } - [NotNull] - public IFdbTuple Concat([NotNull] IFdbTuple tuple) + public ITuple Concat(ITuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); + Contract.NotNull(tuple, nameof(tuple)); int n1 = tuple.Count; if (n1 == 0) return this; @@ -164,13 +133,10 @@ public IFdbTuple Concat([NotNull] IFdbTuple tuple) if (n1 + n2 >= 10) { // it's getting bug, merge to a new List tuple - return new FdbListTuple(this.Head, this.Tail, tuple); - } - else - { - // REVIEW: should we always concat with the tail? - return new FdbJoinedTuple(this.Head, this.Tail.Concat(tuple)); + return new ListTuple(this.Head, this.Tail, tuple); } + // REVIEW: should we always concat with the tail? 
+ return new JoinedTuple(this.Head, this.Tail.Concat(tuple)); } public void CopyTo(object[] array, int offset) @@ -201,7 +167,7 @@ public override bool Equals(object obj) return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); } - public bool Equals(IFdbTuple other) + public bool Equals(ITuple other) { return !object.ReferenceEquals(other, null) && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); } @@ -216,7 +182,7 @@ bool System.Collections.IStructuralEquatable.Equals(object other, System.Collect if (object.ReferenceEquals(this, other)) return true; if (other == null) return false; - var tuple = other as IFdbTuple; + var tuple = other as ITuple; if (!object.ReferenceEquals(tuple, null)) { if (tuple.Count != m_count) return false; @@ -240,9 +206,9 @@ bool System.Collections.IStructuralEquatable.Equals(object other, System.Collect int System.Collections.IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer comparer) { - return FdbTuple.CombineHashCodes( - this.Head != null ? this.Head.GetHashCode(comparer) : 0, - this.Tail != null ? this.Tail.GetHashCode(comparer) : 0 + return HashCodes.Combine( + HashCodes.Compute(this.Head, comparer), + HashCodes.Compute(this.Tail, comparer) ); } } diff --git a/FoundationDB.Client/Layers/Tuples/FdbLinkedTuple.cs b/FoundationDB.Client/Shared/Tuples/LinkedTuple.cs similarity index 63% rename from FoundationDB.Client/Layers/Tuples/FdbLinkedTuple.cs rename to FoundationDB.Client/Shared/Tuples/LinkedTuple.cs index e15b126d3..4909cbb32 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbLinkedTuple.cs +++ b/FoundationDB.Client/Shared/Tuples/LinkedTuple.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,20 +26,19 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using FoundationDB.Client.Utils; using JetBrains.Annotations; - using System.Collections; using System.Collections.Generic; using System.Diagnostics; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; /// Tuple that adds a value at the end of an already existing tuple /// Type of the last value of the tuple - [DebuggerDisplay("{ToString()}")] - public sealed class FdbLinkedTuple : IFdbTuple + [DebuggerDisplay("{ToString(),nq}")] + public sealed class LinkedTuple : ITuple { //TODO: consider changing this to a struct ? @@ -50,46 +49,23 @@ public sealed class FdbLinkedTuple : IFdbTuple public readonly T Tail; /// Link to the parent tuple that contains the head. - public readonly IFdbTuple Head; + public readonly ITuple Head; /// Cached size of the size of the Head tuple. Add 1 to get the size of this tuple. 
public readonly int Depth; /// Append a new value at the end of an existing tuple - internal FdbLinkedTuple(IFdbTuple head, T tail) + public LinkedTuple([NotNull] ITuple head, T tail) { - Contract.Requires(head != null); + Contract.NotNull(head, nameof(head)); this.Head = head; this.Tail = tail; this.Depth = head.Count; } - /// Pack this tuple into a buffer - public void PackTo(ref TupleWriter writer) - { - this.Head.PackTo(ref writer); - FdbTuplePacker.SerializeTo(ref writer, this.Tail); - } - - /// Pack this tuple into a slice - public Slice ToSlice() - { - var writer = new TupleWriter(); - PackTo(ref writer); - return writer.Output.ToSlice(); - } - - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } - /// Returns the number of elements in this tuple - public int Count - { - get { return this.Depth + 1; } - } + public int Count => this.Depth + 1; public object this[int index] { @@ -101,41 +77,41 @@ public object this[int index] } } - public IFdbTuple this[int? fromIncluded, int? toExcluded] + public ITuple this[int? fromIncluded, int? 
toExcluded] { - get { return FdbTuple.Splice(this, fromIncluded, toExcluded); } + get { return TupleHelpers.Splice(this, fromIncluded, toExcluded); } } - public R Get(int index) + public TItem Get(int index) { - if (index == this.Depth || index == -1) return FdbConverters.Convert(this.Tail); + if (index == this.Depth || index == -1) return TypeConverters.Convert(this.Tail); if (index < -1) index++; - return this.Head.Get(index); + return this.Head.Get(index); } - public R Last() + public T Last { - return FdbConverters.Convert(this.Tail); + [Pure] + get { return this.Tail; } } - IFdbTuple IFdbTuple.Append(R value) + ITuple ITuple.Append(TItem value) { - return this.Append(value); + return this.Append(value); } [NotNull] - public FdbLinkedTuple Append(R value) + public LinkedTuple Append(TItem value) { - return new FdbLinkedTuple(this, value); + return new LinkedTuple(this, value); } - [NotNull] - public IFdbTuple Concat([NotNull] IFdbTuple tuple) + public ITuple Concat(ITuple tuple) { - return FdbTuple.Concat(this, tuple); + return STuple.Concat(this, tuple); } - public void CopyTo([NotNull] object[] array, int offset) + public void CopyTo(object[] array, int offset) { this.Head.CopyTo(array, offset); array[offset + this.Depth] = this.Tail; @@ -157,22 +133,22 @@ System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() public override string ToString() { - return FdbTuple.ToString(this); + return STuple.Formatter.ToString(this); } public override bool Equals(object obj) { - return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); + return obj != null && ((System.Collections.IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); } - public bool Equals(IFdbTuple other) + public bool Equals(ITuple other) { - return !object.ReferenceEquals(other, null) && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); + return !object.ReferenceEquals(other, null) && 
((System.Collections.IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); } public override int GetHashCode() { - return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); + return ((System.Collections.IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); } bool System.Collections.IStructuralEquatable.Equals(object other, System.Collections.IEqualityComparer comparer) @@ -180,7 +156,7 @@ bool System.Collections.IStructuralEquatable.Equals(object other, System.Collect if (object.ReferenceEquals(this, other)) return true; if (other == null) return false; - var linked = other as FdbLinkedTuple; + var linked = other as LinkedTuple; if (!object.ReferenceEquals(linked, null)) { // must have same length @@ -191,13 +167,13 @@ bool System.Collections.IStructuralEquatable.Equals(object other, System.Collect return this.Head.Equals(linked.Tail, comparer); } - return FdbTuple.Equals(this, other, comparer); + return TupleHelpers.Equals(this, other, comparer); } - int IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer comparer) + int System.Collections.IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer comparer) { - return FdbTuple.CombineHashCodes( - this.Head != null ? this.Head.GetHashCode(comparer) : 0, + return HashCodes.Combine( + HashCodes.Compute(this.Head, comparer), comparer.GetHashCode(this.Tail) ); } diff --git a/FoundationDB.Client/Layers/Tuples/FdbListTuple.cs b/FoundationDB.Client/Shared/Tuples/ListTuple.cs similarity index 68% rename from FoundationDB.Client/Layers/Tuples/FdbListTuple.cs rename to FoundationDB.Client/Shared/Tuples/ListTuple.cs index 738e183a9..102f68c65 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbListTuple.cs +++ b/FoundationDB.Client/Shared/Tuples/ListTuple.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -26,21 +26,22 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using FoundationDB.Client.Utils; using System; using System.Collections; using System.Collections.Generic; using System.Linq; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; + using JetBrains.Annotations; /// Tuple that can hold any number of untyped items - public sealed class FdbListTuple : IFdbTuple + public sealed class ListTuple : ITuple { - // We could use a FdbListTuple for tuples where all items are of type T, and FdbListTuple could derive from FdbListTuple. - // => this could speed up a bit the use case of FdbTuple.FromArray or FdbTuple.FromSequence + // We could use a ListTuple for tuples where all items are of type T, and ListTuple could derive from ListTuple. + // => this could speed up a bit the use case of STuple.FromArray or STuple.FromSequence /// List of the items in the tuple. /// It is supposed to be immutable! @@ -53,7 +54,7 @@ public sealed class FdbListTuple : IFdbTuple private int? m_hashCode; /// Create a new tuple from a sequence of items (copied) - internal FdbListTuple(IEnumerable items) + public ListTuple([NotNull, InstantHandle] IEnumerable items) { m_items = items.ToArray(); m_count = m_items.Length; @@ -61,7 +62,7 @@ internal FdbListTuple(IEnumerable items) /// Wrap a List of items /// The list should not mutate and should not be exposed to anyone else! 
- internal FdbListTuple(object[] items, int offset, int count) + public ListTuple(object[] items, int offset, int count) { Contract.Requires(items != null && offset >= 0 && count >= 0); Contract.Requires(offset + count <= items.Length, "inner item array is too small"); @@ -72,12 +73,10 @@ internal FdbListTuple(object[] items, int offset, int count) } /// Create a new list tuple by merging the items of two tuples together - /// - /// - internal FdbListTuple(IFdbTuple a, IFdbTuple b) + public ListTuple(ITuple a, ITuple b) { - if (a == null) throw new ArgumentNullException("a"); - if (b == null) throw new ArgumentNullException("b"); + Contract.NotNull(a, nameof(a)); + Contract.NotNull(b, nameof(b)); int nA = a.Count; int nB = b.Count; @@ -91,11 +90,11 @@ internal FdbListTuple(IFdbTuple a, IFdbTuple b) } /// Create a new list tuple by merging the items of three tuples together - internal FdbListTuple(IFdbTuple a, IFdbTuple b, IFdbTuple c) + public ListTuple(ITuple a, ITuple b, ITuple c) { - if (a == null) throw new ArgumentNullException("a"); - if (b == null) throw new ArgumentNullException("b"); - if (c == null) throw new ArgumentNullException("c"); + Contract.NotNull(a, nameof(a)); + Contract.NotNull(b, nameof(b)); + Contract.NotNull(c, nameof(c)); int nA = a.Count; int nB = b.Count; @@ -110,76 +109,67 @@ internal FdbListTuple(IFdbTuple a, IFdbTuple b, IFdbTuple c) if (nC > 0) c.CopyTo(m_items, nA + nB); } - public int Count - { - get { return m_count; } - } + public int Count => m_count; - public object this[int index] - { - get - { - return m_items[m_offset + FdbTuple.MapIndex(index, m_count)]; - } - } + public object this[int index] => m_items[m_offset + TupleHelpers.MapIndex(index, m_count)]; - public IFdbTuple this[int? fromIncluded, int? toExcluded] + public ITuple this[int? fromIncluded, int? toExcluded] { get { - int begin = fromIncluded.HasValue ? FdbTuple.MapIndexBounded(fromIncluded.Value, m_count) : 0; - int end = toExcluded.HasValue ? 
FdbTuple.MapIndexBounded(toExcluded.Value, m_count) : m_count; + int begin = fromIncluded.HasValue ? TupleHelpers.MapIndexBounded(fromIncluded.Value, m_count) : 0; + int end = toExcluded.HasValue ? TupleHelpers.MapIndexBounded(toExcluded.Value, m_count) : m_count; int len = end - begin; - if (len <= 0) return FdbTuple.Empty; + if (len <= 0) return STuple.Empty; if (begin == 0 && len == m_count) return this; Contract.Assert(m_offset + begin >= m_offset); Contract.Assert(len >= 0 && len <= m_count); - return new FdbListTuple(m_items, m_offset + begin, len); + return new ListTuple(m_items, m_offset + begin, len); } } - public R Get(int index) + public TItem Get(int index) { - return FdbConverters.ConvertBoxed(this[index]); + return TypeConverters.ConvertBoxed(this[index]); } - public R Last() + public TItem Last() { if (m_count == 0) throw new InvalidOperationException("Tuple is empty"); - return FdbConverters.ConvertBoxed(m_items[m_offset + m_count - 1]); + return TypeConverters.ConvertBoxed(m_items[m_offset + m_count - 1]); } - IFdbTuple IFdbTuple.Append(T value) + ITuple ITuple.Append(TItem value) { - return this.Append(value); + return Append(value); } - public FdbListTuple Append(T value) + public ListTuple Append(TItem value) { var list = new object[m_count + 1]; Array.Copy(m_items, m_offset, list, 0, m_count); list[m_count] = value; - return new FdbListTuple(list, 0, list.Length); + return new ListTuple(list, 0, list.Length); } - public FdbListTuple AppendRange(object[] items) + public ListTuple AppendRange(object[] items) { - if (items == null) throw new ArgumentNullException("items"); + Contract.NotNull(items, nameof(items)); if (items.Length == 0) return this; var list = new object[m_count + items.Length]; Array.Copy(m_items, m_offset, list, 0, m_count); Array.Copy(items, 0, list, m_count, items.Length); - return new FdbListTuple(list, 0, list.Length); + return new ListTuple(list, 0, list.Length); } - public FdbListTuple Concat(FdbListTuple tuple) + public 
ListTuple Concat(ListTuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); + Contract.NotNull(tuple, nameof(tuple)); if (tuple.m_count == 0) return this; if (m_count == 0) return tuple; @@ -187,13 +177,12 @@ public FdbListTuple Concat(FdbListTuple tuple) var list = new object[m_count + tuple.m_count]; Array.Copy(m_items, m_offset, list, 0, m_count); Array.Copy(tuple.m_items, tuple.m_offset, list, m_count, tuple.m_count); - return new FdbListTuple(list, 0, list.Length); + return new ListTuple(list, 0, list.Length); } - public FdbListTuple Concat(IFdbTuple tuple) + public ListTuple Concat(ITuple tuple) { - var _ = tuple as FdbListTuple; - if (_ != null) return Concat(_); + if (tuple is ListTuple lt) return Concat(lt); int count = tuple.Count; if (count == 0) return this; @@ -201,12 +190,12 @@ public FdbListTuple Concat(IFdbTuple tuple) var list = new object[m_count + count]; Array.Copy(m_items, m_offset, list, 0, m_count); tuple.CopyTo(list, m_count); - return new FdbListTuple(list, 0, list.Length); + return new ListTuple(list, 0, list.Length); } - IFdbTuple IFdbTuple.Concat(IFdbTuple tuple) + ITuple ITuple.Concat(ITuple tuple) { - return this.Concat(tuple); + return Concat(tuple); } public void CopyTo(object[] array, int offset) @@ -236,29 +225,9 @@ private static IEnumerator Enumerate(object[] items, int offset, int cou } } - public void PackTo(ref TupleWriter writer) - { - for (int i = 0; i < m_count; i++) - { - FdbTuplePackers.SerializeObjectTo(ref writer, m_items[i + m_offset]); - } - } - - public Slice ToSlice() - { - var writer = new TupleWriter(); - PackTo(ref writer); - return writer.Output.ToSlice(); - } - - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } - public override string ToString() { - return FdbTuple.ToString(m_items, m_offset, m_count); + return STuple.Formatter.ToString(m_items, m_offset, m_count); } private bool CompareItems(IEnumerable theirs, IEqualityComparer comparer) @@ -286,7 +255,7 @@ public 
override bool Equals(object obj) return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); } - public bool Equals(IFdbTuple other) + public bool Equals(ITuple other) { return !object.ReferenceEquals(other, null) && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); } @@ -301,7 +270,7 @@ bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) if (object.ReferenceEquals(this, other)) return true; if (other == null) return false; - var list = other as FdbListTuple; + var list = other as ListTuple; if (!object.ReferenceEquals(list, null)) { if (list.m_count != m_count) return false; @@ -316,7 +285,7 @@ bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) } } - return FdbTuple.Equals(this, other, comparer); + return TupleHelpers.Equals(this, other, comparer); } int IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer comparer) @@ -332,8 +301,8 @@ int IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer compar for (int i = 0; i < m_count; i++) { var item = m_items[i + m_offset]; - - h = FdbTuple.CombineHashCodes(h, comparer.GetHashCode(item)); + + h = HashCodes.Combine(h, comparer.GetHashCode(item)); } if (canUseCache) m_hashCode = h; return h; diff --git a/FoundationDB.Client/Shared/Tuples/STuple.cs b/FoundationDB.Client/Shared/Tuples/STuple.cs new file mode 100644 index 000000000..b1ededa29 --- /dev/null +++ b/FoundationDB.Client/Shared/Tuples/STuple.cs @@ -0,0 +1,1028 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Collections.Tuples +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.Linq; + using System.Runtime.CompilerServices; + using System.Text; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Diagnostics.Contracts; + using Doxense.Serialization; + using JetBrains.Annotations; + + /// Factory class for Tuples + [PublicAPI] + public readonly struct STuple : ITuple + { + //note: We cannot use 'Tuple' because it's already used by the BCL in the System namespace, and we cannot use 'Tuples' either because it is part of the namespace... + + /// Empty tuple + /// Not to be mistaken with a 1-tuple containing 'null' ! 
+ [NotNull] + public static readonly ITuple Empty = new STuple(); + + #region Empty Tuple + + public int Count => 0; + + object IReadOnlyList.this[int index] => throw new InvalidOperationException("Tuple is empty"); + + //REVIEW: should we throw if from/to are not null, 0 or -1 ? + public ITuple this[int? from, int? to] => this; + + public TItem Get(int index) + { + throw new InvalidOperationException("Tuple is empty"); + } + + public ITuple Append(T1 value) => new STuple(value); + + public ITuple Concat(ITuple tuple) + { + Contract.NotNull(tuple, nameof(tuple)); + if (tuple.Count == 0) return this; + return tuple; + } + + public void CopyTo(object[] array, int offset) + { + //NO-OP + } + + public IEnumerator GetEnumerator() + { + yield break; + } + + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() + { + return this.GetEnumerator(); + } + + public override string ToString() + { + return "()"; + } + + public override int GetHashCode() + { + return 0; + } + + public bool Equals(ITuple value) + { + return value != null && value.Count == 0; + } + + public override bool Equals(object obj) + { + return Equals(obj as ITuple); + } + + bool System.Collections.IStructuralEquatable.Equals(object other, System.Collections.IEqualityComparer comparer) + { + return other is ITuple tuple && tuple.Count == 0; + } + + int System.Collections.IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer comparer) + { + return 0; + } + + #endregion + + #region Creation + + /// Create a new empty tuple with 0 elements + [MethodImpl(MethodImplOptions.AggressiveInlining), DebuggerStepThrough] + public static STuple Create() + { + //note: redundant with STuple.Empty, but is here to fit nicely with the other Create overloads + return new STuple(); + } + + /// Create a new 1-tuple, holding only one item + [MethodImpl(MethodImplOptions.AggressiveInlining), DebuggerStepThrough] + public static STuple Create(T1 item1) + { + return new STuple(item1); + } + + 
/// Create a new 2-tuple, holding two items + [MethodImpl(MethodImplOptions.AggressiveInlining), DebuggerStepThrough] + public static STuple Create(T1 item1, T2 item2) + { + return new STuple(item1, item2); + } + + /// Create a new 3-tuple, holding three items + [MethodImpl(MethodImplOptions.AggressiveInlining), DebuggerStepThrough] + public static STuple Create(T1 item1, T2 item2, T3 item3) + { + return new STuple(item1, item2, item3); + } + + /// Create a new 4-tuple, holding four items + [MethodImpl(MethodImplOptions.AggressiveInlining), DebuggerStepThrough] + public static STuple Create(T1 item1, T2 item2, T3 item3, T4 item4) + { + return new STuple(item1, item2, item3, item4); + } + + /// Create a new 5-tuple, holding five items + [MethodImpl(MethodImplOptions.AggressiveInlining), DebuggerStepThrough] + public static STuple Create(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + return new STuple(item1, item2, item3, item4, item5); + } + + /// Create a new 6-tuple, holding six items + [MethodImpl(MethodImplOptions.AggressiveInlining), DebuggerStepThrough] + public static STuple Create(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + return new STuple(item1, item2, item3, item4, item5, item6); + } + + /// Create a new N-tuple, from N items + /// Items to wrap in a tuple + /// If you already have an array of items, you should call instead. Mutating the array, would also mutate the tuple! + [NotNull] + public static ITuple Create([NotNull] params object[] items) + { + Contract.NotNull(items, nameof(items)); + + //note: this is a convenience method for people that wants to pass more than 3 args arguments, and not have to call CreateRange(object[]) method + + if (items.Length == 0) return new STuple(); + + // We don't copy the array, and rely on the fact that the array was created by the compiler and that nobody will get a reference on it. 
+ return new ListTuple(items, 0, items.Length); + } + + /// Create a new 1-tuple, holding only one item + /// This is the non-generic equivalent of STuple.Create<object>() + [NotNull] + public static ITuple CreateBoxed(object item) + { + return new STuple(item); + } + + /// Create a new N-tuple that wraps an array of untyped items + /// If the original array is mutated, the tuple will reflect the changes! + [NotNull] + public static ITuple Wrap([NotNull] object[] items) + { + //note: this method only exists to differentiate between Create(object[]) and Create() + Contract.NotNull(items, nameof(items)); + return FromObjects(items, 0, items.Length, copy: false); + } + + /// Create a new N-tuple that wraps a section of an array of untyped items + /// If the original array is mutated, the tuple will reflect the changes! + [NotNull] + public static ITuple Wrap([NotNull] object[] items, int offset, int count) + { + return FromObjects(items, offset, count, copy: false); + } + + /// Create a new N-tuple by copying the content of an array of untyped items + [NotNull] + public static ITuple FromObjects([NotNull] object[] items) + { + //note: this method only exists to differentiate between Create(object[]) and Create() + Contract.NotNull(items, nameof(items)); + return FromObjects(items, 0, items.Length, copy: true); + } + + /// Create a new N-tuple by copying a section of an array of untyped items + [NotNull] + public static ITuple FromObjects([NotNull] object[] items, int offset, int count) + { + return FromObjects(items, offset, count, copy: true); + } + + /// Create a new N-tuple that wraps a section of an array of untyped items + /// If is true, and the original array is mutated, the tuple will reflect the changes! 
+ [NotNull] + public static ITuple FromObjects([NotNull] object[] items, int offset, int count, bool copy) + { + Contract.NotNull(items, nameof(items)); + Contract.Positive(offset, nameof(offset)); + Contract.Positive(count, nameof(count)); + Contract.LessOrEqual(offset + count, items.Length, nameof(count), "Source array is too small"); + + if (count == 0) return STuple.Empty; + + if (copy) + { + var tmp = new object[count]; + Array.Copy(items, offset, tmp, 0, count); + return new ListTuple(tmp, 0, count); + } + else + { + // can mutate if passed a pre-allocated array: { var foo = new objec[123]; Create(foo); foo[42] = "bad"; } + return new ListTuple(items, offset, count); + } + } + + /// Create a new tuple, from an array of typed items + /// Array of items + /// Tuple with the same size as and where all the items are of type + [NotNull] + public static ITuple FromArray([NotNull] T[] items) + { + Contract.NotNull(items, nameof(items)); + + return FromArray(items, 0, items.Length); + } + + /// Create a new tuple, from a section of an array of typed items + [NotNull] + public static ITuple FromArray([NotNull] T[] items, int offset, int count) + { + Contract.NotNull(items, nameof(items)); + Contract.Positive(offset, nameof(offset)); + Contract.Positive(count, nameof(count)); + Contract.LessOrEqual(offset + count, items.Length, nameof(count), "Source array is too small"); + + switch (count) + { + case 0: return Create(); + case 1: return Create(items[offset]); + case 2: return Create(items[offset], items[offset + 1]); + case 3: return Create(items[offset], items[offset + 1], items[offset + 2]); + case 4: return Create(items[offset], items[offset + 1], items[offset + 2], items[offset + 3]); + case 5: return Create(items[offset], items[offset + 1], items[offset + 2], items[offset + 3], items[offset + 4]); + case 6: return Create(items[offset], items[offset + 1], items[offset + 2], items[offset + 3], items[offset + 4], items[offset + 5]); + default: + { // copy the items 
in a temp array + //TODO: we would probably benefit from having an ListTuple here! + var tmp = new object[count]; + Array.Copy(items, offset, tmp, 0, count); + return new ListTuple(tmp, 0, count); + } + } + } + + /// Create a new tuple from a sequence of typed items + [NotNull] + public static ITuple FromEnumerable([NotNull] IEnumerable items) + { + Contract.NotNull(items, nameof(items)); + + if (items is T[] arr) + { + return FromArray(arr, 0, arr.Length); + } + + // may already be a tuple (because it implements IE) + if (items is ITuple tuple) + { + return tuple; + } + + object[] tmp = items.Cast().ToArray(); + //TODO: we would probably benefit from having an ListTuple here! + return new ListTuple(tmp, 0, tmp.Length); + } + + /// Concatenates two tuples together + [NotNull] + public static ITuple Concat([NotNull] ITuple head, [NotNull] ITuple tail) + { + Contract.NotNull(head, nameof(head)); + Contract.NotNull(tail, nameof(tail)); + + return head.Count == 0 ? tail + : tail.Count == 0 ? 
head + : new JoinedTuple(head, tail); + } + + [Pure] + public static STuple Create(ValueTuple tuple) + { + return new STuple(tuple.Item1); + } + + [Pure] + public static STuple Create(ref ValueTuple tuple) + { + return new STuple(tuple.Item1); + } + + [Pure] + public static STuple Create((T1, T2) tuple) + { + return new STuple(tuple.Item1, tuple.Item2); + } + + [Pure] + public static STuple Create(ref (T1, T2) tuple) + { + return new STuple(tuple.Item1, tuple.Item2); + } + + [Pure] + public static STuple Create((T1, T2, T3) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3); + } + + [Pure] + public static STuple Create(ref (T1, T2, T3) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3); + } + + [Pure] + public static STuple Create((T1, T2, T3, T4) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + [Pure] + public static STuple Create(ref (T1, T2, T3, T4) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + [Pure] + public static STuple Create((T1, T2, T3, T4, T5) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5); + } + + [Pure] + public static STuple Create((T1, T2, T3, T4, T5, T6) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5, tuple.Item6); + } + + [Pure] + public static STuple Create(ref (T1, T2, T3, T4, T5, T6) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5, tuple.Item6); + } + + #endregion + + #region Internal Helpers... + + /// Determines whether the specified tuple instances are considered equal + /// Left tuple + /// Right tuple + /// True if the tuples are considered equal; otherwise, false. 
If both and are null, the methods returns true; + /// This method is equivalent of calling left.Equals(right), + public static bool Equals(ITuple left, ITuple right) + { + if (object.ReferenceEquals(left, null)) return object.ReferenceEquals(right, null); + return left.Equals(right); + } + + /// Determines whether the specifield tuple instances are considered similar + /// Left tuple + /// Right tuple + /// True if the tuples are considered similar; otherwise, false. If both and are null, the methods returns true; + public static bool Equivalent(ITuple left, ITuple right) + { + if (object.ReferenceEquals(left, null)) return object.ReferenceEquals(right, null); + return !object.ReferenceEquals(right, null) && TupleHelpers.Equals(left, right, TupleComparisons.Default); + } + + public static class Formatter + { + + private const string TokenNull = "null"; + private const string TokenFalse = "false"; + private const string TokenTrue = "true"; + private const string TokenDoubleQuote = "\""; + private const string TokenSingleQuote = "'"; + private const string TokenTupleEmpty = "()"; + private const string TokenTupleSep = ", "; + private const string TokenTupleClose = ")"; + private const string TokenTupleSingleClose = ",)"; + + /// Converts any object into a displayable string, for logging/debugging purpose + /// Object to stringify + /// String representation of the object + /// + /// Stringify<{REF_TYPE}>(null) => "nil" + /// Stringify<string>{string}("hello") => "\"hello\"" + /// Stringify<int>(123) => "123" + /// Stringify<double>(123.4d) => "123.4" + /// Stringify<bool>(true) => "true" + /// Stringify<char>('Z') => "'Z'" + /// Stringify<Slice>((...) => hexa decimal string ("01 23 45 67 89 AB CD EF") + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(T item) + { + if (default(T) == null) + { + if (item == null) return TokenNull; + } + // ! 
+ if (typeof(T) == typeof(int)) return Stringify((int) (object) item); + if (typeof(T) == typeof(uint)) return Stringify((uint) (object) item); + if (typeof(T) == typeof(long)) return Stringify((long) (object) item); + if (typeof(T) == typeof(ulong)) return Stringify((ulong) (object) item); + if (typeof(T) == typeof(bool)) return Stringify((bool) (object) item); + if (typeof(T) == typeof(char)) return Stringify((char) (object) item); + if (typeof(T) == typeof(Slice)) return Stringify((Slice)(object)item); + if (typeof(T) == typeof(double)) return Stringify((double) (object) item); + if (typeof(T) == typeof(float)) return Stringify((float) (object) item); + if (typeof(T) == typeof(Guid)) return Stringify((Guid) (object) item); + if (typeof(T) == typeof(Uuid128)) return Stringify((Uuid128) (object) item); + if (typeof(T) == typeof(Uuid64)) return Stringify((Uuid64) (object) item); + // + if (typeof(T) == typeof(string)) return Stringify((string) (object) item); + + // some other type + return StringifyInternal(item); + } + + /// Converts any object into a displayable string, for logging/debugging purpose + /// Object to stringify + /// String representation of the object + /// + /// Stringify(null) => "nil" + /// Stringify("hello") => "\"hello\"" + /// Stringify(123) => "123" + /// Stringify(123.4d) => "123.4" + /// Stringify(true) => "true" + /// Stringify('Z') => "'Z'" + /// Stringify((Slice)...) 
=> hexa decimal string ("01 23 45 67 89 AB CD EF") + /// + [NotNull] + internal static string StringifyBoxed(object item) + { + switch (item) + { + case null: return TokenNull; + case string s: return Stringify(s); + case int i: return Stringify(i); + case long l: return Stringify(l); + case uint u: return Stringify(u); + case ulong ul: return Stringify(ul); + case bool b: return Stringify(b); + case char c: return Stringify(c); + case Slice sl: return Stringify(sl); + case double d: return Stringify(d); + case float f: return Stringify(f); + case Guid guid: return Stringify(guid); + case Uuid128 u128: return Stringify(u128); + case Uuid64 u64: return Stringify(u64); + } + + // some other type + return StringifyInternal(item); + } + + private static string StringifyInternal(object item) + { + if (item is byte[] bytes) return Stringify(bytes.AsSlice()); + if (item is Slice slice) return Stringify(slice); + if (item is ArraySegment buffer) return Stringify(buffer.AsSlice()); + //TODO: Span, ReadOnlySpan, Memory, ReadOnlyMemory, ... + if (item is IFormattable f) return f.ToString(null, CultureInfo.InvariantCulture); + + // This will probably not give a meaningful result ... :( + return item.ToString(); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + //TODO: escape the string? If it contains \0 or control chars, it can cause problems in the console or debugger output + public static string Stringify(string item) => TokenDoubleQuote + item + TokenDoubleQuote; /* "hello" */ + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(bool item) => item ? 
TokenTrue : TokenFalse; + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(int item) => StringConverters.ToString(item); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(uint item) => StringConverters.ToString(item); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(long item) => StringConverters.ToString(item); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(ulong item) => StringConverters.ToString(item); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(double item) => item.ToString("R", CultureInfo.InvariantCulture); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(float item) => item.ToString("R", CultureInfo.InvariantCulture); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(char item) => TokenSingleQuote + new string(item, 1) + TokenSingleQuote; /* 'X' */ + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(Slice item) => item.IsNull ? 
"null" : '`' + Slice.Dump(item, item.Count) + '`'; + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(byte[] item) => Stringify(item.AsSlice()); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(ArraySegment item) => Stringify(item.AsSlice()); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(Guid item) => item.ToString("B", CultureInfo.InstalledUICulture); /* {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx} */ + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(Uuid128 item) => item.ToString("B", CultureInfo.InstalledUICulture); /* {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx} */ + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static string Stringify(Uuid64 item) => item.ToString("B", CultureInfo.InstalledUICulture); /* {xxxxxxxx-xxxxxxxx} */ + + /// Converts a list of object into a displaying string, for loggin/debugging purpose + /// Array containing items to stringfy + /// Start offset of the items to convert + /// Number of items to convert + /// String representation of the tuple in the form "(item1, item2, ... 
itemN,)" + /// ToString(STuple.Create("hello", 123, true, "world")) => "(\"hello\", 123, true, \"world\",) + [NotNull] + public static string ToString(object[] items, int offset, int count) + { + if (items == null) return String.Empty; + Contract.Requires(offset >= 0 && count >= 0); + + if (count <= 0) + { // empty tuple: "()" + return TokenTupleEmpty; + } + + var sb = new StringBuilder(); + sb.Append('('); + sb.Append(StringifyBoxed(items[offset++])); + + if (count == 1) + { // singleton tuple : "(X,)" + return sb.Append(TokenTupleSingleClose).ToString(); + } + + while (--count > 0) + { + sb.Append(TokenTupleSep /* ", " */).Append(StringifyBoxed(items[offset++])); + } + return sb.Append(TokenTupleClose /* ",)" */).ToString(); + } + + /// Converts a sequence of object into a displaying string, for loggin/debugging purpose + /// Sequence of items to stringfy + /// String representation of the tuple in the form "(item1, item2, ... itemN,)" + /// ToString(STuple.Create("hello", 123, true, "world")) => "(\"hello\", 123, true, \"world\") + [NotNull] + public static string ToString(IEnumerable items) + { + if (items == null) return string.Empty; + + if (items is object[] arr) return ToString(arr, 0, arr.Length); + + using (var enumerator = items.GetEnumerator()) + { + if (!enumerator.MoveNext()) + { // empty tuple : "()" + return TokenTupleEmpty; + } + + var sb = new StringBuilder(); + sb.Append('(').Append(StringifyBoxed(enumerator.Current)); + bool singleton = true; + while (enumerator.MoveNext()) + { + singleton = false; + sb.Append(TokenTupleSep).Append(StringifyBoxed(enumerator.Current)); + } + // add a trailing ',' for singletons + return sb.Append(singleton ? 
TokenTupleSingleClose : TokenTupleClose).ToString(); + } + } + + } + + /// Hleper to parse strings back into tuples + public static class Deformatter + { + + + [Pure, NotNull] + public static ITuple Parse([NotNull] string expression) + { + Contract.NotNullOrWhiteSpace(expression, nameof(expression)); + var parser = new Parser(expression.Trim()); + var tuple = parser.ParseExpression(); + if (parser.HasMore) throw new FormatException("Unexpected token after final ')' in Tuple expression."); + return tuple; + } + + /// Parse a tuple expression at the start of a string + /// String who starts with a valid Tuple expression, with optional extra characters + /// First item is the parsed tuple, and the second item is the rest of the string (or null if we consumed the whole expression) + public static void ParseNext(string expression, out ITuple tuple, out string tail) + { + Contract.NotNullOrWhiteSpace(expression, nameof(expression)); + if (string.IsNullOrWhiteSpace(expression)) + { + tuple = null; + tail = null; + return; + } + + var parser = new Parser(expression.Trim()); + tuple = parser.ParseExpression(); + string s = parser.GetTail(); + tail = string.IsNullOrWhiteSpace(s) ? null : s.Trim(); + } + + private struct Parser + { + + private const char EOF = '\xFFFF'; + + public Parser(string expression) + { + this.Expression = expression; + this.Cursor = 0; + } + + public readonly string Expression; + private int Cursor; + + public bool HasMore => this.Cursor < this.Expression.Length; + + [CanBeNull] + public string GetTail() => this.Cursor < this.Expression.Length ? this.Expression.Substring(this.Cursor) : null; + + private char ReadNext() + { + int p = this.Cursor; + string s = this.Expression; + if ((uint) p >= (uint) s.Length) return EOF; + char c = s[p]; + this.Cursor = p + 1; + return c; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private char PeekNext() + { + int p = this.Cursor; + string s = this.Expression; + return (uint) p < (uint) s.Length ? 
s[p] : EOF; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private void Advance() + { + ++this.Cursor; + } + + private bool TryReadKeyword(string keyword) + { + //IMPORTANT: 'keyword' doit être en lowercase! + int p = this.Cursor; + string s = this.Expression; + int r = keyword.Length; + if ((uint) (p + r) > (uint) s.Length) return false; // not enough + for (int i = 0; i < r; i++) + { + if (char.ToLowerInvariant(s[p + i]) != keyword[i]) return false; + } + this.Cursor = p + r; + return true; + } + + /// Parse a tuple + [Pure, NotNull] + public ITuple ParseExpression() + { + + char c = ReadNext(); + if (c != '(') + { + throw new FormatException("Invalid tuple expression. Valid tuple must start with '(' and end with ')'."); + } + + bool expectItem = true; + + var items = new List(); + while (true) + { + c = PeekNext(); + switch (c) + { + case ')': + { + //note: we accept a terminal ',' without the last item, to allow "(123,)" as a valid tuple. + if (expectItem && items.Count > 1) throw new FormatException("Missing item before last ',' in Tuple expression"); + Advance(); + return items.Count == 0 ? STuple.Empty : new ListTuple(items); + } + case EOF: + { + throw new FormatException("Missing ')' at the end of tuple expression."); + } + + case ',': + { + if (expectItem) throw new FormatException("Missing ',' before next item in Tuple expression."); + Advance(); + expectItem = true; + break; + } + + case '"': + { // string literal + string s = ReadStringLiteral(); + items.Add(s); + expectItem = false; + break; + } + case '\'': + { // single char literal + Advance(); + char x = ReadNext(); + c = ReadNext(); + if (c != '\'') throw new FormatException("Missing quote after character. Single quotes are for single characters. For strings, use double quotes!"); + items.Add(x); + expectItem = false; + break; + } + case '{': + { // Guid + Guid g = ReadGuidLiteral(); + items.Add(g); + expectItem = false; + break; + } + case '(': + { // embedded tuple! 
+ var sub = ParseExpression(); + items.Add(sub); + expectItem = false; + break; + } + + default: + { + if (char.IsWhiteSpace(c)) + { // ignore whitespaces + Advance(); + break; + } + + if (char.IsDigit(c) || c == '-') + { // number! + items.Add(ReadNumberLiteral()); + expectItem = false; + break; + } + + if (c == 't' || c == 'T') + { // true? + if (!TryReadKeyword("true")) throw new FormatException("Unrecognized keyword in Tuple expression. Did you meant to write 'true' instead?"); + items.Add(true); + expectItem = false; + break; + } + + if (c == 'f' || c == 'F') + { // false? + if (!TryReadKeyword("false")) throw new FormatException("Unrecognized keyword in Tuple expression. Did you meant to write 'false' instead?"); + items.Add(false); + expectItem = false; + break; + } + + throw new FormatException($"Invalid token '{c}' in Tuple expression."); + } + } + } + } + + private object ReadNumberLiteral() + { + bool dec = false; + bool neg = false; + bool exp = false; + + string s = this.Expression; + int start = this.Cursor; + int end = s.Length; + int p = start; + ulong x = 0; + + char c = s[p]; + if (c == '-') + { + neg = true; + } + else if (c != '+') + { + x = (ulong) (c - '0'); + } + ++p; + + while (p < end) + { + c = s[p]; + if (char.IsDigit(c)) + { + x = checked(x * 10 + (ulong) (c - '0')); + ++p; + continue; + } + + if (c == '.') + { + if (dec) throw new FormatException("Redundant '.' in number that already has a decimal point."); + if (exp) throw new FormatException("Unexpected '.' 
in exponent part of number."); + dec = true; + ++p; + continue; + } + + if (c == ',' || c == ')' || char.IsWhiteSpace(c)) + { + break; + } + + if (c == 'E') + { + if (dec) throw new FormatException("Redundant 'E' in number that already has an exponent."); + exp = true; + ++p; + continue; + } + + if (c == '-' || c == '+') + { + if (!exp) throw new FormatException("Unexpected sign in number."); + ++p; + continue; + } + + throw new FormatException($"Unexpected token '{c}' while parsing number in Tuple expression."); + } + + this.Cursor = p; + + if (!dec && !exp) + { + if (neg) + { + if (x < int.MaxValue) return -((int) x); + if (x < long.MaxValue) return -((long) x); + if (x == 1UL + long.MaxValue) return long.MinValue; + throw new OverflowException("Parsed number is too large"); + } + + if (x <= int.MaxValue) return (int) x; + if (x <= long.MaxValue) return (long) x; + return x; + } + + return double.Parse(s.Substring(start, p - start), CultureInfo.InvariantCulture); + } + + private string ReadStringLiteral() + { + string s = this.Expression; + int p = this.Cursor; + int end = p + s.Length; + + // main loop is optimistic and assumes that the string will not be escaped. 
+ // If we find the first instance of '\', then we switch to a secondary loop that uses a StringBuilder to decode each character + + char c = s[p++]; + if (c != '"') throw new FormatException("Expected '\"' token is missing in Tuple expression"); + int start = p; + + while (p < end) + { + c = s[p]; + if (c == '"') + { + this.Cursor = p + 1; + return s.Substring(start, p - start); + } + + if (c == '\\') + { // string is escaped, will need to decode the content + ++p; + goto parse_escaped_string; + } + ++p; + } + goto truncated_string; + + parse_escaped_string: + bool escape = true; + var sb = new StringBuilder(); + if (p > start + 1) sb.Append(s.Substring(start, p - start - 1)); // copy what we have parsed so far + while (p < end) + { + c = s[p]; + if (c == '"') + { + if (escape) + { + escape = false; + } + else + { + this.Cursor = p + 1; + return sb.ToString(); + } + } + else if (c == '\\') + { + if (!escape) + { // start of escape sequence + escape = true; + ++p; + continue; + } + escape = false; + } + else if (escape) + { + if (c == 't') c = '\t'; + else if (c == 'r') c = '\r'; + else if (c == 'n') c = '\n'; + //TODO: \x## and \u#### syntax! + else throw new FormatException($"Unrecognized '\\{c}' token while parsing string in Tuple expression"); + escape = false; + } + ++p; + sb.Append(c); + } + truncated_string: + throw new FormatException("Missing double quote at end of string in Tuple expression"); + } + + private Guid ReadGuidLiteral() + { + var s = this.Expression; + int p = this.Cursor; + int end = s.Length; + char c = s[p]; + if (s[p] != '{') throw new FormatException($"Unexpected token '{c}' at start of GUID in Tuple expression"); + ++p; + int start = p; + while (p < end) + { + c = s[p]; + if (c == '}') + { + string lit = s.Substring(start, p - start); + // Shortcut: "{} or {0} means "00000000-0000-0000-0000-000000000000" + Guid g = lit == "" || lit == "0" ? 
Guid.Empty : Guid.Parse(lit); + this.Cursor = p + 1; + return g; + } + ++p; + } + + throw new FormatException("Invalid GUID in Tuple expression."); + } + + } + } + + #endregion + } +} diff --git a/FoundationDB.Client/Shared/Tuples/STuple`1.cs b/FoundationDB.Client/Shared/Tuples/STuple`1.cs new file mode 100644 index 000000000..97baee083 --- /dev/null +++ b/FoundationDB.Client/Shared/Tuples/STuple`1.cs @@ -0,0 +1,353 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace Doxense.Collections.Tuples +{ + using System; + using System.Collections; + using System.Collections.Generic; + using System.ComponentModel; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; + using JetBrains.Annotations; + + /// Tuple that holds only one item + /// Type of the item + [ImmutableObject(true), DebuggerDisplay("{ToString(),nq}")] + public readonly struct STuple : ITuple, IEquatable>, IEquatable> + { + // This is mostly used by code that create a lot of temporary singleton, to reduce the pressure on the Garbage Collector by allocating them on the stack. + // Please note that if you return an STuple as an ITuple, it will be boxed by the CLR and all memory gains will be lost + + /// First and only item in the tuple + public readonly T1 Item1; + + [DebuggerStepThrough] + public STuple(T1 item1) + { + this.Item1 = item1; + } + + public int Count => 1; + + public object this[int index] + { + get + { + if (index > 0 || index < -1) return TupleHelpers.FailIndexOutOfRange(index, 1); + return this.Item1; + } + } + + public ITuple this[int? fromIncluded, int? toExcluded] + { + get { return TupleHelpers.Splice(this, fromIncluded, toExcluded); } + } + + /// Return the typed value of an item of the tuple, given its position + /// Expected type of the item + /// Position of the item (if negative, means relative from the end) + /// Value of the item at position , adapted into type . + public TItem Get(int index) + { + if (index > 0 || index < -1) return TupleHelpers.FailIndexOutOfRange(index, 1); + return TypeConverters.Convert(this.Item1); + } + + ITuple ITuple.Append(T2 value) + { + return new STuple(this.Item1, value); + } + + /// Appends a tuple as a single new item at the end of the current tuple. 
+ /// Tuple that will be added as an embedded item + /// New tuple with one extra item + /// If you want to append the *items* of , and not the tuple itself, please call ! + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public STuple Append(T2 value) + { + return new STuple(this.Item1, value); + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public ITuple Concat(ITuple tuple) + { + return STuple.Concat(this, tuple); + } + + /// Copy the item of this singleton into an array at the specified offset + public void CopyTo(object[] array, int offset) + { + array[offset] = this.Item1; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void Deconstruct(out T1 item1) + { + item1 = this.Item1; + } + + + /// Execute a lambda Action with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void With([NotNull] Action lambda) + { + lambda(this.Item1); + } + + /// Execute a lambda Function with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + /// Result of calling with the items of this tuple + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public TItem With([NotNull] Func lambda) + { + return lambda(this.Item1); + } + + public IEnumerator GetEnumerator() + { + yield return this.Item1; + } + + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() + { + return this.GetEnumerator(); + } + + public override string ToString() + { + // singleton tuples end with a trailing ',' + return "(" + STuple.Formatter.Stringify(this.Item1) + ",)"; + } + + public override bool Equals(object obj) + { + return obj != null && 
((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); + } + + public bool Equals(ITuple other) + { + return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public bool Equals(STuple other) + { + return SimilarValueComparer.Default.Equals(this.Item1, other.Item1); + } + + public override int GetHashCode() + { + return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); + } + + public static bool operator ==(STuple left, STuple right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1); + } + + public static bool operator !=(STuple left, STuple right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1); + } + + bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) + { + if (other == null) return false; + if (other is STuple stuple) + { + return comparer.Equals(this.Item1, stuple.Item1); + } + if (other is ValueTuple vtuple) + { + return comparer.Equals(this.Item1, vtuple.Item1); + } + return TupleHelpers.Equals(this, other, comparer); + } + + int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) + { + return comparer.GetHashCode(this.Item1); + } + + [Pure] + public static implicit operator STuple([NotNull] Tuple t) + { + Contract.NotNull(t, nameof(t)); + return new STuple(t.Item1); + } + + [Pure, NotNull] + public static explicit operator Tuple(STuple t) + { + return new Tuple(t.Item1); + } + + public void Fill(ref ValueTuple t) + { + t.Item1 = this.Item1; + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat(ValueTuple tuple) + { + return new STuple(this.Item1, tuple.Item1); + } + + /// Appends the items of a tuple at the end of the current tuple. 
+ /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat((T2, T3) tuple) + { + return new STuple(this.Item1, tuple.Item1, tuple.Item2); + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat((T2, T3, T4) tuple) + { + return new STuple(this.Item1, tuple.Item1, tuple.Item2, tuple.Item3); + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat((T2, T3, T4, T5) tuple) + { + return new STuple(this.Item1, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat((T2, T3, T4, T5, T6) tuple) + { + return new STuple(this.Item1, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public ValueTuple ToValueTuple() + { + return new ValueTuple(this.Item1); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator STuple(ValueTuple t) + { + return new STuple(t.Item1); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator ValueTuple(STuple t) + { + return new ValueTuple(t.Item1); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + bool IEquatable>.Equals(ValueTuple other) + { + return SimilarValueComparer.Default.Equals(this.Item1, other.Item1); + } + + public static bool operator ==(STuple left, ValueTuple 
right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1); + } + + public static bool operator ==(ValueTuple left, STuple right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1); + } + + public static bool operator !=(STuple left, ValueTuple right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1); + } + + public static bool operator !=(ValueTuple left, STuple right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1); + } + + public sealed class Comparer : IComparer> + { + public static Comparer Default { [NotNull] get; } = new Comparer(); + + private static readonly Comparer Comparer1 = Comparer.Default; + + private Comparer() { } + + public int Compare(STuple x, STuple y) + { + return Comparer1.Compare(x.Item1, y.Item1); + } + } + + public sealed class EqualityComparer : IEqualityComparer> + { + public static EqualityComparer Default { [NotNull] get; } = new EqualityComparer(); + + private static readonly EqualityComparer Comparer1 = EqualityComparer.Default; + + private EqualityComparer() { } + + public bool Equals(STuple x, STuple y) + { + return Comparer1.Equals(x.Item1, y.Item1); + } + + public int GetHashCode(STuple obj) + { + return Comparer1.GetHashCode(obj.Item1); + } + } + + } + +} diff --git a/FoundationDB.Client/Shared/Tuples/STuple`2.cs b/FoundationDB.Client/Shared/Tuples/STuple`2.cs new file mode 100644 index 000000000..c8ce9e20d --- /dev/null +++ b/FoundationDB.Client/Shared/Tuples/STuple`2.cs @@ -0,0 +1,398 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Collections.Tuples +{ + using System; + using System.Collections; + using System.Collections.Generic; + using System.ComponentModel; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; + using JetBrains.Annotations; + + /// Tuple that holds a pair of items + /// Type of the first item + /// Type of the second item + [ImmutableObject(true), DebuggerDisplay("{ToString(),nq}")] + public readonly struct STuple : ITuple, IEquatable>, IEquatable<(T1, T2)> + { + // This is mostly used by code that create a lot of temporary pair, to reduce the pressure on the Garbage Collector by allocating them on the stack. 
+ // Please note that if you return an STuple as an ITuple, it will be boxed by the CLR and all memory gains will be lost + + /// First element of the pair + public readonly T1 Item1; + /// Seconde element of the pair + public readonly T2 Item2; + + [DebuggerStepThrough] + public STuple(T1 item1, T2 item2) + { + this.Item1 = item1; + this.Item2 = item2; + } + + public int Count => 2; + + public object this[int index] + { + get + { + switch (index) + { + case 0: case -2: return this.Item1; + case 1: case -1: return this.Item2; + default: return TupleHelpers.FailIndexOutOfRange(index, 2); + } + } + } + + public ITuple this[int? fromIncluded, int? toExcluded] + { + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return TupleHelpers.Splice(this, fromIncluded, toExcluded); } + } + + /// Return the typed value of an item of the tuple, given its position + /// Expected type of the item + /// Position of the item (if negative, means relative from the end) + /// Value of the item at position , adapted into type . + public TItem Get(int index) + { + switch(index) + { + case 0: case -2: return TypeConverters.Convert(this.Item1); + case 1: case -1: return TypeConverters.Convert(this.Item2); + default: return TupleHelpers.FailIndexOutOfRange(index, 2); + } + } + + /// Return the value of the last item in the tuple + public T2 Last + { + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Item2; } + } + + /// Return a tuple without the first item + public STuple Tail + { + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return new STuple(this.Item2); } + } + + ITuple ITuple.Append(T3 value) + { + return new STuple(this.Item1, this.Item2, value); + } + + /// Appends a single new item at the end of the current tuple. + /// Value that will be added as an embedded item + /// New tuple with one extra item + /// If is a tuple, and you want to append the *items* of this tuple, and not the tuple itself, please call ! 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] + public STuple Append(T3 value) + { + return new STuple(this.Item1, this.Item2, value); + // Note: By create a STuple we risk an explosion of the number of combinations of Ts which could potentially cause problems at runtime (too many variants of the same generic types). + // ex: if we have N possible types, then there could be N^3 possible variants of STuple that the JIT has to deal with. + // => if this starts becoming a problem, then we should return a list tuple ! + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public ITuple Concat(ITuple tuple) + { + return STuple.Concat(this, tuple); + } + + /// Copy both items of this pair into an array at the specified offset + public void CopyTo(object[] array, int offset) + { + array[offset] = this.Item1; + array[offset + 1] = this.Item2; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void Deconstruct(out T1 item1, out T2 item2) + { + item1 = this.Item1; + item2 = this.Item2; + } + + /// Execute a lambda Action with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void With([NotNull] Action lambda) + { + lambda(this.Item1, this.Item2); + } + + /// Execute a lambda Function with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + /// Result of calling with the items of this tuple + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public TItem With([NotNull] Func lambda) + { + return lambda(this.Item1, this.Item2); + } + + public IEnumerator GetEnumerator() + { + yield return this.Item1; + yield return this.Item2; + } + + 
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() + { + return this.GetEnumerator(); + } + + public override string ToString() + { + return string.Concat( + "(", + STuple.Formatter.Stringify(this.Item1), ", ", + STuple.Formatter.Stringify(this.Item2), + ")" + ); + } + + public override bool Equals(object obj) + { + return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); + } + + public bool Equals(ITuple other) + { + return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public bool Equals(STuple other) + { + return SimilarValueComparer.Default.Equals(this.Item1, other.Item1) + && SimilarValueComparer.Default.Equals(this.Item2, other.Item2); + } + + public override int GetHashCode() + { + return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); + } + + public static bool operator ==(STuple left, STuple right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + && SimilarValueComparer.Default.Equals(left.Item2, right.Item2); + } + + public static bool operator !=(STuple left, STuple right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2); + } + + bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) + { + if (other == null) return false; + if (other is STuple stuple) + { + return comparer.Equals(this.Item1, stuple.Item1) + && comparer.Equals(this.Item2, stuple.Item2); + } + if (other is ValueTuple vtuple) + { + return comparer.Equals(this.Item1, vtuple.Item1) + && comparer.Equals(this.Item2, vtuple.Item2); + } + return TupleHelpers.Equals(this, other, comparer); + } + + int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) + { + return HashCodes.Combine( + comparer.GetHashCode(this.Item1), + comparer.GetHashCode(this.Item2) + 
); + } + + [Pure] + public static implicit operator STuple([NotNull] Tuple t) + { + Contract.NotNull(t, nameof(t)); + return new STuple(t.Item1, t.Item2); + } + + [Pure, NotNull] + public static explicit operator Tuple(STuple t) + { + return new Tuple(t.Item1, t.Item2); + } + + public void Fill(ref (T1, T2) t) + { + t.Item1 = this.Item1; + t.Item2 = this.Item2; + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat(ValueTuple tuple) + { + return new STuple(this.Item1, this.Item2, tuple.Item1); + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat((T3, T4) tuple) + { + return new STuple(this.Item1, this.Item2, tuple.Item1, tuple.Item2); + } + + /// Appends the items of a tuple at the end of the current tuple. 
+ /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat((T3, T4, T5) tuple) + { + return new STuple(this.Item1, this.Item2, tuple.Item1, tuple.Item2, tuple.Item3); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public (T1, T2) ToValueTuple() + { + return (this.Item1, this.Item2); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator STuple((T1, T2) t) + { + return new STuple(t.Item1, t.Item2); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator (T1, T2)(STuple t) + { + return (t.Item1, t.Item2); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + bool IEquatable<(T1, T2)>.Equals((T1, T2) other) + { + return SimilarValueComparer.Default.Equals(this.Item1, other.Item1) + && SimilarValueComparer.Default.Equals(this.Item2, other.Item2); + } + + public static bool operator ==(STuple left, (T1, T2) right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + && SimilarValueComparer.Default.Equals(left.Item2, right.Item2); + } + + public static bool operator ==((T1, T2) left, STuple right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + && SimilarValueComparer.Default.Equals(left.Item2, right.Item2); + } + + public static bool operator !=(STuple left, (T1, T2) right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2); + } + + public static bool operator !=((T1, T2) left, STuple right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2); + } + + public sealed class Comparer : IComparer> + { + + public static Comparer Default { [NotNull] get; } = new Comparer(); + + private static readonly Comparer 
Comparer1 = Comparer.Default; + private static readonly Comparer Comparer2 = Comparer.Default; + + private Comparer() { } + + public int Compare(STuple x, STuple y) + { + int cmp = Comparer1.Compare(x.Item1, y.Item1); + if (cmp == 0) cmp = Comparer2.Compare(x.Item2, y.Item2); + return cmp; + } + + } + + public sealed class EqualityComparer : IEqualityComparer> + { + + public static EqualityComparer Default { [NotNull] get; } = new EqualityComparer(); + + private static readonly EqualityComparer Comparer1 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer2 = EqualityComparer.Default; + + private EqualityComparer() { } + + public bool Equals(STuple x, STuple y) + { + return Comparer1.Equals(x.Item1, y.Item1) + && Comparer2.Equals(x.Item2, y.Item2); + } + + public int GetHashCode(STuple obj) + { + return HashCodes.Combine( + Comparer1.GetHashCode(obj.Item1), + Comparer2.GetHashCode(obj.Item2) + ); + } + + } + + } + +} diff --git a/FoundationDB.Client/Shared/Tuples/STuple`3.cs b/FoundationDB.Client/Shared/Tuples/STuple`3.cs new file mode 100644 index 000000000..edcff3d8c --- /dev/null +++ b/FoundationDB.Client/Shared/Tuples/STuple`3.cs @@ -0,0 +1,437 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Collections.Tuples +{ + using System; + using System.Collections; + using System.Collections.Generic; + using System.ComponentModel; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; + using JetBrains.Annotations; + + /// Tuple that can hold three items + /// Type of the first item + /// Type of the second item + /// Type of the third item + [ImmutableObject(true), DebuggerDisplay("{ToString(),nq}")] + public readonly struct STuple : ITuple, IEquatable>, IEquatable<(T1, T2, T3)> + { + // This is mostly used by code that create a lot of temporary triplet, to reduce the pressure on the Garbage Collector by allocating them on the stack. 
+ // Please note that if you return an STuple as an ITuple, it will be boxed by the CLR and all memory gains will be lost + + /// First element of the triplet + public readonly T1 Item1; + /// Second element of the triplet + public readonly T2 Item2; + /// Third and last elemnt of the triplet + public readonly T3 Item3; + + [DebuggerStepThrough] + public STuple(T1 item1, T2 item2, T3 item3) + { + this.Item1 = item1; + this.Item2 = item2; + this.Item3 = item3; + } + + public int Count => 3; + + public object this[int index] + { + get + { + switch (index) + { + case 0: case -3: return this.Item1; + case 1: case -2: return this.Item2; + case 2: case -1: return this.Item3; + default: return TupleHelpers.FailIndexOutOfRange(index, 3); + } + } + } + + public ITuple this[int? fromIncluded, int? toExcluded] + { + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return TupleHelpers.Splice(this, fromIncluded, toExcluded); } + } + + /// Return the typed value of an item of the tuple, given its position + /// Expected type of the item + /// Position of the item (if negative, means relative from the end) + /// Value of the item at position , adapted into type . + public TItem Get(int index) + { + switch(index) + { + case 0: case -3: return TypeConverters.Convert(this.Item1); + case 1: case -2: return TypeConverters.Convert(this.Item2); + case 2: case -1: return TypeConverters.Convert(this.Item3); + default: return TupleHelpers.FailIndexOutOfRange(index, 3); + } + } + + /// Return the value of the last item in the tuple + public T3 Last + { + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Item3; } + } + + /// Return a tuple without the first item + public STuple Tail + { + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return new STuple(this.Item2, this.Item3); } + } + + ITuple ITuple.Append(T4 value) + { + // here, the caller doesn't care about the exact tuple type, so we simply return a boxed List Tuple. 
+ return new ListTuple(new object[4] { this.Item1, this.Item2, this.Item3, value }, 0, 4); + } + + /// Appends a single new item at the end of the current tuple. + /// Value that will be added as an embedded item + /// New tuple with one extra item + /// If is a tuple, and you want to append the *items* of this tuple, and not the tuple itself, please call ! + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public STuple Append(T4 value) + { + // Here, the caller was explicitly using the STuple struct so probably care about memory footprint, so we keep returning a struct + return new STuple(this.Item1, this.Item2, this.Item3, value); + + // Note: By create a STuple we risk an explosion of the number of combinations of Ts which could potentially cause problems at runtime (too many variants of the same generic types). + // ex: if we have N possible types, then there could be N^4 possible variants of STuple that the JIT has to deal with. + // => if this starts becoming a problem, then we should return a list tuple ! + } + + /// Copy all the items of this tuple into an array at the specified offset + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public STuple Append(ITuple value) + { + //note: this override exists to prevent the explosion of tuple types such as STuple, STuple, STuple> ! + return new STuple(this.Item1, this.Item2, this.Item3, value); + } + + /// Appends the items of a tuple at the end of the current tuple. 
+ /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public ITuple Concat(ITuple tuple) + { + return STuple.Concat(this, tuple); + } + + public void CopyTo(object[] array, int offset) + { + array[offset] = this.Item1; + array[offset + 1] = this.Item2; + array[offset + 2] = this.Item3; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void Deconstruct(out T1 item1, out T2 item2, out T3 item3) + { + item1 = this.Item1; + item2 = this.Item2; + item3 = this.Item3; + } + + /// Execute a lambda Action with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void With([NotNull] Action lambda) + { + lambda(this.Item1, this.Item2, this.Item3); + } + + /// Execute a lambda Function with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + /// Result of calling with the items of this tuple + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public TItem With([NotNull] Func lambda) + { + return lambda(this.Item1, this.Item2, this.Item3); + } + + public IEnumerator GetEnumerator() + { + yield return this.Item1; + yield return this.Item2; + yield return this.Item3; + } + + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() + { + return this.GetEnumerator(); + } + + public override string ToString() + { + return string.Concat( + "(", + STuple.Formatter.Stringify(this.Item1), ", ", + STuple.Formatter.Stringify(this.Item2), ", ", + STuple.Formatter.Stringify(this.Item3), + ")" + ); + } + + public override bool Equals(object obj) + { + return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); + } + + public bool Equals(ITuple other) + { + return other != null && 
((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public bool Equals(STuple other) + { + return SimilarValueComparer.Default.Equals(this.Item1, other.Item1) + && SimilarValueComparer.Default.Equals(this.Item2, other.Item2) + && SimilarValueComparer.Default.Equals(this.Item3, other.Item3); + } + + public override int GetHashCode() + { + return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); + } + + public static bool operator ==(STuple left, STuple right) + { + var comparer = SimilarValueComparer.Default; + return comparer.Equals(left.Item1, right.Item1) + && comparer.Equals(left.Item2, right.Item2) + && comparer.Equals(left.Item3, right.Item3); + } + + public static bool operator !=(STuple left, STuple right) + { + var comparer = SimilarValueComparer.Default; + return !comparer.Equals(left.Item1, right.Item1) + || !comparer.Equals(left.Item2, right.Item2) + || !comparer.Equals(left.Item3, right.Item3); + } + + bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) + { + if (other == null) return false; + if (other is STuple stuple) + { + return comparer.Equals(this.Item1, stuple.Item1) + && comparer.Equals(this.Item2, stuple.Item2) + && comparer.Equals(this.Item3, stuple.Item3); + } + if (other is ValueTuple vtuple) + { + return comparer.Equals(this.Item1, vtuple.Item1) + && comparer.Equals(this.Item2, vtuple.Item2) + && comparer.Equals(this.Item3, vtuple.Item3); + } + return TupleHelpers.Equals(this, other, comparer); + } + + int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) + { + return HashCodes.Combine( + comparer.GetHashCode(this.Item1), + comparer.GetHashCode(this.Item2), + comparer.GetHashCode(this.Item3) + ); + } + + [Pure] + public static implicit operator STuple([NotNull] Tuple t) + { + Contract.NotNull(t, nameof(t)); + return new STuple(t.Item1, t.Item2, t.Item3); + } + + [Pure, NotNull] + public static 
explicit operator Tuple(STuple t) + { + return new Tuple(t.Item1, t.Item2, t.Item3); + } + + public void Fill(ref (T1, T2, T3) t) + { + t.Item1 = this.Item1; + t.Item2 = this.Item2; + t.Item3 = this.Item3; + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat(ValueTuple tuple) + { + return new STuple(this.Item1, this.Item2, this.Item3, tuple.Item1); + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat(ValueTuple tuple) + { + return new STuple(this.Item1, this.Item2, this.Item3, tuple.Item1, tuple.Item2); + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat((T4, T5, T6) tuple) + { + return new STuple(this.Item1, this.Item2, this.Item3, tuple.Item1, tuple.Item2, tuple.Item3); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public (T1, T2, T3) ToValueTuple() + { + return (this.Item1, this.Item2, this.Item3); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator STuple((T1, T2, T3) t) + { + return new STuple(t.Item1, t.Item2, t.Item3); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator (T1, T2, T3) (STuple t) + { + return (t.Item1, t.Item2, t.Item3); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + bool IEquatable<(T1, T2, T3)>.Equals((T1, T2, T3) other) + { + return SimilarValueComparer.Default.Equals(this.Item1, this.Item1) + && SimilarValueComparer.Default.Equals(this.Item2, this.Item2) + 
&& SimilarValueComparer.Default.Equals(this.Item3, this.Item3); + } + + public static bool operator ==(STuple left, (T1, T2, T3) right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + && SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + && SimilarValueComparer.Default.Equals(left.Item3, right.Item3); + } + + public static bool operator ==((T1, T2, T3) left, STuple right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + && SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + && SimilarValueComparer.Default.Equals(left.Item3, right.Item3); + } + + public static bool operator !=(STuple left, (T1, T2, T3) right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + || !SimilarValueComparer.Default.Equals(left.Item3, right.Item3); + } + + public static bool operator !=((T1, T2, T3) left, STuple right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + || !SimilarValueComparer.Default.Equals(left.Item3, right.Item3); + } + + public sealed class Comparer : IComparer> + { + + public static Comparer Default { [NotNull] get; } = new Comparer(); + + private static readonly Comparer Comparer1 = Comparer.Default; + private static readonly Comparer Comparer2 = Comparer.Default; + private static readonly Comparer Comparer3 = Comparer.Default; + + private Comparer() { } + + public int Compare(STuple x, STuple y) + { + int cmp = Comparer1.Compare(x.Item1, y.Item1); + if (cmp == 0) cmp = Comparer2.Compare(x.Item2, y.Item2); + if (cmp == 0) cmp = Comparer3.Compare(x.Item3, y.Item3); + return cmp; + } + + } + + public sealed class EqualityComparer : IEqualityComparer> + { + + public static EqualityComparer Default { [NotNull] get; } = new EqualityComparer(); + + private static readonly EqualityComparer Comparer1 = 
EqualityComparer.Default; + private static readonly EqualityComparer Comparer2 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer3 = EqualityComparer.Default; + + private EqualityComparer() { } + + public bool Equals(STuple x, STuple y) + { + return Comparer1.Equals(x.Item1, y.Item1) + && Comparer2.Equals(x.Item2, y.Item2) + && Comparer3.Equals(x.Item3, y.Item3); + } + + public int GetHashCode(STuple obj) + { + return HashCodes.Combine( + Comparer1.GetHashCode(obj.Item1), + Comparer2.GetHashCode(obj.Item2), + Comparer3.GetHashCode(obj.Item3) + ); + } + } + + } + +} diff --git a/FoundationDB.Client/Shared/Tuples/STuple`4.cs b/FoundationDB.Client/Shared/Tuples/STuple`4.cs new file mode 100644 index 000000000..31ec0fac8 --- /dev/null +++ b/FoundationDB.Client/Shared/Tuples/STuple`4.cs @@ -0,0 +1,448 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Collections.Tuples +{ + using System; + using System.Collections; + using System.Collections.Generic; + using System.ComponentModel; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; + using JetBrains.Annotations; + + /// Tuple that can hold four items + /// Type of the first item + /// Type of the second item + /// Type of the third item + /// Type of the fourth item + [ImmutableObject(true), DebuggerDisplay("{ToString(),nq}")] + public readonly struct STuple : ITuple, IEquatable>, IEquatable<(T1, T2, T3, T4)> + { + // This is mostly used by code that create a lot of temporary quartets, to reduce the pressure on the Garbage Collector by allocating them on the stack. 
+ // Please note that if you return an STuple as an ITuple, it will be boxed by the CLR and all memory gains will be lost + + /// First element of the quartet + public readonly T1 Item1; + /// Second element of the quartet + public readonly T2 Item2; + /// Third element of the quartet + public readonly T3 Item3; + /// Fourth and last element of the quartet + public readonly T4 Item4; + + /// Create a tuple containing for items + [DebuggerStepThrough] + public STuple(T1 item1, T2 item2, T3 item3, T4 item4) + { + this.Item1 = item1; + this.Item2 = item2; + this.Item3 = item3; + this.Item4 = item4; + } + + /// Number of items in this tuple + public int Count => 4; + + /// Return the Nth item in this tuple + public object this[int index] + { + get + { + switch (index) + { + case 0: case -4: return this.Item1; + case 1: case -3: return this.Item2; + case 2: case -2: return this.Item3; + case 3: case -1: return this.Item4; + default: return TupleHelpers.FailIndexOutOfRange(index, 4); + } + } + } + + public ITuple this[int? fromIncluded, int? toExcluded] + { + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return TupleHelpers.Splice(this, fromIncluded, toExcluded); } + } + + /// Return the typed value of an item of the tuple, given its position + /// Expected type of the item + /// Position of the item (if negative, means relative from the end) + /// Value of the item at position , adapted into type . 
+ public TItem Get(int index) + { + switch(index) + { + case 0: case -4: return TypeConverters.Convert(this.Item1); + case 1: case -3: return TypeConverters.Convert(this.Item2); + case 2: case -2: return TypeConverters.Convert(this.Item3); + case 3: case -1: return TypeConverters.Convert(this.Item4); + default: return TupleHelpers.FailIndexOutOfRange(index, 4); + } + } + + /// Return the value of the last item in the tuple + public T4 Last + { + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Item4; } + } + + /// Return a tuple without the first item + public STuple Tail + { + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return new STuple(this.Item2, this.Item3, this.Item4); } + } + + ITuple ITuple.Append(T5 value) + { + // the caller doesn't care about the return type, so just box everything into a list tuple + return new ListTuple(new object[5] { this.Item1, this.Item2, this.Item3, this.Item4, value }, 0, 5); + } + + /// Appends a single new item at the end of the current tuple. + /// Value that will be added as an embedded item + /// New tuple with one extra item + /// If is a tuple, and you want to append the *items* of this tuple, and not the tuple itself, please call ! + [NotNull, Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public LinkedTuple Append(T5 value) + { + // the caller probably cares about the return type, since it is using a struct, but whatever tuple type we use will end up boxing this tuple on the heap, and we will loose type information. + // but, by returning a LinkedTuple, the tuple will still remember the exact type, and efficiently serializer/convert the values (without having to guess the type) + return new LinkedTuple(this, value); + } + + /// Appends the items of a tuple at the end of the current tuple. 
+ /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public ITuple Concat(ITuple tuple) + { + return STuple.Concat(this, tuple); + } + + /// Copy all the items of this tuple into an array at the specified offset + public void CopyTo(object[] array, int offset) + { + array[offset] = this.Item1; + array[offset + 1] = this.Item2; + array[offset + 2] = this.Item3; + array[offset + 3] = this.Item4; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void Deconstruct(out T1 item1, out T2 item2, out T3 item3, out T4 item4) + { + item1 = this.Item1; + item2 = this.Item2; + item3 = this.Item3; + item4 = this.Item4; + } + + /// Execute a lambda Action with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void With([NotNull] Action lambda) + { + lambda(this.Item1, this.Item2, this.Item3, this.Item4); + } + + /// Execute a lambda Function with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + /// Result of calling with the items of this tuple + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public TItem With([NotNull] Func lambda) + { + return lambda(this.Item1, this.Item2, this.Item3, this.Item4); + } + + public IEnumerator GetEnumerator() + { + yield return this.Item1; + yield return this.Item2; + yield return this.Item3; + yield return this.Item4; + } + + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() + { + return this.GetEnumerator(); + } + + public override string ToString() + { + return string.Concat( + "(", + STuple.Formatter.Stringify(this.Item1), ", ", + STuple.Formatter.Stringify(this.Item2), ", ", + STuple.Formatter.Stringify(this.Item3), ", ", + 
STuple.Formatter.Stringify(this.Item4), + ")" + ); + } + + public override bool Equals(object obj) + { + return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); + } + + public bool Equals(ITuple other) + { + return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public bool Equals(STuple other) + { + return SimilarValueComparer.Default.Equals(this.Item1, other.Item1) + && SimilarValueComparer.Default.Equals(this.Item2, other.Item2) + && SimilarValueComparer.Default.Equals(this.Item3, other.Item3) + && SimilarValueComparer.Default.Equals(this.Item4, other.Item4); + } + + public override int GetHashCode() + { + return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); + } + + public static bool operator ==(STuple left, STuple right) + { + var comparer = SimilarValueComparer.Default; + return comparer.Equals(left.Item1, right.Item1) + && comparer.Equals(left.Item2, right.Item2) + && comparer.Equals(left.Item3, right.Item3) + && comparer.Equals(left.Item4, right.Item4); + } + + public static bool operator !=(STuple left, STuple right) + { + var comparer = SimilarValueComparer.Default; + return !comparer.Equals(left.Item1, right.Item1) + || !comparer.Equals(left.Item2, right.Item2) + || !comparer.Equals(left.Item3, right.Item3) + || !comparer.Equals(left.Item4, right.Item4); + } + + bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) + { + if (other == null) return false; + if (other is STuple stuple) + { + return comparer.Equals(this.Item1, stuple.Item1) + && comparer.Equals(this.Item2, stuple.Item2) + && comparer.Equals(this.Item3, stuple.Item3) + && comparer.Equals(this.Item4, stuple.Item4); + } + if (other is ValueTuple vtuple) + { + return comparer.Equals(this.Item1, vtuple.Item1) + && comparer.Equals(this.Item2, vtuple.Item2) + && comparer.Equals(this.Item3, vtuple.Item3) + 
&& comparer.Equals(this.Item4, vtuple.Item4); + } + return TupleHelpers.Equals(this, other, comparer); + } + + int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) + { + return HashCodes.Combine( + comparer.GetHashCode(this.Item1), + comparer.GetHashCode(this.Item2), + comparer.GetHashCode(this.Item3), + comparer.GetHashCode(this.Item4) + ); + } + + [Pure] + public static implicit operator STuple([NotNull] Tuple t) + { + Contract.NotNull(t, nameof(t)); + return new STuple(t.Item1, t.Item2, t.Item3, t.Item4); + } + + [Pure, NotNull] + public static explicit operator Tuple(STuple t) + { + return new Tuple(t.Item1, t.Item2, t.Item3, t.Item4); + } + + public void Fill(ref (T1, T2, T3, T4) t) + { + t.Item1 = this.Item1; + t.Item2 = this.Item2; + t.Item3 = this.Item3; + t.Item4 = this.Item4; + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat(ValueTuple tuple) + { + return new STuple(this.Item1, this.Item2, this.Item3, this.Item4, tuple.Item1); + } + + /// Appends the items of a tuple at the end of the current tuple. 
+ /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [Pure] + public STuple Concat((T5, T6) tuple) + { + return new STuple(this.Item1, this.Item2, this.Item3, this.Item4, tuple.Item1, tuple.Item2); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public (T1, T2, T3, T4) ToValueTuple() + { + return (this.Item1, this.Item2, this.Item3, this.Item4); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator STuple((T1, T2, T3, T4) t) + { + return new STuple(t.Item1, t.Item2, t.Item3, t.Item4); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator (T1, T2, T3, T4) (STuple t) + { + return (t.Item1, t.Item2, t.Item3, t.Item4); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + bool IEquatable<(T1, T2, T3, T4)>.Equals((T1, T2, T3, T4) other) + { + return SimilarValueComparer.Default.Equals(this.Item1, this.Item1) + && SimilarValueComparer.Default.Equals(this.Item2, this.Item2) + && SimilarValueComparer.Default.Equals(this.Item3, this.Item3) + && SimilarValueComparer.Default.Equals(this.Item4, this.Item4); + } + + public static bool operator ==(STuple left, (T1, T2, T3, T4) right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + && SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + && SimilarValueComparer.Default.Equals(left.Item3, right.Item3) + && SimilarValueComparer.Default.Equals(left.Item4, right.Item4); + } + + public static bool operator ==((T1, T2, T3, T4) left, STuple right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + && SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + && SimilarValueComparer.Default.Equals(left.Item3, right.Item3) + && SimilarValueComparer.Default.Equals(left.Item4, right.Item4); + } + + public static bool operator !=(STuple left, (T1, T2, T3, T4) right) + 
{ + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + || !SimilarValueComparer.Default.Equals(left.Item3, right.Item3) + || !SimilarValueComparer.Default.Equals(left.Item4, right.Item4); + } + + public static bool operator !=((T1, T2, T3, T4) left, STuple right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + || !SimilarValueComparer.Default.Equals(left.Item3, right.Item3) + || !SimilarValueComparer.Default.Equals(left.Item4, right.Item4); + } + + public sealed class Comparer : IComparer> + { + + public static Comparer Default { [NotNull] get; } = new Comparer(); + + private static readonly Comparer Comparer1 = Comparer.Default; + private static readonly Comparer Comparer2 = Comparer.Default; + private static readonly Comparer Comparer3 = Comparer.Default; + private static readonly Comparer Comparer4 = Comparer.Default; + + private Comparer() { } + + public int Compare(STuple x, STuple y) + { + int cmp = Comparer1.Compare(x.Item1, y.Item1); + if (cmp == 0) cmp = Comparer2.Compare(x.Item2, y.Item2); + if (cmp == 0) cmp = Comparer3.Compare(x.Item3, y.Item3); + if (cmp == 0) cmp = Comparer4.Compare(x.Item4, y.Item4); + return cmp; + } + + } + + public sealed class EqualityComparer : IEqualityComparer> + { + + public static EqualityComparer Default { [NotNull] get; } = new EqualityComparer(); + + private static readonly EqualityComparer Comparer1 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer2 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer3 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer4 = EqualityComparer.Default; + + private EqualityComparer() { } + + public bool Equals(STuple x, STuple y) + { + return Comparer1.Equals(x.Item1, y.Item1) + && Comparer2.Equals(x.Item2, y.Item2) + && 
Comparer3.Equals(x.Item3, y.Item3) + && Comparer4.Equals(x.Item4, y.Item4); + } + + public int GetHashCode(STuple obj) + { + return HashCodes.Combine( + Comparer1.GetHashCode(obj.Item1), + Comparer2.GetHashCode(obj.Item2), + Comparer3.GetHashCode(obj.Item3), + Comparer4.GetHashCode(obj.Item4) + ); + } + } + + } + +} diff --git a/FoundationDB.Client/Shared/Tuples/STuple`5.cs b/FoundationDB.Client/Shared/Tuples/STuple`5.cs new file mode 100644 index 000000000..eb5ee695c --- /dev/null +++ b/FoundationDB.Client/Shared/Tuples/STuple`5.cs @@ -0,0 +1,465 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL DOXENSE BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#endregion

namespace Doxense.Collections.Tuples
{
	using System;
	using System.Collections;
	using System.Collections.Generic;
	using System.ComponentModel;
	using System.Diagnostics;
	using System.Runtime.CompilerServices;
	using Doxense.Collections.Tuples.Encoding;
	using Doxense.Diagnostics.Contracts;
	using Doxense.Runtime.Converters;
	using JetBrains.Annotations;

	/// <summary>Tuple that can hold five items</summary>
	/// <typeparam name="T1">Type of the 1st item</typeparam>
	/// <typeparam name="T2">Type of the 2nd item</typeparam>
	/// <typeparam name="T3">Type of the 3rd item</typeparam>
	/// <typeparam name="T4">Type of the 4th item</typeparam>
	/// <typeparam name="T5">Type of the 5th item</typeparam>
	[ImmutableObject(true), DebuggerDisplay("{ToString(),nq}")]
	public readonly struct STuple<T1, T2, T3, T4, T5> : ITuple, IEquatable<STuple<T1, T2, T3, T4, T5>>, IEquatable<(T1, T2, T3, T4, T5)>
	{
		// This is mostly used by code that create a lot of temporary quintets, to reduce the pressure on the Garbage Collector by allocating them on the stack.
		// Please note that if you return an STuple<T1, T2, T3, T4, T5> as an ITuple, it will be boxed by the CLR and all memory gains will be lost

		/// <summary>First element of the tuple</summary>
		public readonly T1 Item1;
		/// <summary>Second element of the tuple</summary>
		public readonly T2 Item2;
		/// <summary>Third element of the tuple</summary>
		public readonly T3 Item3;
		/// <summary>Fourth element of the tuple</summary>
		public readonly T4 Item4;
		/// <summary>Fifth and last element of the tuple</summary>
		public readonly T5 Item5;

		/// <summary>Create a tuple containing five items</summary>
		[DebuggerStepThrough]
		public STuple(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5)
		{
			this.Item1 = item1;
			this.Item2 = item2;
			this.Item3 = item3;
			this.Item4 = item4;
			this.Item5 = item5;
		}

		/// <summary>Number of items in this tuple</summary>
		public int Count => 5;

		/// <summary>Return the Nth item in this tuple</summary>
		public object this[int index]
		{
			get
			{
				switch (index)
				{
					case 0: case -5: return this.Item1;
					case 1: case -4: return this.Item2;
					case 2: case -3: return this.Item3;
					case 3: case -2: return this.Item4;
					case 4: case -1: return this.Item5;
					default: return TupleHelpers.FailIndexOutOfRange<object>(index, 5);
				}
			}
		}

		public ITuple this[int? fromIncluded, int? toExcluded]
		{
			[MethodImpl(MethodImplOptions.AggressiveInlining)]
			get { return TupleHelpers.Splice(this, fromIncluded, toExcluded); }
		}

		/// <summary>Return the typed value of an item of the tuple, given its position</summary>
		/// <typeparam name="TItem">Expected type of the item</typeparam>
		/// <param name="index">Position of the item (if negative, means relative from the end)</param>
		/// <returns>Value of the item at position <paramref name="index"/>, adapted into type <typeparamref name="TItem"/>.</returns>
		public TItem Get<TItem>(int index)
		{
			switch(index)
			{
				case 0: case -5: return TypeConverters.Convert<T1, TItem>(this.Item1);
				case 1: case -4: return TypeConverters.Convert<T2, TItem>(this.Item2);
				case 2: case -3: return TypeConverters.Convert<T3, TItem>(this.Item3);
				case 3: case -2: return TypeConverters.Convert<T4, TItem>(this.Item4);
				case 4: case -1: return TypeConverters.Convert<T5, TItem>(this.Item5);
				default: return TupleHelpers.FailIndexOutOfRange<TItem>(index, 5);
			}
		}

		/// <summary>Return the value of the last item in the tuple</summary>
		public T5 Last
		{
			[Pure]
			[MethodImpl(MethodImplOptions.AggressiveInlining)]
			get { return this.Item5; }
		}

		/// <summary>Return a tuple without the first item</summary>
		public STuple<T2, T3, T4, T5> Tail
		{
			[Pure]
			[MethodImpl(MethodImplOptions.AggressiveInlining)]
			get { return new STuple<T2, T3, T4, T5>(this.Item2, this.Item3, this.Item4, this.Item5); }
		}

		ITuple ITuple.Append<T6>(T6 value)
		{
			// the caller doesn't care about the return type, so just box everything into a list tuple
			return new ListTuple(new object[6] { this.Item1, this.Item2, this.Item3, this.Item4, this.Item5, value }, 0, 6);
		}

		/// <summary>Appends a single new item at the end of the current tuple.</summary>
		/// <param name="value">Value that will be added as an embedded item</param>
		/// <returns>New tuple with one extra item</returns>
		/// <remarks>If <paramref name="value"/> is a tuple, and you want to append the *items* of this tuple, and not the tuple itself, please call <see cref="Concat"/>!</remarks>
		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public STuple<T1, T2, T3, T4, T5, T6> Append<T6>(T6 value)
		{
			return new STuple<T1, T2, T3, T4, T5, T6>(this.Item1, this.Item2, this.Item3, this.Item4, this.Item5, value);
		}

		/// <summary>Appends the items of a tuple at the end of the current tuple.</summary>
		/// <param name="tuple">Tuple whose items are to be appended at the end</param>
		/// <returns>New tuple composed of the current tuple's items, followed by <paramref name="tuple"/>'s items</returns>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public ITuple Concat(ITuple tuple)
		{
			return STuple.Concat(this, tuple);
		}

		/// <summary>Copy all the items of this tuple into an array at the specified offset</summary>
		public void CopyTo(object[] array, int offset)
		{
			array[offset] = this.Item1;
			array[offset + 1] = this.Item2;
			array[offset + 2] = this.Item3;
			array[offset + 3] = this.Item4;
			array[offset + 4] = this.Item5;
		}

		[EditorBrowsable(EditorBrowsableState.Never)]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public void Deconstruct(out T1 item1, out T2 item2, out T3 item3, out T4 item4, out T5 item5)
		{
			item1 = this.Item1;
			item2 = this.Item2;
			item3 = this.Item3;
			item4 = this.Item4;
			item5 = this.Item5;
		}

		/// <summary>Execute a lambda Action with the content of this tuple</summary>
		/// <param name="lambda">Action that will be passed the content of this tuple as parameters</param>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public void With([NotNull] Action<T1, T2, T3, T4, T5> lambda)
		{
			lambda(this.Item1, this.Item2, this.Item3, this.Item4, this.Item5);
		}

		/// <summary>Execute a lambda Function with the content of this tuple</summary>
		/// <param name="lambda">Action that will be passed the content of this tuple as parameters</param>
		/// <returns>Result of calling <paramref name="lambda"/> with the items of this tuple</returns>
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public TItem With<TItem>([NotNull] Func<T1, T2, T3, T4, T5, TItem> lambda)
		{
			return lambda(this.Item1, this.Item2, this.Item3, this.Item4, this.Item5);
		}

		public IEnumerator<object> GetEnumerator()
		{
			yield return this.Item1;
			yield return this.Item2;
			yield return this.Item3;
			yield return this.Item4;
			yield return this.Item5;
		}

		System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
		{
			return this.GetEnumerator();
		}

		public override string ToString()
		{
			// note: use string.Concat (like STuple`4) instead of string.Join over a temp array, to avoid the extra allocation
			return string.Concat(
				"(",
				STuple.Formatter.Stringify(this.Item1), ", ",
				STuple.Formatter.Stringify(this.Item2), ", ",
				STuple.Formatter.Stringify(this.Item3), ", ",
				STuple.Formatter.Stringify(this.Item4), ", ",
				STuple.Formatter.Stringify(this.Item5),
				")"
			);
		}

		public override bool Equals(object obj)
		{
			return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default);
		}

		public bool Equals(ITuple other)
		{
			return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default);
		}

		[Pure, MethodImpl(MethodImplOptions.AggressiveInlining)]
		public bool Equals(STuple<T1, T2, T3, T4, T5> other)
		{
			return SimilarValueComparer.Default.Equals(this.Item1, other.Item1)
				&& SimilarValueComparer.Default.Equals(this.Item2, other.Item2)
				&& SimilarValueComparer.Default.Equals(this.Item3, other.Item3)
				&& SimilarValueComparer.Default.Equals(this.Item4, other.Item4)
				&& SimilarValueComparer.Default.Equals(this.Item5, other.Item5);
		}

		public override int GetHashCode()
		{
			return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default);
		}

		public static bool operator ==(STuple<T1, T2, T3, T4, T5> left, STuple<T1, T2, T3, T4, T5> right)
		{
			var comparer = SimilarValueComparer.Default;
			return comparer.Equals(left.Item1, right.Item1)
				&& comparer.Equals(left.Item2, right.Item2)
				&& comparer.Equals(left.Item3, right.Item3)
				&& comparer.Equals(left.Item4, right.Item4)
				&& comparer.Equals(left.Item5, right.Item5);
		}

		public static bool operator !=(STuple<T1, T2, T3, T4, T5> left, STuple<T1, T2, T3, T4, T5> right)
		{
			var comparer = SimilarValueComparer.Default;
			return !comparer.Equals(left.Item1, right.Item1)
				|| !comparer.Equals(left.Item2, right.Item2)
				|| !comparer.Equals(left.Item3, right.Item3)
				|| !comparer.Equals(left.Item4, right.Item4)
				|| !comparer.Equals(left.Item5, right.Item5);
		}

		bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer)
		{
			if (other == null) return false;
			if (other is STuple<T1, T2, T3, T4, T5> stuple)
			{
				return comparer.Equals(this.Item1, stuple.Item1)
					&& comparer.Equals(this.Item2, stuple.Item2)
					&& comparer.Equals(this.Item3, stuple.Item3)
					&& comparer.Equals(this.Item4, stuple.Item4)
					&& comparer.Equals(this.Item5, stuple.Item5);
			}
			if (other is ValueTuple<T1, T2, T3, T4, T5> vtuple)
			{
				return comparer.Equals(this.Item1, vtuple.Item1)
					&& comparer.Equals(this.Item2, vtuple.Item2)
					&& comparer.Equals(this.Item3, vtuple.Item3)
					&& comparer.Equals(this.Item4, vtuple.Item4)
					&& comparer.Equals(this.Item5, vtuple.Item5);
			}
			return TupleHelpers.Equals(this, other, comparer);
		}

		int IStructuralEquatable.GetHashCode(IEqualityComparer comparer)
		{
			return HashCodes.Combine(
				comparer.GetHashCode(this.Item1),
				comparer.GetHashCode(this.Item2),
				comparer.GetHashCode(this.Item3),
				comparer.GetHashCode(this.Item4),
				comparer.GetHashCode(this.Item5)
			);
		}

		[Pure]
		public static implicit operator STuple<T1, T2, T3, T4, T5>([NotNull] Tuple<T1, T2, T3, T4, T5> t)
		{
			Contract.NotNull(t, nameof(t));
			return new STuple<T1, T2, T3, T4, T5>(t.Item1, t.Item2, t.Item3, t.Item4, t.Item5);
		}

		[Pure, NotNull]
		public static explicit operator Tuple<T1, T2, T3, T4, T5>(STuple<T1, T2, T3, T4, T5> t)
		{
			return new Tuple<T1, T2, T3, T4, T5>(t.Item1, t.Item2, t.Item3, t.Item4, t.Item5);
		}

		public void Fill(ref (T1, T2, T3, T4, T5) t)
		{
			t.Item1 = this.Item1;
			t.Item2 = this.Item2;
			t.Item3 = this.Item3;
			t.Item4 = this.Item4;
			t.Item5 = this.Item5;
		}

		/// <summary>Appends the items of a tuple at the end of the current tuple.</summary>
		/// <param name="tuple">Tuple whose items are to be appended at the end</param>
		/// <returns>New tuple composed of the current tuple's items, followed by <paramref name="tuple"/>'s items</returns>
		[Pure]
		public STuple<T1, T2, T3, T4, T5, T6> Concat<T6>(ValueTuple<T6> tuple)
		{
			return new STuple<T1, T2, T3, T4, T5, T6>(this.Item1, this.Item2, this.Item3, this.Item4, this.Item5, tuple.Item1);
		}

		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public (T1, T2, T3, T4, T5) ToValueTuple()
		{
			return (this.Item1, this.Item2, this.Item3, this.Item4, this.Item5);
		}

		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static implicit operator STuple<T1, T2, T3, T4, T5>((T1, T2, T3, T4, T5) t)
		{
			return new STuple<T1, T2, T3, T4, T5>(t.Item1, t.Item2, t.Item3, t.Item4, t.Item5);
		}

		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		public static implicit operator (T1, T2, T3, T4, T5) (STuple<T1, T2, T3, T4, T5> t)
		{
			return (t.Item1, t.Item2, t.Item3, t.Item4, t.Item5);
		}

		[Pure]
		[MethodImpl(MethodImplOptions.AggressiveInlining)]
		bool IEquatable<(T1, T2, T3, T4, T5)>.Equals((T1, T2, T3, T4, T5) other)
		{
			// BUGFIX: was comparing this.ItemN against this.ItemN (always true), ignoring 'other' entirely
			return SimilarValueComparer.Default.Equals(this.Item1, other.Item1)
				&& SimilarValueComparer.Default.Equals(this.Item2, other.Item2)
				&& SimilarValueComparer.Default.Equals(this.Item3, other.Item3)
				&& SimilarValueComparer.Default.Equals(this.Item4, other.Item4)
				&& SimilarValueComparer.Default.Equals(this.Item5, other.Item5);
		}

		public static bool operator ==(STuple<T1, T2, T3, T4, T5> left, (T1, T2, T3, T4, T5) right)
		{
			return SimilarValueComparer.Default.Equals(left.Item1, right.Item1)
				&& SimilarValueComparer.Default.Equals(left.Item2, right.Item2)
				&& SimilarValueComparer.Default.Equals(left.Item3, right.Item3)
				&& SimilarValueComparer.Default.Equals(left.Item4, right.Item4)
				&& SimilarValueComparer.Default.Equals(left.Item5, right.Item5);
		}

		public static bool operator ==((T1, T2, T3, T4, T5) left, STuple<T1, T2, T3, T4, T5> right)
		{
			return SimilarValueComparer.Default.Equals(left.Item1, right.Item1)
				&& SimilarValueComparer.Default.Equals(left.Item2, right.Item2)
				&& SimilarValueComparer.Default.Equals(left.Item3, right.Item3)
				&& SimilarValueComparer.Default.Equals(left.Item4, right.Item4)
				&& SimilarValueComparer.Default.Equals(left.Item5, right.Item5);
		}

		public static bool operator !=(STuple<T1, T2, T3, T4, T5> left, (T1, T2, T3, T4, T5) right)
		{
			return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1)
				|| !SimilarValueComparer.Default.Equals(left.Item2, right.Item2)
				|| !SimilarValueComparer.Default.Equals(left.Item3, right.Item3)
				|| !SimilarValueComparer.Default.Equals(left.Item4, right.Item4)
				|| !SimilarValueComparer.Default.Equals(left.Item5, right.Item5);
		}

		public static bool operator !=((T1, T2, T3, T4, T5) left, STuple<T1, T2, T3, T4, T5> right)
		{
			return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1)
				|| !SimilarValueComparer.Default.Equals(left.Item2, right.Item2)
				|| !SimilarValueComparer.Default.Equals(left.Item3, right.Item3)
				|| !SimilarValueComparer.Default.Equals(left.Item4, right.Item4)
				|| !SimilarValueComparer.Default.Equals(left.Item5, right.Item5);
		}

		public sealed class Comparer : IComparer<STuple<T1, T2, T3, T4, T5>>
		{

			public static Comparer Default { [NotNull] get; } = new Comparer();

			private static readonly Comparer<T1> Comparer1 = Comparer<T1>.Default;
			private static readonly Comparer<T2> Comparer2 = Comparer<T2>.Default;
			private static readonly Comparer<T3> Comparer3 = Comparer<T3>.Default;
			private static readonly Comparer<T4> Comparer4 = Comparer<T4>.Default;
			private static readonly Comparer<T5> Comparer5 = Comparer<T5>.Default;

			private Comparer() { }

			public int Compare(STuple<T1, T2, T3, T4, T5> x, STuple<T1, T2, T3, T4, T5> y)
			{
				int cmp = Comparer1.Compare(x.Item1, y.Item1);
				if (cmp == 0) cmp = Comparer2.Compare(x.Item2, y.Item2);
				if (cmp == 0) cmp = Comparer3.Compare(x.Item3, y.Item3);
				if (cmp == 0) cmp = Comparer4.Compare(x.Item4, y.Item4);
				if (cmp == 0) cmp = Comparer5.Compare(x.Item5, y.Item5);
				return cmp;
			}

		}

		public sealed class EqualityComparer : IEqualityComparer<STuple<T1, T2, T3, T4, T5>>
		{

			public static EqualityComparer Default { [NotNull] get; } = new EqualityComparer();

			private static readonly EqualityComparer<T1> Comparer1 = EqualityComparer<T1>.Default;
			private static readonly EqualityComparer<T2> Comparer2 = EqualityComparer<T2>.Default;
			private static readonly EqualityComparer<T3> Comparer3 = EqualityComparer<T3>.Default;
			private static readonly EqualityComparer<T4> Comparer4 = EqualityComparer<T4>.Default;
			private static readonly EqualityComparer<T5> Comparer5 = EqualityComparer<T5>.Default;

			private EqualityComparer() { }

			public bool Equals(STuple<T1, T2, T3, T4, T5> x, STuple<T1, T2, T3, T4, T5> y)
			{
				return Comparer1.Equals(x.Item1, y.Item1)
					&& Comparer2.Equals(x.Item2, y.Item2)
					&& Comparer3.Equals(x.Item3, y.Item3)
					&& Comparer4.Equals(x.Item4, y.Item4)
					&& Comparer5.Equals(x.Item5, y.Item5);
			}

			public int GetHashCode(STuple<T1, T2, T3, T4, T5> obj)
			{
				return HashCodes.Combine(
					Comparer1.GetHashCode(obj.Item1),
					Comparer2.GetHashCode(obj.Item2),
					Comparer3.GetHashCode(obj.Item3),
					Comparer4.GetHashCode(obj.Item4),
					Comparer5.GetHashCode(obj.Item5)
				);
			}
		}

	}

}
diff --git a/FoundationDB.Client/Shared/Tuples/STuple`6.cs b/FoundationDB.Client/Shared/Tuples/STuple`6.cs
new file mode 100644
index 000000000..2403ea2c2
--- /dev/null
+++ b/FoundationDB.Client/Shared/Tuples/STuple`6.cs
@@ -0,0 +1,484 @@
#region BSD Licence
/* Copyright (c) 2013-2018, Doxense SAS
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.
 * Neither the name of Doxense nor the
   names of its contributors may be used to endorse or promote products
   derived from this software without specific prior written permission.
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Collections.Tuples +{ + using System; + using System.Collections; + using System.Collections.Generic; + using System.ComponentModel; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; + using JetBrains.Annotations; + + /// Tuple that can hold five items + /// Type of the 1st item + /// Type of the 2nd item + /// Type of the 3rd item + /// Type of the 4th item + /// Type of the 5th item + /// Type of the 5th item + [ImmutableObject(true), DebuggerDisplay("{ToString(),nq}")] + public readonly struct STuple : ITuple, IEquatable>, IEquatable<(T1, T2, T3, T4, T5, T6)> + { + // This is mostly used by code that create a lot of temporary quartets, to reduce the pressure on the Garbage Collector by allocating them on the stack. 
+ // Please note that if you return an STuple as an ITuple, it will be boxed by the CLR and all memory gains will be lost + + /// First element of the tuple + public readonly T1 Item1; + /// Second element of the tuple + public readonly T2 Item2; + /// Third element of the tuple + public readonly T3 Item3; + /// Fourth element of the tuple + public readonly T4 Item4; + /// Fifth of the tuple + public readonly T5 Item5; + /// Sixth and last element of the tuple + public readonly T6 Item6; + + /// Create a tuple containing for items + [DebuggerStepThrough] + public STuple(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + this.Item1 = item1; + this.Item2 = item2; + this.Item3 = item3; + this.Item4 = item4; + this.Item5 = item5; + this.Item6 = item6; + } + + /// Number of items in this tuple + public int Count => 6; + + /// Return the Nth item in this tuple + public object this[int index] + { + get + { + switch (index) + { + case 0: case -6: return this.Item1; + case 1: case -5: return this.Item2; + case 2: case -4: return this.Item3; + case 3: case -3: return this.Item4; + case 4: case -2: return this.Item5; + case 5: case -1: return this.Item6; + default: return TupleHelpers.FailIndexOutOfRange(index, 6); + } + } + } + + public ITuple this[int? fromIncluded, int? toExcluded] + { + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return TupleHelpers.Splice(this, fromIncluded, toExcluded); } + } + + /// Return the typed value of an item of the tuple, given its position + /// Expected type of the item + /// Position of the item (if negative, means relative from the end) + /// Value of the item at position , adapted into type . 
+ public TItem Get(int index) + { + switch(index) + { + case 0: case -6: return TypeConverters.Convert(this.Item1); + case 1: case -5: return TypeConverters.Convert(this.Item2); + case 2: case -4: return TypeConverters.Convert(this.Item3); + case 3: case -3: return TypeConverters.Convert(this.Item4); + case 4: case -2: return TypeConverters.Convert(this.Item5); + case 5: case -1: return TypeConverters.Convert(this.Item6); + default: return TupleHelpers.FailIndexOutOfRange(index, 6); + } + } + + /// Return the value of the last item in the tuple + public T6 Last + { + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return this.Item6; } + } + + /// Return a tuple without the first item + public STuple Tail + { + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + get { return new STuple(this.Item2, this.Item3, this.Item4, this.Item5, this.Item6); } + } + + ITuple ITuple.Append(T7 value) + { + // the caller doesn't care about the return type, so just box everything into a list tuple + return new ListTuple(new object[7] { this.Item1, this.Item2, this.Item3, this.Item4, this.Item5, this.Item6, value }, 0, 7); + } + + /// Appends a single new item at the end of the current tuple. + /// Value that will be added as an embedded item + /// New tuple with one extra item + /// If is a tuple, and you want to append the *items* of this tuple, and not the tuple itself, please call ! + [NotNull, Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public LinkedTuple Append(T7 value) + { + // the caller probably cares about the return type, since it is using a struct, but whatever tuple type we use will end up boxing this tuple on the heap, and we will loose type information. 
+ // but, by returning a LinkedTuple, the tuple will still remember the exact type, and efficiently serializer/convert the values (without having to guess the type) + return new LinkedTuple(this, value); + } + + /// Appends the items of a tuple at the end of the current tuple. + /// Tuple whose items are to be appended at the end + /// New tuple composed of the current tuple's items, followed by 's items + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public ITuple Concat(ITuple tuple) + { + return STuple.Concat(this, tuple); + } + + /// Copy all the items of this tuple into an array at the specified offset + public void CopyTo(object[] array, int offset) + { + array[offset] = this.Item1; + array[offset + 1] = this.Item2; + array[offset + 2] = this.Item3; + array[offset + 3] = this.Item4; + array[offset + 4] = this.Item5; + array[offset + 5] = this.Item6; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void Deconstruct(out T1 item1, out T2 item2, out T3 item3, out T4 item4, out T5 item5, out T6 item6) + { + item1 = this.Item1; + item2 = this.Item2; + item3 = this.Item3; + item4 = this.Item4; + item5 = this.Item5; + item6 = this.Item6; + } + + /// Execute a lambda Action with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void With([NotNull] Action lambda) + { + lambda(this.Item1, this.Item2, this.Item3, this.Item4, this.Item5, this.Item6); + } + + /// Execute a lambda Function with the content of this tuple + /// Action that will be passed the content of this tuple as parameters + /// Result of calling with the items of this tuple + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public TItem With([NotNull] Func lambda) + { + return lambda(this.Item1, this.Item2, this.Item3, this.Item4, this.Item5, this.Item6); + } + + public IEnumerator GetEnumerator() + { + yield return 
this.Item1; + yield return this.Item2; + yield return this.Item3; + yield return this.Item4; + yield return this.Item5; + yield return this.Item6; + } + + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() + { + return this.GetEnumerator(); + } + + public override string ToString() + { + return string.Join("", new[] + { + "(", + STuple.Formatter.Stringify(this.Item1), ", ", + STuple.Formatter.Stringify(this.Item2), ", ", + STuple.Formatter.Stringify(this.Item3), ", ", + STuple.Formatter.Stringify(this.Item4), ", ", + STuple.Formatter.Stringify(this.Item5), ", ", + STuple.Formatter.Stringify(this.Item6), + ")" + }); + } + + public override bool Equals(object obj) + { + return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); + } + + public bool Equals(ITuple other) + { + return other != null && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public bool Equals(STuple other) + { + return SimilarValueComparer.Default.Equals(this.Item1, other.Item1) + && SimilarValueComparer.Default.Equals(this.Item2, other.Item2) + && SimilarValueComparer.Default.Equals(this.Item3, other.Item3) + && SimilarValueComparer.Default.Equals(this.Item4, other.Item4) + && SimilarValueComparer.Default.Equals(this.Item5, other.Item5) + && SimilarValueComparer.Default.Equals(this.Item6, other.Item6); + } + + public override int GetHashCode() + { + return ((IStructuralEquatable)this).GetHashCode(SimilarValueComparer.Default); + } + + public static bool operator ==(STuple left, STuple right) + { + var comparer = SimilarValueComparer.Default; + return comparer.Equals(left.Item1, right.Item1) + && comparer.Equals(left.Item2, right.Item2) + && comparer.Equals(left.Item3, right.Item3) + && comparer.Equals(left.Item4, right.Item4) + && comparer.Equals(left.Item5, right.Item5) + && comparer.Equals(left.Item6, right.Item6); + } + + public static bool 
operator !=(STuple left, STuple right) + { + var comparer = SimilarValueComparer.Default; + return !comparer.Equals(left.Item1, right.Item1) + || !comparer.Equals(left.Item2, right.Item2) + || !comparer.Equals(left.Item3, right.Item3) + || !comparer.Equals(left.Item4, right.Item4) + || !comparer.Equals(left.Item5, right.Item5) + || !comparer.Equals(left.Item6, right.Item6); + } + + bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) + { + if (other == null) return false; + if (other is STuple stuple) + { + return comparer.Equals(this.Item1, stuple.Item1) + && comparer.Equals(this.Item2, stuple.Item2) + && comparer.Equals(this.Item3, stuple.Item3) + && comparer.Equals(this.Item4, stuple.Item4) + && comparer.Equals(this.Item5, stuple.Item5) + && comparer.Equals(this.Item6, stuple.Item6); + } + if (other is ValueTuple vtuple) + { + return comparer.Equals(this.Item1, vtuple.Item1) + && comparer.Equals(this.Item2, vtuple.Item2) + && comparer.Equals(this.Item3, vtuple.Item3) + && comparer.Equals(this.Item4, vtuple.Item4) + && comparer.Equals(this.Item5, vtuple.Item5) + && comparer.Equals(this.Item6, vtuple.Item6); + } + return TupleHelpers.Equals(this, other, comparer); + } + + int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) + { + return HashCodes.Combine( + comparer.GetHashCode(this.Item1), + comparer.GetHashCode(this.Item2), + comparer.GetHashCode(this.Item3), + comparer.GetHashCode(this.Item4), + comparer.GetHashCode(this.Item5), + comparer.GetHashCode(this.Item6) + ); + } + + [Pure] + public static implicit operator STuple([NotNull] Tuple tuple) + { + Contract.NotNull(tuple, nameof(tuple)); + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5, tuple.Item6); + } + + [Pure, NotNull] + public static explicit operator Tuple(STuple tuple) + { + return new Tuple(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5, tuple.Item6); + } + + public void Fill(ref (T1, T2, T3, T4, T5, T6) t) + { + 
t.Item1 = this.Item1; + t.Item2 = this.Item2; + t.Item3 = this.Item3; + t.Item4 = this.Item4; + t.Item5 = this.Item5; + t.Item6 = this.Item6; + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public (T1, T2, T3, T4, T5, T6) ToValueTuple() + { + return (this.Item1, this.Item2, this.Item3, this.Item4, this.Item5, this.Item6); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator STuple((T1, T2, T3, T4, T5, T6) t) + { + return new STuple(t.Item1, t.Item2, t.Item3, t.Item4, t.Item5, t.Item6); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator (T1, T2, T3, T4, T5, T6) (STuple t) + { + return (t.Item1, t.Item2, t.Item3, t.Item4, t.Item5, t.Item6); + } + + [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + bool IEquatable<(T1, T2, T3, T4, T5, T6)>.Equals((T1, T2, T3, T4, T5, T6) other) + { + return SimilarValueComparer.Default.Equals(this.Item1, this.Item1) + && SimilarValueComparer.Default.Equals(this.Item2, this.Item2) + && SimilarValueComparer.Default.Equals(this.Item3, this.Item3) + && SimilarValueComparer.Default.Equals(this.Item4, this.Item4) + && SimilarValueComparer.Default.Equals(this.Item5, this.Item5) + && SimilarValueComparer.Default.Equals(this.Item6, this.Item6); + } + + public static bool operator ==(STuple left, (T1, T2, T3, T4, T5, T6) right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + && SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + && SimilarValueComparer.Default.Equals(left.Item3, right.Item3) + && SimilarValueComparer.Default.Equals(left.Item4, right.Item4) + && SimilarValueComparer.Default.Equals(left.Item5, right.Item5) + && SimilarValueComparer.Default.Equals(left.Item6, right.Item6); + } + + public static bool operator ==((T1, T2, T3, T4, T5, T6) left, STuple right) + { + return SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + && 
SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + && SimilarValueComparer.Default.Equals(left.Item3, right.Item3) + && SimilarValueComparer.Default.Equals(left.Item4, right.Item4) + && SimilarValueComparer.Default.Equals(left.Item5, right.Item5) + && SimilarValueComparer.Default.Equals(left.Item6, right.Item6); + } + + public static bool operator !=(STuple left, (T1, T2, T3, T4, T5, T6) right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + || !SimilarValueComparer.Default.Equals(left.Item3, right.Item3) + || !SimilarValueComparer.Default.Equals(left.Item4, right.Item4) + || !SimilarValueComparer.Default.Equals(left.Item5, right.Item5) + || !SimilarValueComparer.Default.Equals(left.Item6, right.Item6); + } + + public static bool operator !=((T1, T2, T3, T4, T5, T6) left, STuple right) + { + return !SimilarValueComparer.Default.Equals(left.Item1, right.Item1) + || !SimilarValueComparer.Default.Equals(left.Item2, right.Item2) + || !SimilarValueComparer.Default.Equals(left.Item3, right.Item3) + || !SimilarValueComparer.Default.Equals(left.Item4, right.Item4) + || !SimilarValueComparer.Default.Equals(left.Item5, right.Item5) + || !SimilarValueComparer.Default.Equals(left.Item6, right.Item6); + } + + public sealed class Comparer : IComparer> + { + + public static Comparer Default { [NotNull] get; } = new Comparer(); + + private static readonly Comparer Comparer1 = Comparer.Default; + private static readonly Comparer Comparer2 = Comparer.Default; + private static readonly Comparer Comparer3 = Comparer.Default; + private static readonly Comparer Comparer4 = Comparer.Default; + private static readonly Comparer Comparer5 = Comparer.Default; + private static readonly Comparer Comparer6 = Comparer.Default; + + private Comparer() { } + + public int Compare(STuple x, STuple y) + { + int cmp = Comparer1.Compare(x.Item1, y.Item1); + if (cmp == 0) cmp = 
Comparer2.Compare(x.Item2, y.Item2); + if (cmp == 0) cmp = Comparer3.Compare(x.Item3, y.Item3); + if (cmp == 0) cmp = Comparer4.Compare(x.Item4, y.Item4); + if (cmp == 0) cmp = Comparer5.Compare(x.Item5, y.Item5); + if (cmp == 0) cmp = Comparer6.Compare(x.Item6, y.Item6); + return cmp; + } + + } + + public sealed class EqualityComparer : IEqualityComparer> + { + + public static EqualityComparer Default { [NotNull] get; } = new EqualityComparer(); + + private static readonly EqualityComparer Comparer1 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer2 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer3 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer4 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer5 = EqualityComparer.Default; + private static readonly EqualityComparer Comparer6 = EqualityComparer.Default; + + private EqualityComparer() { } + + public bool Equals(STuple x, STuple y) + { + return Comparer1.Equals(x.Item1, y.Item1) + && Comparer2.Equals(x.Item2, y.Item2) + && Comparer3.Equals(x.Item3, y.Item3) + && Comparer4.Equals(x.Item4, y.Item4) + && Comparer5.Equals(x.Item5, y.Item5) + && Comparer6.Equals(x.Item6, y.Item6); + } + + public int GetHashCode(STuple obj) + { + return HashCodes.Combine( + Comparer1.GetHashCode(obj.Item1), + Comparer2.GetHashCode(obj.Item2), + Comparer3.GetHashCode(obj.Item3), + Comparer4.GetHashCode(obj.Item4), + Comparer5.GetHashCode(obj.Item5), + Comparer6.GetHashCode(obj.Item6) + ); + } + } + + } +} diff --git a/FoundationDB.Client/Layers/Tuples/FdbTupleComparisons.cs b/FoundationDB.Client/Shared/Tuples/TupleComparisons.cs similarity index 76% rename from FoundationDB.Client/Layers/Tuples/FdbTupleComparisons.cs rename to FoundationDB.Client/Shared/Tuples/TupleComparisons.cs index a75534672..7c9c0c7e2 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbTupleComparisons.cs +++ 
b/FoundationDB.Client/Shared/Tuples/TupleComparisons.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,17 +26,17 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { - using FoundationDB.Client.Converters; - using JetBrains.Annotations; using System; using System.Collections; using System.Collections.Generic; using System.Runtime.CompilerServices; + using Doxense.Runtime.Converters; + using JetBrains.Annotations; /// Helper class for tuple comparisons - public static class FdbTupleComparisons + public static class TupleComparisons { /// Tuple comparer that treats similar values as equal ("123" = 123 = 123L = 123.0d) public static readonly EqualityComparer Default = new EqualityComparer(SimilarValueComparer.Default); @@ -44,10 +44,7 @@ public static class FdbTupleComparisons /// Tuple comparer that uses the default BCL object comparison ("123" != 123 != 123L != 123.0d) public static readonly EqualityComparer Bcl = new EqualityComparer(EqualityComparer.Default); - /// Tuple comparer that compared the packed bytes (slow!) 
- public static readonly BinaryComparer Binary = new BinaryComparer(); - - public sealed class EqualityComparer : IEqualityComparer, IEqualityComparer + public sealed class EqualityComparer : IEqualityComparer, IEqualityComparer { private readonly IEqualityComparer m_comparer; @@ -56,7 +53,7 @@ internal EqualityComparer(IEqualityComparer comparer) m_comparer = comparer; } - public bool Equals(IFdbTuple x, IFdbTuple y) + public bool Equals(ITuple x, ITuple y) { if (object.ReferenceEquals(x, y)) return true; if (object.ReferenceEquals(x, null) || object.ReferenceEquals(y, null)) return false; @@ -64,9 +61,9 @@ public bool Equals(IFdbTuple x, IFdbTuple y) return x.Equals(y, m_comparer); } - public int GetHashCode(IFdbTuple obj) + public int GetHashCode(ITuple obj) { - return obj != null ? obj.GetHashCode(m_comparer) : 0; + return HashCodes.Compute(obj, m_comparer); } public new bool Equals(object x, object y) @@ -74,10 +71,9 @@ public int GetHashCode(IFdbTuple obj) if (object.ReferenceEquals(x, y)) return true; if (x == null || y == null) return false; - var t = x as IFdbTuple; - if (t != null) return t.Equals(y, m_comparer); + if (x is ITuple t) return t.Equals(y, m_comparer); - t = y as IFdbTuple; + t = y as ITuple; if (t != null) return t.Equals(x, m_comparer); return false; @@ -87,54 +83,13 @@ public int GetHashCode(object obj) { if (obj == null) return 0; - var t = obj as IFdbTuple; + var t = obj as ITuple; if (!object.ReferenceEquals(t, null)) return t.GetHashCode(m_comparer); // returns a hash base on the pointers return RuntimeHelpers.GetHashCode(obj); } } - - public sealed class BinaryComparer : IEqualityComparer, IEqualityComparer - { - internal BinaryComparer() - { } - - - public bool Equals(IFdbTuple x, IFdbTuple y) - { - if (object.ReferenceEquals(x, y)) return true; - if (object.ReferenceEquals(x, null) || object.ReferenceEquals(y, null)) return false; - - return x.ToSlice().Equals(y.ToSlice()); - } - - public int GetHashCode(IFdbTuple obj) - { - return 
object.ReferenceEquals(obj, null) ? 0 : obj.ToSlice().GetHashCode(); - } - - public new bool Equals(object x, object y) - { - if (object.ReferenceEquals(x, y)) return true; - if (x == null || y == null) return false; - - var tx = x as IFdbTuple; - var ty = y as IFdbTuple; - if (object.ReferenceEquals(tx, null) || object.ReferenceEquals(ty, null)) return false; - return tx.ToSlice().Equals(ty.ToSlice()); - } - - public int GetHashCode(object obj) - { - if (obj == null) return 0; - - var tuple = obj as IFdbTuple; - if (!object.ReferenceEquals(tuple, null)) return tuple.ToSlice().GetHashCode(); - - return RuntimeHelpers.GetHashCode(obj); - } - } /// Create a new instance that compares a single item position in two tuples /// Type of the item to compare @@ -142,7 +97,7 @@ public int GetHashCode(object obj) /// Comparer for the item's type /// New comparer instance [NotNull] - public static IComparer Composite(int offset = 0, IComparer comparer = null) + public static IComparer Composite(int offset = 0, IComparer comparer = null) { return new CompositeComparer(offset, comparer); } @@ -155,7 +110,7 @@ public static IComparer Composite(int offset = 0, IComparer c /// Comparer for the second item's type /// New comparer instance [NotNull] - public static IComparer Composite(int offset = 0, IComparer comparer1 = null, IComparer comparer2 = null) + public static CompositeComparer Composite(int offset = 0, IComparer comparer1 = null, IComparer comparer2 = null) { return new CompositeComparer(offset, comparer1, comparer2); } @@ -170,17 +125,17 @@ public static IComparer Composite(int offset = 0, IComparerComparer for the third item's type /// New comparer instance [NotNull] - public static IComparer Composite(int offset = 0, IComparer comparer1 = null, IComparer comparer2 = null, IComparer comparer3 = null) + public static IComparer Composite(int offset = 0, IComparer comparer1 = null, IComparer comparer2 = null, IComparer comparer3 = null) { return new 
CompositeComparer(offset, comparer1, comparer2, comparer3); } /// Comparer that compares tuples with at least 1 item /// Type of the item - public sealed class CompositeComparer : IComparer + public sealed class CompositeComparer : IComparer { - public static readonly IComparer Default = new CompositeComparer(); + public static readonly IComparer Default = new CompositeComparer(); /// Constructor for a new tuple comparer public CompositeComparer() @@ -203,16 +158,16 @@ public CompositeComparer(int offset, IComparer comparer) /// Offset in the tuples where the comparison starts /// If negative, comparison starts from the end. - public int Offset { get; private set; } + public int Offset { get; } /// Comparer for the first element (at possition ) - public IComparer Comparer { get; private set; } + public IComparer Comparer { get; } /// Compare a single item in both tuples /// First tuple /// Second tuple /// Returns a positive value if x is greater than y, a negative value if x is less than y and 0 if x is equal to y. - public int Compare(IFdbTuple x, IFdbTuple y) + public int Compare(ITuple x, ITuple y) { if (y == null) return x == null ? 0 : +1; if (x == null) return -1; @@ -230,10 +185,10 @@ public int Compare(IFdbTuple x, IFdbTuple y) /// Comparer that compares tuples with at least 2 items /// Type of the first item /// Type of the second item - public sealed class CompositeComparer : IComparer + public sealed class CompositeComparer : IComparer, IComparer>, IComparer<(T1, T2)> { - public static readonly IComparer Default = new CompositeComparer(); + public static readonly IComparer Default = new CompositeComparer(); /// Constructor for a new tuple comparer public CompositeComparer() @@ -258,19 +213,19 @@ public CompositeComparer(int offset, IComparer comparer1, IComparer comp /// Offset in the tuples where the comparison starts /// If negative, comparison starts from the end. 
- public int Offset { get; private set; } + public int Offset { get; } /// Comparer for the first element (at possition ) - public IComparer Comparer1 { get; private set; } + public IComparer Comparer1 { get; } /// Comparer for the second element (at possition + 1) - public IComparer Comparer2 { get; private set; } + public IComparer Comparer2 { get; } /// Compare up to two items in both tuples /// First tuple /// Second tuple /// Returns a positive value if x is greater than y, a negative value if x is less than y and 0 if x is equal to y. - public int Compare(IFdbTuple x, IFdbTuple y) + public int Compare(ITuple x, ITuple y) { if (y == null) return x == null ? 0 : +1; if (x == null) return -1; @@ -281,13 +236,37 @@ public int Compare(IFdbTuple x, IFdbTuple y) int p = this.Offset; - int c = this.Comparer1.Compare(x.Get(p), y.Get(p)); - if (c != 0) return c; + int cmp = this.Comparer1.Compare(x.Get(p), y.Get(p)); + if (cmp != 0) return cmp; if (ny == 1 || nx == 1) return nx - ny; - c = this.Comparer2.Compare(x.Get(p + 1), y.Get(p + 1)); + cmp = this.Comparer2.Compare(x.Get(p + 1), y.Get(p + 1)); - return c; + return cmp; + } + + /// Compare two tuples + /// First tuple + /// Second tuple + /// Returns a positive value if x is greater than y, a negative value if x is less than y and 0 if x is equal to y. + public int Compare(STuple x, STuple y) + { + if (this.Offset != 0) throw new InvalidOperationException("Cannot compare fixed tuples with non-zero offset."); + int cmp = this.Comparer1.Compare(x.Item1, y.Item1); + if (cmp == 0) cmp = this.Comparer2.Compare(x.Item2, y.Item2); + return cmp; + } + + /// Compare two tuples + /// First tuple + /// Second tuple + /// Returns a positive value if x is greater than y, a negative value if x is less than y and 0 if x is equal to y. 
+ public int Compare((T1, T2) x, (T1, T2) y) + { + if (this.Offset != 0) throw new InvalidOperationException("Cannot compare fixed tuples with non-zero offset."); + int cmp = this.Comparer1.Compare(x.Item1, y.Item1); + if (cmp == 0) cmp = this.Comparer2.Compare(x.Item2, y.Item2); + return cmp; } } @@ -296,10 +275,10 @@ public int Compare(IFdbTuple x, IFdbTuple y) /// Type of the first item /// Type of the second item /// Type of the thrid item - public sealed class CompositeComparer : IComparer + public sealed class CompositeComparer : IComparer { - public static readonly IComparer Default = new CompositeComparer(); + public static readonly IComparer Default = new CompositeComparer(); /// Constructor for a new tuple comparer public CompositeComparer() @@ -326,22 +305,22 @@ public CompositeComparer(int offset, IComparer comparer1, IComparer comp /// Offset in the tuples where the comparison starts /// If negative, comparison starts from the end. - public int Offset { get; private set; } + public int Offset { get; } /// Comparer for the first element (at possition ) - public IComparer Comparer1 { get; private set; } + public IComparer Comparer1 { get; } /// Comparer for the second element (at possition + 1) - public IComparer Comparer2 { get; private set; } + public IComparer Comparer2 { get; } /// Comparer for the third element (at possition + 2) - public IComparer Comparer3 { get; private set; } + public IComparer Comparer3 { get; } /// Compare up to three items in both tuples /// First tuple /// Second tuple /// Returns a positive value if x is greater than y, a negative value if x is less than y and 0 if x is equal to y. - public int Compare(IFdbTuple x, IFdbTuple y) + public int Compare(ITuple x, ITuple y) { if (y == null) return x == null ? 
0 : +1; if (x == null) return -1; diff --git a/FoundationDB.Client/Layers/Tuples/FdbTupleExtensions.cs b/FoundationDB.Client/Shared/Tuples/TupleExtensions.cs similarity index 64% rename from FoundationDB.Client/Layers/Tuples/FdbTupleExtensions.cs rename to FoundationDB.Client/Shared/Tuples/TupleExtensions.cs index 71d965e6e..562ee330c 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbTupleExtensions.cs +++ b/FoundationDB.Client/Shared/Tuples/TupleExtensions.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,38 +26,41 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { - using FoundationDB.Client; - using JetBrains.Annotations; using System; using System.Collections.Generic; + using System.ComponentModel; + using System.Runtime.CompilerServices; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Add extensions methods that deal with tuples on various types - public static class FdbTupleExtensions + public static class TupleExtensions { - #region IFdbTuple extensions... + #region ITuple extensions... 
/// Returns true if the tuple is either null or empty [ContractAnnotation("null => true")] - public static bool IsNullOrEmpty(this IFdbTuple tuple) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool IsNullOrEmpty(this ITuple tuple) { return tuple == null || tuple.Count == 0; } /// Returns true if the tuple is not null, and contains only one item [ContractAnnotation("null => false")] - public static bool IsSingleton(this IFdbTuple tuple) + public static bool IsSingleton(this ITuple tuple) { return tuple != null && tuple.Count == 1; } /// Returns an array containing all the objects of a tuple [NotNull, ItemCanBeNull] - public static object[] ToArray([NotNull] this IFdbTuple tuple) + public static object[] ToArray([NotNull] this ITuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); + Contract.NotNull(tuple, nameof(tuple)); var items = new object[tuple.Count]; if (items.Length > 0) @@ -69,9 +72,9 @@ public static object[] ToArray([NotNull] this IFdbTuple tuple) /// Returns a typed array containing all the items of a tuple [NotNull] - public static T[] ToArray([NotNull] this IFdbTuple tuple) + public static T[] ToArray([NotNull] this ITuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); + Contract.NotNull(tuple, nameof(tuple)); var items = new T[tuple.Count]; if (items.Length > 0) @@ -84,120 +87,49 @@ public static T[] ToArray([NotNull] this IFdbTuple tuple) return items; } - /// Returns a byte array containing the packed version of a tuple - [CanBeNull] - public static byte[] GetBytes([NotNull] this IFdbTuple tuple) - { - return tuple.ToSlice().GetBytes(); - } - /// Returns the typed value of the first item in this tuple /// Expected type of the first item /// Value of the first item, adapted into type . 
- public static T First([NotNull] this IFdbTuple tuple) + [Pure] + [ContractAnnotation("null => true")] + public static T First([NotNull] this ITuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); return tuple.Get(0); } - /// Appends two values at the end of a tuple - [NotNull] - public static IFdbTuple Append([NotNull] this IFdbTuple tuple, T1 value1, T2 value2) + /// Return the typed value of the last item in the tuple + /// Expected type of the item + /// Value of the last item of this tuple, adapted into type + /// Equivalent of tuple.Get<T>(-1) + [Pure] + [ContractAnnotation("null => true")] + public static T Last([NotNull] this ITuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); - return new FdbJoinedTuple(tuple, FdbTuple.Create(value1, value2)); + return tuple.Get(-1); } - /// Appends three values at the end of a tuple + /// Appends two values at the end of a tuple [NotNull] - public static IFdbTuple Append([NotNull] this IFdbTuple tuple, T1 value1, T2 value2, T3 value3) + public static ITuple Append([NotNull] this ITuple tuple, T1 value1, T2 value2) { - if (tuple == null) throw new ArgumentNullException("tuple"); - return new FdbJoinedTuple(tuple, FdbTuple.Create(value1, value2, value3)); + Contract.NotNull(tuple, nameof(tuple)); + return new JoinedTuple(tuple, STuple.Create(value1, value2)); } - /// Appends four values at the end of a tuple + /// Appends three values at the end of a tuple [NotNull] - public static IFdbTuple Append([NotNull] this IFdbTuple tuple, T1 value1, T2 value2, T3 value3, T4 value4) + public static ITuple Append([NotNull] this ITuple tuple, T1 value1, T2 value2, T3 value3) { - if (tuple == null) throw new ArgumentNullException("tuple"); - return new FdbJoinedTuple(tuple, FdbTuple.Create(value1, value2, value3, value4)); - } - - /// Creates a key range containing all children of this tuple, from tuple.pack()+'\0' to tuple.pack()+'\xFF' - /// Tuple that is the suffix of all keys - /// 
Range of all keys suffixed by the tuple. The tuple itself will not be included - public static FdbKeyRange ToRange([NotNull] this IFdbTuple tuple) - { - return ToRange(tuple, false); - } - - /// Creates a key range containing all children of tuple, optionally including the tuple itself. - /// Tuple that is the prefix of all keys - /// If true, the tuple key itself is included, if false only the children keys are included - /// Range of all keys suffixed by the tuple. The tuple itself will be included if is true - public static FdbKeyRange ToRange([NotNull] this IFdbTuple tuple, bool includePrefix) - { - if (tuple == null) throw new ArgumentNullException("tuple"); - - // We want to allocate only one byte[] to store both keys, and map both Slice to each chunk - // So we will serialize the tuple two times in the same writer - - var writer = new TupleWriter(); - - tuple.PackTo(ref writer); - writer.Output.EnsureBytes(writer.Output.Position + 2); - if (!includePrefix) writer.Output.WriteByte(0); - int p0 = writer.Output.Position; - - tuple.PackTo(ref writer); - writer.Output.WriteByte(0xFF); - int p1 = writer.Output.Position; - - return new FdbKeyRange( - new Slice(writer.Output.Buffer, 0, p0), - new Slice(writer.Output.Buffer, p0, p1 - p0) - ); - } - - /// Creates pre-packed and isolated copy of this tuple - /// - /// Create a copy of the tuple that can be reused frequently to pack values - /// If the tuple is already memoized, the current instance will be returned - [CanBeNull, ContractAnnotation("null => null")] - public static FdbMemoizedTuple Memoize(this IFdbTuple tuple) - { - if (tuple == null) return null; - - var memoized = tuple as FdbMemoizedTuple ?? 
new FdbMemoizedTuple(tuple.ToArray(), tuple.ToSlice()); - - return memoized; + Contract.NotNull(tuple, nameof(tuple)); + return new JoinedTuple(tuple, STuple.Create(value1, value2, value3)); } - /// Unpack a tuple from this slice - /// - /// Unpacked tuple if the slice contains data, FdbTuple.Empty if the slice is empty, or null if the slice is Slice.Nil + /// Appends four values at the end of a tuple [NotNull] - public static IFdbTuple ToTuple(this Slice slice) + public static ITuple Append([NotNull] this ITuple tuple, T1 value1, T2 value2, T3 value3, T4 value4) { - //note: this method is here to allow a fluent API with method chaining, like "something.ToFoundationDbKey().ToTuple().With((int x, int y) => .....)" - return FdbTuple.Unpack(slice); - } - - /// Unpack a tuple from this slice - /// - /// Unpacked tuple if the slice contains data, FdbTuple.Empty if the slice is empty, or null if the slice is Slice.Nil - [CanBeNull] - public static IFdbTuple ToTupleOrDefault(this Slice slice) - { - //note: this method is here to allow a fluent API with method chaining, like "something.ToFoundationDbKey().ToTuple().With((int x, int y) => .....)" - - if (slice.IsNullOrEmpty) - { - return slice.HasValue ? 
FdbTuple.Empty : null; - } - - return FdbTuple.Unpack(slice); + Contract.NotNull(tuple, nameof(tuple)); + return new JoinedTuple(tuple, STuple.Create(value1, value2, value3, value4)); } /// Returns a substring of the current tuple @@ -205,9 +137,9 @@ public static IFdbTuple ToTupleOrDefault(this Slice slice) /// Offset from the start of the current tuple (negative value means from the end) /// Tuple that contains only the items past the first items of the current tuple [NotNull] - public static IFdbTuple Substring([NotNull] this IFdbTuple tuple, int offset) + public static ITuple Substring([NotNull] this ITuple tuple, int offset) { - if (tuple == null) throw new ArgumentNullException("tuple"); + Contract.NotNull(tuple, nameof(tuple)); return tuple[offset, null]; } @@ -218,12 +150,12 @@ public static IFdbTuple Substring([NotNull] this IFdbTuple tuple, int offset) /// Number of items to keep /// Tuple that contains only the selected items from the current tuple [NotNull] - public static IFdbTuple Substring([NotNull] this IFdbTuple tuple, int offset, int count) + public static ITuple Substring([NotNull] this ITuple tuple, int offset, int count) { - if (tuple == null) throw new ArgumentNullException("tuple"); - if (count < 0) throw new ArgumentOutOfRangeException("count", count, "Count cannot be negative."); + Contract.NotNull(tuple, nameof(tuple)); + Contract.Positive(count, nameof(count)); - if (count == 0) return FdbTuple.Empty; + if (count == 0) return STuple.Empty; return tuple[offset, offset + count]; } @@ -236,7 +168,7 @@ public static IFdbTuple Substring([NotNull] this IFdbTuple tuple, int offset, in /// (a, b, c).Truncate(2) => (a, b) /// (a, b, c).Truncate(-2) => (b, c) /// - public static IFdbTuple Truncate([NotNull] this IFdbTuple tuple, int count) + public static ITuple Truncate([NotNull] this ITuple tuple, int count) { tuple.OfSizeAtLeast(Math.Abs(count)); @@ -255,35 +187,35 @@ public static IFdbTuple Truncate([NotNull] this IFdbTuple tuple, int count) 
/// Larger tuple /// Smaller tuple /// True if the beginning of is equal to or if both tuples are identical - public static bool StartsWith([NotNull] this IFdbTuple left, [NotNull] IFdbTuple right) + public static bool StartsWith([NotNull] this ITuple left, [NotNull] ITuple right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + Contract.NotNull(left, nameof(left)); + Contract.NotNull(right, nameof(right)); - //REVIEW: move this on IFdbTuple interface ? - return FdbTuple.StartsWith(left, right); + //REVIEW: move this on ITuple interface ? + return TupleHelpers.StartsWith(left, right); } /// Test if the end of current tuple is equal to another tuple /// Larger tuple /// Smaller tuple /// True if the end of is equal to or if both tuples are identical - public static bool EndsWith([NotNull] this IFdbTuple left, [NotNull] IFdbTuple right) + public static bool EndsWith([NotNull] this ITuple left, [NotNull] ITuple right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + Contract.NotNull(left, nameof(left)); + Contract.NotNull(right, nameof(right)); - //REVIEW: move this on IFdbTuple interface ? - return FdbTuple.EndsWith(left, right); + //REVIEW: move this on ITuple interface ? 
+ return TupleHelpers.EndsWith(left, right); } /// Transform a tuple of N elements into a list of N singletons /// Tuple that contains any number of elements /// Sequence of tuples that contains a single element /// (123, ABC, false,).Explode() => [ (123,), (ABC,), (false,) ] - public static IEnumerable Explode([NotNull] this IFdbTuple tuple) + public static IEnumerable Explode([NotNull] this ITuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); + Contract.NotNull(tuple, nameof(tuple)); int p = 0; int n = tuple.Count; @@ -294,22 +226,6 @@ public static IEnumerable Explode([NotNull] this IFdbTuple tuple) } } - /// Returns a key that is immediately after the packed representation of this tuple - /// This is the equivalent of manually packing the tuple and incrementing the resulting slice - public static Slice Increment([NotNull] this IFdbTuple tuple) - { - if (tuple == null) throw new ArgumentNullException("tuple"); - return FdbKey.Increment(tuple.ToSlice()); - } - - /// Returns a Key Selector pair that defines the range of all items contained under this tuple - public static FdbKeySelectorPair ToSelectorPair([NotNull] this IFdbTuple tuple) - { - if (tuple == null) throw new ArgumentNullException("tuple"); - - return FdbKeySelectorPair.StartsWith(tuple.ToSlice()); - } - /// Verify that this tuple has the expected size /// Tuple which must be of a specific size /// Expected number of items in this tuple @@ -318,7 +234,7 @@ public static FdbKeySelectorPair ToSelectorPair([NotNull] this IFdbTuple tuple) /// If is smaller or larger than [ContractAnnotation("halt <= tuple:null")] [NotNull] - public static IFdbTuple OfSize(this IFdbTuple tuple, int size) + public static ITuple OfSize(this ITuple tuple, int size) { if (tuple == null || tuple.Count != size) ThrowInvalidTupleSize(tuple, size, 0); return tuple; @@ -332,7 +248,7 @@ public static IFdbTuple OfSize(this IFdbTuple tuple, int size) /// If is smaller than [ContractAnnotation("halt <= 
tuple:null")] [NotNull] - public static IFdbTuple OfSizeAtLeast(this IFdbTuple tuple, int size) + public static ITuple OfSizeAtLeast(this ITuple tuple, int size) { if (tuple == null || tuple.Count < size) ThrowInvalidTupleSize(tuple, size, -1); return tuple; @@ -346,24 +262,21 @@ public static IFdbTuple OfSizeAtLeast(this IFdbTuple tuple, int size) /// If is larger than [ContractAnnotation("halt <= tuple:null")] [NotNull] - public static IFdbTuple OfSizeAtMost(this IFdbTuple tuple, int size) + public static ITuple OfSizeAtMost(this ITuple tuple, int size) { if (tuple == null || tuple.Count > size) ThrowInvalidTupleSize(tuple, size, 1); return tuple; } [ContractAnnotation("=> halt")] - internal static void ThrowInvalidTupleSize(IFdbTuple tuple, int expected, int test) + internal static void ThrowInvalidTupleSize(ITuple tuple, int expected, int test) { - if (tuple == null) - { - throw new ArgumentNullException("tuple"); - } + Contract.NotNull(tuple, nameof(tuple)); switch(test) { - case 1: throw new InvalidOperationException(String.Format("This operation requires a tuple of size {0} or less, but this tuple has {1} elements", expected, tuple.Count)); - case -1: throw new InvalidOperationException(String.Format("This operation requires a tuple of size {0} or more, but this tuple has {1} elements", expected, tuple.Count)); - default: throw new InvalidOperationException(String.Format("This operation requires a tuple of size {0}, but this tuple has {1} elements", expected, tuple.Count)); + case 1: throw new InvalidOperationException($"This operation requires a tuple of size {expected} or less, but this tuple has {tuple.Count} elements"); + case -1: throw new InvalidOperationException($"This operation requires a tuple of size {expected} or more, but this tuple has {tuple.Count} elements"); + default: throw new InvalidOperationException($"This operation requires a tuple of size {expected}, but this tuple has {tuple.Count} elements"); } } @@ -371,10 +284,10 @@ internal 
static void ThrowInvalidTupleSize(IFdbTuple tuple, int expected, int te /// Expected type of the single element /// Tuple that must be of size 1 /// Equivalent tuple, with its element converted to the specified type - public static FdbTuple As([NotNull] this IFdbTuple tuple) + public static STuple As([NotNull] this ITuple tuple) { tuple.OfSize(1); - return new FdbTuple(tuple.Get(0)); + return new STuple(tuple.Get(0)); } /// Returns a typed version of a tuple of size 2 @@ -382,12 +295,12 @@ public static FdbTuple As([NotNull] this IFdbTuple tuple) /// Expected type of the second element /// Tuple that must be of size 2 /// Equivalent tuple, with its elements converted to the specified types - public static FdbTuple As([NotNull] this IFdbTuple tuple) + public static STuple As([NotNull] this ITuple tuple) { tuple.OfSize(2); - return new FdbTuple( + return new STuple( tuple.Get(0), - tuple.Get(1) + tuple.Get(1) ); } @@ -397,13 +310,13 @@ public static FdbTuple As([NotNull] this IFdbTuple tuple) /// Expected type of the third element /// Tuple that must be of size 3 /// Equivalent tuple, with its elements converted to the specified types - public static FdbTuple As([NotNull] this IFdbTuple tuple) + public static STuple As([NotNull] this ITuple tuple) { tuple.OfSize(3); - return new FdbTuple( + return new STuple( tuple.Get(0), tuple.Get(1), - tuple.Get(2) + tuple.Get(2) ); } @@ -414,14 +327,14 @@ public static FdbTuple As([NotNull] this IFdbTuple tuple /// Expected type of the fourth element /// Tuple that must be of size 4 /// Equivalent tuple, with its elements converted to the specified types - public static FdbTuple As([NotNull] this IFdbTuple tuple) + public static STuple As([NotNull] this ITuple tuple) { tuple.OfSize(4); - return new FdbTuple( + return new STuple( tuple.Get(0), tuple.Get(1), tuple.Get(2), - tuple.Get(3) + tuple.Get(3) ); } @@ -433,15 +346,37 @@ public static FdbTuple As([NotNull] this IFdbTup /// Expected type of the fifth element /// Tuple that 
must be of size 5 /// Equivalent tuple, with its elements converted to the specified types - public static FdbTuple As([NotNull] this IFdbTuple tuple) + public static STuple As([NotNull] this ITuple tuple) { tuple.OfSize(5); - return new FdbTuple( + return new STuple( tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3), - tuple.Get(4) + tuple.Get(4) + ); + } + + /// Returns a typed version of a tuple of size 5 + /// Expected type of the first element + /// Expected type of the second element + /// Expected type of the third element + /// Expected type of the fourth element + /// Expected type of the fifth element + /// Expected type of the sixth element + /// Tuple that must be of size 5 + /// Equivalent tuple, with its elements converted to the specified types + public static STuple As([NotNull] this ITuple tuple) + { + tuple.OfSize(6); + return new STuple( + tuple.Get(0), + tuple.Get(1), + tuple.Get(2), + tuple.Get(3), + tuple.Get(4), + tuple.Get(5) ); } @@ -449,7 +384,7 @@ public static FdbTuple As([NotNull] this /// Tuple of size 1 /// Action that will be passed the content of this tuple as parameters /// If has not the expected size - public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action lambda) + public static void With([NotNull] this ITuple tuple, [NotNull] Action lambda) { OfSize(tuple, 1); lambda(tuple.Get(0)); @@ -459,7 +394,7 @@ public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action /// Tuple of size 2 /// Action that will be passed the content of this tuple as parameters /// If has not the expected size - public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action lambda) + public static void With([NotNull] this ITuple tuple, [NotNull] Action lambda) { OfSize(tuple, 2); lambda(tuple.Get(0), tuple.Get(1)); @@ -469,7 +404,7 @@ public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action /// Tuple of size 3 /// Action that will be passed the content of this tuple as parameters /// If has not 
the expected size - public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action lambda) + public static void With([NotNull] this ITuple tuple, [NotNull] Action lambda) { OfSize(tuple, 3); lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2)); @@ -479,7 +414,7 @@ public static void With([NotNull] this IFdbTuple tuple, [NotNull] Ac /// Tuple of size 4 /// Action that will be passed the content of this tuple as parameters /// If has not the expected size - public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action lambda) + public static void With([NotNull] this ITuple tuple, [NotNull] Action lambda) { OfSize(tuple, 4); lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3)); @@ -489,7 +424,7 @@ public static void With([NotNull] this IFdbTuple tuple, [NotNull /// Tuple of size 5 /// Action that will be passed the content of this tuple as parameters /// If has not the expected size - public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action lambda) + public static void With([NotNull] this ITuple tuple, [NotNull] Action lambda) { OfSize(tuple, 5); lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3), tuple.Get(4)); @@ -499,7 +434,7 @@ public static void With([NotNull] this IFdbTuple tuple, [Not /// Tuple of size 6 /// Action that will be passed the content of this tuple as parameters /// If has not the expected size - public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action lambda) + public static void With([NotNull] this ITuple tuple, [NotNull] Action lambda) { OfSize(tuple, 6); lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3), tuple.Get(4), tuple.Get(5)); @@ -509,7 +444,7 @@ public static void With([NotNull] this IFdbTuple tuple, /// Tuple of size 7 /// Action that will be passed the content of this tuple as parameters /// If has not the expected size - public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action lambda) + public static void With([NotNull] this ITuple tuple, 
[NotNull] Action lambda) { OfSize(tuple, 7); lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3), tuple.Get(4), tuple.Get(5), tuple.Get(6)); @@ -519,7 +454,7 @@ public static void With([NotNull] this IFdbTuple tup /// Tuple of size 8 /// Action that will be passed the content of this tuple as parameters /// If has not the expected size - public static void With([NotNull] this IFdbTuple tuple, [NotNull] Action lambda) + public static void With([NotNull] this ITuple tuple, [NotNull] Action lambda) { OfSize(tuple, 8); lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3), tuple.Get(4), tuple.Get(5), tuple.Get(6), tuple.Get(7)); @@ -530,10 +465,9 @@ public static void With([NotNull] this IFdbTuple /// Action that will be passed the content of this tuple as parameters /// Result of calling with the items of this tuple /// If has not the expected size - public static TResult With([NotNull] this IFdbTuple tuple, [NotNull] Func lambda) + public static TResult With([NotNull] this ITuple tuple, [NotNull] Func lambda) { - OfSize(tuple, 1); - return lambda(tuple.Get(0)); + return lambda(tuple.OfSize(1).Get(0)); } /// Execute a lambda Function with the content of this tuple @@ -541,7 +475,7 @@ public static TResult With([NotNull] this IFdbTuple tuple, [NotNull /// Function that will be passed the content of this tuple as parameters /// Result of calling with the items of this tuple /// If has not the expected size - public static TResult With([NotNull] this IFdbTuple tuple, [NotNull] Func lambda) + public static TResult With([NotNull] this ITuple tuple, [NotNull] Func lambda) { OfSize(tuple, 2); return lambda(tuple.Get(0), tuple.Get(1)); @@ -552,7 +486,7 @@ public static TResult With([NotNull] this IFdbTuple tuple, [Not /// Action that will be passed the content of this tuple as parameters /// Result of calling with the items of this tuple /// If has not the expected size - public static TResult With([NotNull] this IFdbTuple tuple, [NotNull] Func lambda) + 
public static TResult With([NotNull] this ITuple tuple, [NotNull] Func lambda) { OfSize(tuple, 3); return lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2)); @@ -563,7 +497,7 @@ public static TResult With([NotNull] this IFdbTuple tuple, /// Function that will be passed the content of this tuple as parameters /// Result of calling with the items of this tuple /// If has not the expected size - public static TResult With([NotNull] this IFdbTuple tuple, [NotNull] Func lambda) + public static TResult With([NotNull] this ITuple tuple, [NotNull] Func lambda) { OfSize(tuple, 4); return lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3)); @@ -574,7 +508,7 @@ public static TResult With([NotNull] this IFdbTuple tup /// Function that will be passed the content of this tuple as parameters /// Result of calling with the items of this tuple /// If has not the expected size - public static TResult With([NotNull] this IFdbTuple tuple, [NotNull] Func lambda) + public static TResult With([NotNull] this ITuple tuple, [NotNull] Func lambda) { OfSize(tuple, 5); return lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3), tuple.Get(4)); @@ -585,7 +519,7 @@ public static TResult With([NotNull] this IFdbTuple /// Function that will be passed the content of this tuple as parameters /// Result of calling with the items of this tuple /// If has not the expected size - public static TResult With([NotNull] this IFdbTuple tuple, [NotNull] Func lambda) + public static TResult With([NotNull] this ITuple tuple, [NotNull] Func lambda) { OfSize(tuple, 6); return lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3), tuple.Get(4), tuple.Get(5)); @@ -596,7 +530,7 @@ public static TResult With([NotNull] this IFdbT /// Function that will be passed the content of this tuple as parameters /// Result of calling with the items of this tuple /// If has not the expected size - public static TResult With([NotNull] this IFdbTuple tuple, [NotNull] Func lambda) + public static TResult 
With([NotNull] this ITuple tuple, [NotNull] Func lambda) { OfSize(tuple, 7); return lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3), tuple.Get(4), tuple.Get(5), tuple.Get(6)); @@ -607,7 +541,7 @@ public static TResult With([NotNull] this I /// Function that will be passed the content of this tuple as parameters /// Result of calling with the items of this tuple /// If has not the expected size - public static TResult With([NotNull] this IFdbTuple tuple, [NotNull] Func lambda) + public static TResult With([NotNull] this ITuple tuple, [NotNull] Func lambda) { OfSize(tuple, 8); return lambda(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3), tuple.Get(4), tuple.Get(5), tuple.Get(6), tuple.Get(7)); @@ -615,6 +549,139 @@ public static TResult With([NotNull] th #endregion + #region Deconstruction + + [EditorBrowsable(EditorBrowsableState.Never)] + public static void Deconstruct(this ITuple value, out T1 item1) + { + item1 = value.OfSize(1).Get(0); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static void Deconstruct(this ITuple value, out T1 item1, out T2 item2) + { + value.OfSize(2); + item1 = value.Get(0); + item2 = value.Get(1); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static void Deconstruct(this ITuple value, out T1 item1, out T2 item2, out T3 item3) + { + value.OfSize(3); + item1 = value.Get(0); + item2 = value.Get(1); + item3 = value.Get(2); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static void Deconstruct(this ITuple value, out T1 item1, out T2 item2, out T3 item3, out T4 item4) + { + value.OfSize(4); + item1 = value.Get(0); + item2 = value.Get(1); + item3 = value.Get(2); + item4 = value.Get(3); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static void Deconstruct(this ITuple value, out T1 item1, out T2 item2, out T3 item3, out T4 item4, out T5 item5) + { + value.OfSize(5); + item1 = value.Get(0); + item2 = value.Get(1); + item3 = value.Get(2); + item4 = value.Get(3); 
+ item5 = value.Get(4); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static void Deconstruct(this ITuple value, out T1 item1, out T2 item2, out T3 item3, out T4 item4, out T5 item5, out T6 item6) + { + value.OfSize(6); + item1 = value.Get(0); + item2 = value.Get(1); + item3 = value.Get(2); + item4 = value.Get(3); + item5 = value.Get(4); + item6 = value.Get(5); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static void Deconstruct(this ITuple value, out T1 item1, out T2 item2, out T3 item3, out T4 item4, out T5 item5, out T6 item6, out T7 item7) + { + value.OfSize(7); + item1 = value.Get(0); + item2 = value.Get(1); + item3 = value.Get(2); + item4 = value.Get(3); + item5 = value.Get(4); + item6 = value.Get(5); + item7 = value.Get(6); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static void Deconstruct(this ITuple value, out T1 item1, out T2 item2, out T3 item3, out T4 item4, out T5 item5, out T6 item6, out T7 item7, out T8 item8) + { + value.OfSize(8); + item1 = value.Get(0); + item2 = value.Get(1); + item3 = value.Get(2); + item4 = value.Get(3); + item5 = value.Get(4); + item6 = value.Get(5); + item7 = value.Get(6); + item8 = value.Get(7); + } + + #endregion + + #region ValueTuple + + [Pure] + public static STuple ToSTuple(this ValueTuple tuple) + { + return default(STuple); + } + + [Pure] + public static STuple ToSTuple(this ValueTuple tuple) + { + return new STuple(tuple.Item1); + } + + [Pure] + public static STuple ToSTuple(this (T1, T2) tuple) + { + return new STuple(tuple.Item1, tuple.Item2); + } + + [Pure] + public static STuple ToSTuple(this (T1, T2, T3) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3); + } + + [Pure] + public static STuple ToSTuple(this (T1, T2, T3, T4) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + [Pure] + public static STuple ToSTuple(this (T1, T2, T3, T4, T5) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, 
tuple.Item3, tuple.Item4, tuple.Item5); + } + + [Pure] + public static STuple ToSTuple(this (T1, T2, T3, T4, T5, T6) tuple) + { + return new STuple(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5, tuple.Item6); + } + + #endregion + } } diff --git a/FoundationDB.Client/Shared/Tuples/TupleHelpers.cs b/FoundationDB.Client/Shared/Tuples/TupleHelpers.cs new file mode 100644 index 000000000..7e0f65b98 --- /dev/null +++ b/FoundationDB.Client/Shared/Tuples/TupleHelpers.cs @@ -0,0 +1,234 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace Doxense.Collections.Tuples +{ + using System; + using System.Collections; + using System.Runtime.CompilerServices; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; + + public static class TupleHelpers + { + + /// Default (non-optimized) implementation of ITuple.this[long?, long?] + /// Tuple to slice + /// Start offset of the section (included) + /// End offset of the section (included) + /// New tuple only containing items inside this section + [NotNull] + public static ITuple Splice([NotNull] ITuple tuple, int? fromIncluded, int? toExcluded) + { + Contract.Requires(tuple != null); + int count = tuple.Count; + if (count == 0) return STuple.Empty; + + int start = fromIncluded.HasValue ? MapIndexBounded(fromIncluded.Value, count) : 0; + int end = toExcluded.HasValue ? MapIndexBounded(toExcluded.Value, count) : count; + + int len = end - start; + + if (len <= 0) return STuple.Empty; + if (start == 0 && len == count) return tuple; + switch (len) + { + case 1: + return new ListTuple(new[] { tuple[start] }, 0, 1); + case 2: + return new ListTuple(new[] { tuple[start], tuple[start + 1] }, 0, 2); + default: + { + var items = new object[len]; + //note: can be slow for tuples using linked-lists, but hopefully they will have their own Slice implementation... 
+ int q = start; + for (int p = 0; p < items.Length; p++) + { + items[p] = tuple[q++]; + } + return new ListTuple(items, 0, len); + } + } + } + + /// Default (non-optimized) implementation for ITuple.StartsWith() + /// Larger tuple + /// Smaller tuple + /// True if starts with (or is equal to) + public static bool StartsWith([NotNull] ITuple a, [NotNull] ITuple b) + { + Contract.Requires(a != null && b != null); + if (object.ReferenceEquals(a, b)) return true; + int an = a.Count; + int bn = b.Count; + + if (bn > an) return false; + if (bn == 0) return true; // note: 'an' can only be 0 because of previous test + + for (int i = 0; i < bn; i++) + { + if (!object.Equals(a[i], b[i])) return false; + } + return true; + } + + /// Default (non-optimized) implementation for ITuple.EndsWith() + /// Larger tuple + /// Smaller tuple + /// True if starts with (or is equal to) + public static bool EndsWith([NotNull] ITuple a, [NotNull] ITuple b) + { + Contract.Requires(a != null && b != null); + if (object.ReferenceEquals(a, b)) return true; + int an = a.Count; + int bn = b.Count; + + if (bn > an) return false; + if (bn == 0) return true; // note: 'an' can only be 0 because of previous test + + int offset = an - bn; + for (int i = 0; i < bn; i++) + { + if (!object.Equals(a[offset + i], b[i])) return false; + } + return true; + } + + /// Helper to copy the content of a tuple at a specific position in an array + /// Updated offset just after the last element of the copied tuple + public static int CopyTo([NotNull] ITuple tuple, [NotNull] object[] array, int offset) + { + Contract.Requires(tuple != null && array != null && offset >= 0); + + foreach (var item in tuple) + { + array[offset++] = item; + } + return offset; + } + + /// Maps a relative index into an absolute index + /// Relative index in the tuple (from the end if negative) + /// Size of the tuple + /// Absolute index from the start of the tuple, or exception if outside of the tuple + /// If the absolute index is outside 
of the tuple (<0 or >=) + public static int MapIndex(int index, int count) + { + int offset = index; + if (offset < 0) offset += count; + if (offset < 0 || offset >= count) return FailIndexOutOfRange(index, count); + return offset; + } + + /// Maps a relative index into an absolute index + /// Relative index in the tuple (from the end if negative) + /// Size of the tuple + /// Absolute index from the start of the tuple. Truncated to 0 if index is before the start of the tuple, or to if the index is after the end of the tuple + public static int MapIndexBounded(int index, int count) + { + if (index < 0) index += count; + return Math.Max(Math.Min(index, count), 0); + } + + [ContractAnnotation("=> halt"), MethodImpl(MethodImplOptions.NoInlining)] + public static T FailIndexOutOfRange(int index, int count) + { + throw new IndexOutOfRangeException($"Index {index} is outside of the tuple's range (0..{count - 1})"); + } + + public static bool Equals(ITuple left, object other, [NotNull] IEqualityComparer comparer) + { + return object.ReferenceEquals(left, null) ? 
other == null : Equals(left, other as ITuple, comparer); + } + + public static bool Equals(ITuple x, ITuple y, [NotNull] IEqualityComparer comparer) + { + if (object.ReferenceEquals(x, y)) return true; + if (object.ReferenceEquals(x, null) || object.ReferenceEquals(y, null)) return false; + + return x.Count == y.Count && DeepEquals(x, y, comparer); + } + + public static bool DeepEquals([NotNull] ITuple x, [NotNull] ITuple y, [NotNull] IEqualityComparer comparer) + { + Contract.Requires(x != null && y != null && comparer != null); + + using (var xs = x.GetEnumerator()) + using (var ys = y.GetEnumerator()) + { + while (xs.MoveNext()) + { + if (!ys.MoveNext()) return false; + if (!comparer.Equals(xs.Current, ys.Current)) return false; + } + + return !ys.MoveNext(); + } + } + + public static int StructuralGetHashCode(ITuple tuple, [NotNull] IEqualityComparer comparer) + { + Contract.Requires(comparer != null); + + if (object.ReferenceEquals(tuple, null)) + { + return comparer.GetHashCode(null); + } + + int h = 0; + foreach (var item in tuple) + { + h = HashCodes.Combine(h, comparer.GetHashCode(item)); + } + return h; + } + + public static int StructuralCompare(ITuple x, ITuple y, [NotNull] IComparer comparer) + { + Contract.Requires(comparer != null); + + if (object.ReferenceEquals(x, y)) return 0; + if (object.ReferenceEquals(x, null)) return -1; + if (object.ReferenceEquals(y, null)) return 1; + + using (var xs = x.GetEnumerator()) + using (var ys = y.GetEnumerator()) + { + while (xs.MoveNext()) + { + if (!ys.MoveNext()) return 1; + + int cmp = comparer.Compare(xs.Current, ys.Current); + if (cmp != 0) return cmp; + + } + return ys.MoveNext() ? 
-1 : 0; + } + } + } +} diff --git a/FoundationDB.Client/Shared/TypeSystem/Encoders/DynamicKeyEncoderBase.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/DynamicKeyEncoderBase.cs new file mode 100644 index 000000000..d070347af --- /dev/null +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/DynamicKeyEncoderBase.cs @@ -0,0 +1,177 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using Doxense.Collections.Tuples; + using Doxense.Memory; + using Doxense.Serialization.Encoders; + + public abstract class DynamicKeyEncoderBase : IDynamicKeyEncoder + { + + public abstract IKeyEncoding Encoding { get; } + + public virtual (Slice Begin, Slice End) ToRange(Slice prefix) + { + return KeyRange.StartsWith(prefix); + } + + public abstract void PackKey(ref SliceWriter writer, TTuple items) where TTuple : ITuple; + + public virtual void EncodeKey(ref SliceWriter writer, T1 item1) + { + PackKey(ref writer, STuple.Create(item1)); + } + + public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2) + { + PackKey(ref writer, STuple.Create(item1, item2)); + } + + public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3) + { + PackKey(ref writer, STuple.Create(item1, item2, item3)); + } + + public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4) + { + PackKey(ref writer, STuple.Create(item1, item2, item3, item4)); + } + + public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + PackKey(ref writer, STuple.Create(item1, item2, item3, item4, item5)); + } + + public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + PackKey(ref writer, STuple.Create(item1, item2, item3, item4, item5, item6)); + } + + public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) + { + PackKey(ref writer, STuple.Create(item1, item2, item3, item4, item5, item6, item7)); + } + + public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) + { + PackKey(ref writer, STuple.Create(item1, item2, item3, item4, item5, item6, item7, item8)); + } + + public abstract ITuple UnpackKey(Slice packed); + + 
public virtual T DecodeKey(Slice packed) + { + return UnpackKey(packed).OfSize(1).Get(0); + } + + public virtual T DecodeKeyFirst(Slice packed) + { + return UnpackKey(packed).OfSizeAtLeast(1).Get(0); + } + + public virtual T DecodeKeyLast(Slice packed) + { + return UnpackKey(packed).OfSizeAtLeast(1).Get(-1); + } + + public virtual STuple DecodeKey(Slice packed) + { + return UnpackKey(packed).With((T1 a, T2 b) => STuple.Create(a, b)); + } + + public virtual STuple DecodeKey(Slice packed) + { + return UnpackKey(packed).With((T1 a, T2 b, T3 c) => STuple.Create(a, b, c)); + } + + public virtual STuple DecodeKey(Slice packed) + { + return UnpackKey(packed).With((T1 a, T2 b, T3 c, T4 d) => STuple.Create(a, b, c, d)); + } + + public virtual STuple DecodeKey(Slice packed) + { + return UnpackKey(packed).With((T1 a, T2 b, T3 c, T4 d, T5 e) => STuple.Create(a, b, c, d, e)); + } + + public virtual STuple DecodeKey(Slice packed) + { + return UnpackKey(packed).With((T1 a, T2 b, T3 c, T4 d, T5 e, T6 f) => STuple.Create(a, b, c, d, e, f)); + } + + public virtual (Slice Begin, Slice End) ToRange(Slice prefix, ITuple items) + { + var writer = new SliceWriter(prefix, 16); + PackKey(ref writer, items); + return ToRange(writer.ToSlice()); + } + + public virtual (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1) + { + return ToRange(prefix, STuple.Create(item1)); + } + + public virtual (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2) + { + return ToRange(prefix, STuple.Create(item1, item2)); + } + + public virtual (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3) + { + return ToRange(prefix, STuple.Create(item1, item3, item3)); + } + + public virtual (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4) + { + return ToRange(prefix, STuple.Create(item1, item3, item3, item4)); + } + + public virtual (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 
item5) + { + return ToRange(prefix, STuple.Create(item1, item3, item3, item4, item5)); + } + + public virtual (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + return ToRange(prefix, STuple.Create(item1, item3, item3, item4, item5, item6)); + } + + public virtual (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) + { + return ToRange(prefix, STuple.Create(item1, item3, item3, item4, item5, item6, item7)); + } + + public virtual (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) + { + return ToRange(prefix, STuple.Create(item1, item3, item3, item4, item5, item6, item7, item8)); + } + } +} diff --git a/FoundationDB.Client/Shared/TypeSystem/Encoders/ICompositeKeyEncoder.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/ICompositeKeyEncoder.cs new file mode 100644 index 000000000..4a720cf3d --- /dev/null +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/ICompositeKeyEncoder.cs @@ -0,0 +1,191 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Serialization.Encoders +{ + using System; + using System.Runtime.CompilerServices; + using Doxense.Memory; + + public interface ICompositeKeyEncoder : IKeyEncoder<(T1, T2)> + { + /// Write some or all parts of a composite key + void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2) key); + + /// Read some or all parts of a composite key + void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2) items); + } + + public interface ICompositeKeyEncoder : IKeyEncoder<(T1, T2, T3)> + { + /// Write some or all parts of a composite key + void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3) key); + + /// Read some or all parts of a composite key + void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3) items); + } + + public interface ICompositeKeyEncoder : IKeyEncoder<(T1, T2, T3, T4)> + { + /// Write some or all parts of a composite key + void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4) key); + + /// Read some or all parts of a composite key + void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4) items); + } + + public interface ICompositeKeyEncoder : IKeyEncoder<(T1, T2, T3, T4, T5)> + { + /// Write 
some or all parts of a composite key + void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4, T5) key); + + /// Read some or all parts of a composite key + void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4, T5) items); + } + + public interface ICompositeKeyEncoder : IKeyEncoder<(T1, T2, T3, T4, T5, T6)> + { + /// Write some or all parts of a composite key + void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4, T5, T6) key); + + /// Read some or all parts of a composite key + void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4, T5, T6) items); + } + + /// Wrapper for encoding and decoding a pair with lambda functions + public abstract class CompositeKeyEncoder : ICompositeKeyEncoder + { + + public abstract IKeyEncoding Encoding { get; } + + public abstract void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2) items); + + public abstract void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2) items); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteKeyTo(ref SliceWriter writer, (T1, T2) items) + { + WriteKeyPartsTo(ref writer, 2, ref items); + } + + public void ReadKeyFrom(ref SliceReader reader, out (T1, T2) items) + { + ReadKeyPartsFrom(ref reader, 2, out items); + } + + } + + /// Wrapper for encoding and decoding a triplet with lambda functions + public abstract class CompositeKeyEncoder : ICompositeKeyEncoder + { + + public abstract IKeyEncoding Encoding { get; } + + public abstract void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3) items); + + public abstract void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3) items); + + public void WriteKeyTo(ref SliceWriter writer, (T1, T2, T3) items) + { + WriteKeyPartsTo(ref writer, 3, ref items); + } + + public void ReadKeyFrom(ref SliceReader reader, out (T1, T2, T3) items) + { + ReadKeyPartsFrom(ref reader, 3, out items); + } + + 
} + + /// Wrapper for encoding and decoding a quad with lambda functions + public abstract class CompositeKeyEncoder : ICompositeKeyEncoder + { + + public abstract IKeyEncoding Encoding { get; } + + public abstract void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4) items); + + public abstract void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4) items); + + public void WriteKeyTo(ref SliceWriter writer, (T1, T2, T3, T4) items) + { + WriteKeyPartsTo(ref writer, 4, ref items); + } + + public void ReadKeyFrom(ref SliceReader reader, out (T1, T2, T3, T4) items) + { + ReadKeyPartsFrom(ref reader, 4, out items); + } + + } + + /// Wrapper for encoding and decoding five items with lambda functions + public abstract class CompositeKeyEncoder : ICompositeKeyEncoder + { + + public abstract IKeyEncoding Encoding { get; } + + public abstract void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4, T5) items); + + public abstract void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4, T5) items); + + public void WriteKeyTo(ref SliceWriter writer, (T1, T2, T3, T4, T5) items) + { + WriteKeyPartsTo(ref writer, 5, ref items); + } + + public void ReadKeyFrom(ref SliceReader reader, out (T1, T2, T3, T4, T5) items) + { + ReadKeyPartsFrom(ref reader, 5, out items); + } + + } + + /// Wrapper for encoding and decoding six items with lambda functions + public abstract class CompositeKeyEncoder : ICompositeKeyEncoder + { + + public abstract IKeyEncoding Encoding { get; } + + public abstract void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4, T5, T6) items); + + public abstract void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4, T5, T6) items); + + public void WriteKeyTo(ref SliceWriter writer, (T1, T2, T3, T4, T5, T6) items) + { + WriteKeyPartsTo(ref writer, 6, ref items); + } + + public void ReadKeyFrom(ref SliceReader reader, out (T1, T2, T3, T4, 
T5, T6) items) + { + ReadKeyPartsFrom(ref reader, 6, out items); + } + + } + +} diff --git a/FoundationDB.Client/TypeSystem/IDynamicKeyEncoder.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/IDynamicKeyEncoder.cs similarity index 87% rename from FoundationDB.Client/TypeSystem/IDynamicKeyEncoder.cs rename to FoundationDB.Client/Shared/TypeSystem/Encoders/IDynamicKeyEncoder.cs index 2825449cb..ede420b09 100644 --- a/FoundationDB.Client/TypeSystem/IDynamicKeyEncoder.cs +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/IDynamicKeyEncoder.cs @@ -26,25 +26,21 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -using System; -using FoundationDB.Layers.Tuples; -using JetBrains.Annotations; - -namespace FoundationDB.Client +namespace Doxense.Serialization.Encoders { + using System; + using Doxense.Collections.Tuples; + using Doxense.Memory; + using FoundationDB.Client; + using JetBrains.Annotations; /// Encoder that can process keys of variable size and types public interface IDynamicKeyEncoder { /// Return the parent key encoding - IFdbKeyEncoding Encoding {[NotNull] get; } - - /// Return a range that contains all the keys under a subspace of the encoder subspace, using the semantic of the encoding - /// Optional binary prefix - /// Key range which derives from the semantic of the current encoding - /// For example, the Tuple encoding will produce ranges of the form "(Key + \x00) <= x < (Key + \xFF)", while a binary-based encoding would produce ranges of the form "Key <= x < Increment(Key)" - FdbKeyRange ToRange(Slice prefix = default(Slice)); + [NotNull] + IKeyEncoding Encoding { get; } #region Encoding... 
@@ -52,7 +48,7 @@ public interface IDynamicKeyEncoder /// Buffer where to append the binary representation /// Tuple of any size (0 to N) /// If some elements in are not supported by this type system - void PackKey(ref SliceWriter writer, IFdbTuple items); + void PackKey(ref SliceWriter writer, TTuple items) where TTuple : ITuple; /// Encode a key composed of a single element into a binary slice /// Type of the element @@ -165,7 +161,7 @@ public interface IDynamicKeyEncoder /// Decode a binary slice into a tuple or arbitrary length /// Binary slice produced by a previous call to /// Tuple of any size (0 to N) - IFdbTuple UnpackKey(Slice packed); + ITuple UnpackKey(Slice packed); /// Decode a binary slice containing exactly on element /// Expected type of the element @@ -182,7 +178,8 @@ public interface IDynamicKeyEncoder /// Expected type of the second element /// Binary slice produced by a previous call to or /// Tuple containing two elements, or an exception if the data is invalid, or the tuples has less or more than two elements - FdbTuple DecodeKey(Slice packed); + STuple DecodeKey(Slice packed); + //REVIEW: return ValueTuple instead? /// Decode a binary slice containing exactly three elements /// Expected type of the first element @@ -190,7 +187,8 @@ public interface IDynamicKeyEncoder /// Expected type of the third element /// Binary slice produced by a previous call to or /// Tuple containing three elements, or an exception if the data is invalid, or the tuples has less or more than three elements - FdbTuple DecodeKey(Slice packed); + STuple DecodeKey(Slice packed); + //REVIEW: return ValueTuple instead? 
/// Decode a binary slice containing exactly four elements /// Expected type of the first element @@ -199,7 +197,8 @@ public interface IDynamicKeyEncoder /// Expected type of the fourth element /// Binary slice produced by a previous call to or /// Tuple containing four elements, or an exception if the data is invalid, or the tuples has less or more than four elements - FdbTuple DecodeKey(Slice packed); + STuple DecodeKey(Slice packed); + //REVIEW: return ValueTuple instead? /// Decode a binary slice containing exactly five elements /// Expected type of the first element @@ -209,22 +208,41 @@ public interface IDynamicKeyEncoder /// Expected type of the fifth element /// Binary slice produced by a previous call to or /// Tuple containing five elements, or an exception if the data is invalid, or the tuples has less or more than five elements - FdbTuple DecodeKey(Slice packed); + STuple DecodeKey(Slice packed); + //REVIEW: return ValueTuple instead? + + /// Decode a binary slice containing exactly six elements + /// Expected type of the first element + /// Expected type of the second element + /// Expected type of the third element + /// Expected type of the fourth element + /// Expected type of the fifth element + /// Expected type of the sixth element + /// Binary slice produced by a previous call to or + /// Tuple containing five elements, or an exception if the data is invalid, or the tuples has less or more than five elements + STuple DecodeKey(Slice packed); + //REVIEW: return ValueTuple instead? #endregion #region Ranges... 
+ /// Return a range that contains all the keys under a subspace of the encoder subspace, using the semantic of the encoding + /// Optional binary prefix + /// Key range which derives from the semantic of the current encoding + /// For example, the Tuple encoding will produce ranges of the form "(Key + \x00) <= x < (Key + \xFF)", while a binary-based encoding would produce ranges of the form "Key <= x < Increment(Key)" + (Slice Begin, Slice End) ToRange(Slice prefix = default(Slice)); + /// Return a key range using a tuple as a prefix /// Optional binary prefix that should be added before encoding the key /// Tuple of any size (0 to N) - FdbKeyRange ToRange(Slice prefix, IFdbTuple items); + (Slice Begin, Slice End) ToRange(Slice prefix, ITuple items); /// Return a key range using a single element as a prefix /// Type of the element /// Optional binary prefix that should be added before encoding the key /// Element to encode - FdbKeyRange ToKeyRange(Slice prefix, T1 item1); + (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1); /// Return a key range using two elements as a prefix /// Type of the first element @@ -232,7 +250,7 @@ public interface IDynamicKeyEncoder /// Optional binary prefix that should be added before encoding the key /// First element to encode /// Second element to encode - FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2); + (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2); /// Return a key range using three elements as a prefix /// Type of the first element @@ -242,7 +260,7 @@ public interface IDynamicKeyEncoder /// First element to encode /// Second element to encode /// Third element to encode - FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3); + (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3); /// Return a key range using four elements as a prefix /// Type of the first element @@ -254,7 +272,7 @@ public interface IDynamicKeyEncoder /// Second element to 
encode /// Third element to encode /// Fourth element to encode - FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4); + (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4); /// Return a key range using five elements as a prefix /// Type of the first element @@ -268,7 +286,7 @@ public interface IDynamicKeyEncoder /// Third element to encode /// Fourth element to encode /// Fifth element to encode - FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5); + (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5); /// Return a key range using six elements as a prefix /// Type of the first element @@ -284,7 +302,7 @@ public interface IDynamicKeyEncoder /// Fourth element to encode /// Fifth element to encode /// Sixth element to encode - FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6); + (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6); /// Return a key range using seven elements as a prefix /// Type of the first element @@ -302,7 +320,7 @@ public interface IDynamicKeyEncoder /// Fifth element to encode /// Sixth element to encode /// Seventh element to encode - FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7); + (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7); /// Return a key range using eight elements as a prefix /// Type of the first element @@ -322,7 +340,7 @@ public interface IDynamicKeyEncoder /// Sixth element to encode /// Seventh element to encode /// Eighth element to encode - FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8); + (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 
item5, T6 item6, T7 item7, T8 item8); //note: I will be billing $999.99 to anyone who wants up to T11 !!! :( @@ -330,4 +348,4 @@ public interface IDynamicKeyEncoder } -} \ No newline at end of file +} diff --git a/FoundationDB.Client/TypeSystem/TypeSystem.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/IKeyEncoder.cs similarity index 74% rename from FoundationDB.Client/TypeSystem/TypeSystem.cs rename to FoundationDB.Client/Shared/TypeSystem/Encoders/IKeyEncoder.cs index da2b9e61e..329c827aa 100644 --- a/FoundationDB.Client/TypeSystem/TypeSystem.cs +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/IKeyEncoder.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,29 +26,27 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client //REVIEW: what namespace? +namespace Doxense.Serialization.Encoders { using System; + using Doxense.Memory; using JetBrains.Annotations; - using FoundationDB.Layers.Tuples; - public static class TypeSystem + /// Base interface for all key encoders + public interface IKeyEncoder { + /// Parent encoding [NotNull] - public static readonly IFdbKeyEncoding Default; - - [NotNull] - public static readonly IFdbKeyEncoding Tuples; - - static TypeSystem() - { - var tuples = new TupleKeyEncoding(); - Tuples = tuples; + IKeyEncoding Encoding { get; } + } - // default is the same a Tuples (for now?) 
- Default = tuples; - } + public interface IKeyEncoder : IKeyEncoder + { + /// Encode a single value + void WriteKeyTo(ref SliceWriter writer, T1 value); + /// Decode a single value + void ReadKeyFrom(ref SliceReader reader, out T1 value); } -} \ No newline at end of file +} diff --git a/FoundationDB.Client/TypeSystem/IValueEncoder.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/IValueEncoder.cs similarity index 96% rename from FoundationDB.Client/TypeSystem/IValueEncoder.cs rename to FoundationDB.Client/Shared/TypeSystem/Encoders/IValueEncoder.cs index 9f9fc1967..7d8b5ba0e 100644 --- a/FoundationDB.Client/TypeSystem/IValueEncoder.cs +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/IValueEncoder.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Serialization.Encoders { using System; using JetBrains.Annotations; @@ -44,4 +44,4 @@ public interface IValueEncoder T DecodeValue(Slice encoded); } -} \ No newline at end of file +} diff --git a/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyEncoderExtensions.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyEncoderExtensions.cs new file mode 100644 index 000000000..8b3106dea --- /dev/null +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyEncoderExtensions.cs @@ -0,0 +1,414 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace Doxense.Serialization.Encoders +{ + using System; + using System.Collections.Generic; + using System.Linq; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using JetBrains.Annotations; + + public static class KeyEncoderExtensions + { + + #region + + public static Slice EncodeKey([NotNull] this IKeyEncoder encoder, T1 value) + { + var writer = default(SliceWriter); + encoder.WriteKeyTo(ref writer, value); + return writer.ToSlice(); + } + + public static Slice EncodeKey([NotNull] this IKeyEncoder encoder, Slice prefix, T1 value) + { + var writer = new SliceWriter(prefix.Count + 16); // ~16 bytes si T1 = Guid + writer.WriteBytes(prefix); + encoder.WriteKeyTo(ref writer, value); + return writer.ToSlice(); + } + + public static T1 DecodeKey([NotNull] this IKeyEncoder decoder, Slice encoded) + { + var reader = new SliceReader(encoded); + decoder.ReadKeyFrom(ref reader, out T1 item); + //TODO: should we fail if extra bytes? 
+ return item; + } + + #endregion + + #region + + public static void WriteKeyTo(this ICompositeKeyEncoder encoder, ref SliceWriter writer, T1 value1, T2 value2) + { + var tuple = (value1, value2); + encoder.WriteKeyPartsTo(ref writer, 2, ref tuple); + } + + public static Slice EncodeKey(this ICompositeKeyEncoder encoder, T1 item1, T2 item2) + { + var writer = default(SliceWriter); + var tuple = (item1, item2); + encoder.WriteKeyPartsTo(ref writer, 2, ref tuple); + return writer.ToSlice(); + } + + public static Slice EncodeKey(this ICompositeKeyEncoder encoder, Slice prefix, T1 item1, T2 item2) + { + var writer = new SliceWriter(prefix.Count + 24); + writer.WriteBytes(prefix); + encoder.WriteKeyTo(ref writer, item1, item2); + return writer.ToSlice(); + } + + public static Slice EncodePartialKey(this ICompositeKeyEncoder encoder, T1 item1) + { + var writer = default(SliceWriter); + var tuple = (item1, default(T2)); + encoder.WriteKeyPartsTo(ref writer, 1, ref tuple); + return writer.ToSlice(); + } + + public static Slice EncodePartialKey(this ICompositeKeyEncoder encoder, Slice prefix, T1 item1) + { + var writer = new SliceWriter(prefix.Count + 16); + writer.WriteBytes(prefix); + var tuple = (item1, default(T2)); + encoder.WriteKeyPartsTo(ref writer, 1, ref tuple); + return writer.ToSlice(); + } + + public static Slice EncodeKeyParts(this ICompositeKeyEncoder encoder, int count, (T1, T2) items) + { + var writer = default(SliceWriter); + encoder.WriteKeyPartsTo(ref writer, count, ref items); + return writer.ToSlice(); + } + + public static STuple DecodeKey(this ICompositeKeyEncoder decoder, Slice encoded) + { + var reader = new SliceReader(encoded); + decoder.ReadKeyFrom(ref reader, out var items); + //TODO: throw if extra bytes? 
+ return items; + } + + public static STuple DecodeKeyParts(this ICompositeKeyEncoder encoder, int count, Slice encoded) + { + var reader = new SliceReader(encoded); + encoder.ReadKeyPartsFrom(ref reader, count, out var items); + return items; + } + + #endregion + + #region + + public static void WriteKeyTo(this ICompositeKeyEncoder encoder, ref SliceWriter writer, T1 value1, T2 value2, T3 value3) + { + var tuple = (value1, value2, value3); + encoder.WriteKeyPartsTo(ref writer, 3, ref tuple); + } + + public static Slice EncodeKey(this ICompositeKeyEncoder encoder, T1 item1, T2 item2, T3 item3) + { + var writer = default(SliceWriter); + var tuple = (item1, item2, item3); + encoder.WriteKeyPartsTo(ref writer, 3, ref tuple); + return writer.ToSlice(); + } + + public static Slice EncodeKey(this ICompositeKeyEncoder encoder, Slice prefix, T1 item1, T2 item2, T3 item3) + { + var writer = new SliceWriter(prefix.Count + 32); + writer.WriteBytes(prefix); + encoder.WriteKeyTo(ref writer, item1, item2, item3); + return writer.ToSlice(); + } + + public static Slice EncodeKeyParts(this ICompositeKeyEncoder encoder, int count, (T1, T2, T3) items) + { + var writer = default(SliceWriter); + encoder.WriteKeyPartsTo(ref writer, count, ref items); + return writer.ToSlice(); + } + + public static STuple DecodeKey(this ICompositeKeyEncoder decoder, Slice encoded) + { + var reader = new SliceReader(encoded); + decoder.ReadKeyFrom(ref reader, out var items); + //TODO: throw if extra bytes? 
+ return items; + } + + public static STuple DecodeKeyParts(this ICompositeKeyEncoder encoder, int count, Slice encoded) + { + var reader = new SliceReader(encoded); + encoder.ReadKeyPartsFrom(ref reader, count, out var items); + return items; + } + + #endregion + + #region + + public static void WriteKeyTo(this ICompositeKeyEncoder encoder, ref SliceWriter writer, T1 value1, T2 value2, T3 value3, T4 value4) + { + var tuple = (value1, value2, value3, value4); + encoder.WriteKeyPartsTo(ref writer, 4, ref tuple); + } + + public static Slice EncodeKey(this ICompositeKeyEncoder encoder, T1 item1, T2 item2, T3 item3, T4 item4) + { + var writer = default(SliceWriter); + var tuple = (item1, item2, item3, item4); + encoder.WriteKeyPartsTo(ref writer, 4, ref tuple); + return writer.ToSlice(); + } + + public static Slice EncodeKey(this ICompositeKeyEncoder encoder, Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4) + { + var writer = new SliceWriter(prefix.Count + 48); + writer.WriteBytes(prefix); + encoder.WriteKeyTo(ref writer, item1, item2, item3, item4); + return writer.ToSlice(); + } + + public static Slice EncodeKeyParts(this ICompositeKeyEncoder encoder, int count, (T1, T2, T3, T4) items) + { + var writer = default(SliceWriter); + encoder.WriteKeyPartsTo(ref writer, count, ref items); + return writer.ToSlice(); + } + + public static STuple DecodeKey(this ICompositeKeyEncoder decoder, Slice encoded) + { + var reader = new SliceReader(encoded); + decoder.ReadKeyFrom(ref reader, out var items); + //TODO: throw if extra bytes? 
+ return items; + } + + public static STuple DecodeKeyParts(this ICompositeKeyEncoder encoder, int count, Slice encoded) + { + var reader = new SliceReader(encoded); + encoder.ReadKeyPartsFrom(ref reader, count, out var items); + return items; + } + + #endregion + + #region + + public static void WriteKeyTo(this ICompositeKeyEncoder encoder, ref SliceWriter writer, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5) + { + var tuple = (value1, value2, value3, value4, value5); + encoder.WriteKeyPartsTo(ref writer, 5, ref tuple); + } + + public static Slice EncodeKey(this ICompositeKeyEncoder encoder, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + var writer = default(SliceWriter); + var tuple = (item1, item2, item3, item4, item5); + encoder.WriteKeyPartsTo(ref writer, 5, ref tuple); + return writer.ToSlice(); + } + + public static Slice EncodeKey(this ICompositeKeyEncoder encoder, Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + var writer = new SliceWriter(prefix.Count + 56); + writer.WriteBytes(prefix); + encoder.WriteKeyTo(ref writer, item1, item2, item3, item4, item5); + return writer.ToSlice(); + } + + public static Slice EncodeKeyParts(this ICompositeKeyEncoder encoder, int count, (T1, T2, T3, T4, T5) items) + { + var writer = default(SliceWriter); + encoder.WriteKeyPartsTo(ref writer, count, ref items); + return writer.ToSlice(); + } + + public static STuple DecodeKey(this ICompositeKeyEncoder decoder, Slice encoded) + { + var reader = new SliceReader(encoded); + decoder.ReadKeyFrom(ref reader, out var items); + //TODO: throw if extra bytes? + return items; + } + + public static STuple DecodeKeyParts(this ICompositeKeyEncoder encoder, int count, Slice encoded) + { + var reader = new SliceReader(encoded); + encoder.ReadKeyPartsFrom(ref reader, count, out var items); + return items; + } + + + #endregion + + #region Batched... 
+ + /// Convert an array of s into an array of slices, using a serializer (or the default serializer if none is provided) + [NotNull] + public static Slice[] EncodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] params T[] values) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(values, nameof(values)); + + var slices = new Slice[values.Length]; + for (int i = 0; i < values.Length; i++) + { + slices[i] = encoder.EncodeKey(values[i]); + } + return slices; + } + + /// Convert an array of s into an array of slices, using a serializer (or the default serializer if none is provided) + [NotNull] + public static Slice[] EncodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] IEnumerable elements, Func selector) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(elements, nameof(elements)); + Contract.NotNull(selector, nameof(selector)); + + TElement[] arr; + ICollection coll; + + if ((arr = elements as TElement[]) != null) + { // fast path for arrays + return EncodeKeys(encoder, arr, selector); + } + if ((coll = elements as ICollection) != null) + { // we can pre-allocate the result array + var slices = new Slice[coll.Count]; + int p = 0; + foreach(var item in coll) + { + slices[p++] = encoder.EncodeKey(selector(item)); + } + return slices; + } + // slow path + return elements.Select((item) => encoder.EncodeKey(selector(item))).ToArray(); + } + + /// Convert an array of s into an array of slices, using a serializer (or the default serializer if none is provided) + [NotNull] + public static Slice[] EncodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] TElement[] elements, Func selector) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(elements, nameof(elements)); + Contract.NotNull(selector, nameof(selector)); + + var slices = new Slice[elements.Length]; + for (int i = 0; i < elements.Length; i++) + { + slices[i] = encoder.EncodeKey(selector(elements[i])); + } + return slices; + } + + /// Transform a sequence 
of s into a sequence of slices, using a serializer (or the default serializer if none is provided) + [NotNull] + public static IEnumerable EncodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] IEnumerable values) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(values, nameof(values)); + + // note: T=>Slice usually is used for writing batches as fast as possible, which means that keys will be consumed immediately and don't need to be streamed + + if (values is T[] array) + { // optimized path for arrays + return EncodeKeys(encoder, array); + } + + if (values is ICollection coll) + { // optimized path when we know the count + var slices = new List(coll.Count); + foreach (var value in coll) + { + slices.Add(encoder.EncodeKey(value)); + } + return slices; + } + + // "slow" path + return values.Select(value => encoder.EncodeKey(value)); + } + + /// Convert an array of slices back into an array of s, using a serializer (or the default serializer if none is provided) + [NotNull] + public static T[] DecodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] params Slice[] slices) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(slices, nameof(slices)); + + var values = new T[slices.Length]; + for (int i = 0; i < slices.Length; i++) + { + values[i] = encoder.DecodeKey(slices[i]); + } + return values; + } + + /// Convert the keys of an array of key value pairs of slices back into an array of s, using a serializer (or the default serializer if none is provided) + [NotNull] + public static T[] DecodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] KeyValuePair[] items) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(items, nameof(items)); + + var values = new T[items.Length]; + for (int i = 0; i < items.Length; i++) + { + values[i] = encoder.DecodeKey(items[i].Key); + } + return values; + } + + /// Transform a sequence of slices back into a sequence of s, using a serializer (or the default serializer if none is 
provided) + [NotNull] + public static IEnumerable DecodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] IEnumerable slices) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(slices, nameof(slices)); + + // Slice=>T may be filtered in LINQ queries, so we should probably stream the values (so no optimization needed) + + return slices.Select(slice => encoder.DecodeKey(slice)); + } + + #endregion + + } +} diff --git a/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Ordered.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Ordered.cs new file mode 100644 index 000000000..56b09d110 --- /dev/null +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Ordered.cs @@ -0,0 +1,316 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Serialization.Encoders +{ + using JetBrains.Annotations; + using System; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + + /// Helper class for all key/value encoders + public static partial class KeyValueEncoders + { + + /// Encoders that produce lexicographically ordered slices, suitable for keys where lexicographical ordering is required + [PublicAPI] + public static class Ordered + { + [NotNull] + public static IKeyEncoder BinaryEncoder => Tuples.Key(); + + [NotNull] + public static IKeyEncoder StringEncoder => Tuples.Key(); + + [NotNull] + public static IKeyEncoder Int32Encoder => Tuples.Key(); + + [NotNull] + public static IKeyEncoder Int64Encoder => Tuples.Key(); + + [NotNull] + public static IKeyEncoder UInt64Encoder => Tuples.Key(); + + [NotNull] + public static IKeyEncoder GuidEncoder => Tuples.Key(); + + public sealed class OrderedKeyEncoder : IKeyEncoder, IKeyEncoding + { + private readonly IOrderedTypeCodec m_codec; + + public OrderedKeyEncoder(IOrderedTypeCodec codec) + { + Contract.Requires(codec != null); + m_codec = codec; + } + + public void WriteKeyTo(ref SliceWriter writer, T key) + { + //TODO: PERF: optimize this! + writer.WriteBytes(m_codec.EncodeOrdered(key)); + } + + public void ReadKeyFrom(ref SliceReader reader, out T key) + { + key = m_codec.DecodeOrdered(reader.ReadToEnd()); + } + + public IKeyEncoding Encoding => this; + + #region IKeyEncoding... 
+ + IDynamicKeyEncoder IKeyEncoding.GetDynamicKeyEncoder() => throw new NotSupportedException(); + + IKeyEncoder IKeyEncoding.GetKeyEncoder() + { + if (typeof(T1) != typeof(T)) throw new NotSupportedException(); + return (IKeyEncoder) (object) this; + } + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + #endregion + + } + + public sealed class CodecCompositeKeyEncoder : CompositeKeyEncoder, IKeyEncoding + { + private readonly IOrderedTypeCodec m_codec1; + private readonly IOrderedTypeCodec m_codec2; + + public CodecCompositeKeyEncoder(IOrderedTypeCodec codec1, IOrderedTypeCodec codec2) + { + m_codec1 = codec1; + m_codec2 = codec2; + } + + public override void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2) items) + { + Contract.Requires(count > 0); + if (count >= 1) m_codec1.EncodeOrderedSelfTerm(ref writer, items.Item1); + if (count >= 2) m_codec2.EncodeOrderedSelfTerm(ref writer, items.Item2); + } + + public override void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2) items) + { + Contract.Requires(count > 0); + + items.Item1 = count >= 1 ? m_codec1.DecodeOrderedSelfTerm(ref reader) : default; + items.Item2 = count >= 2 ? m_codec2.DecodeOrderedSelfTerm(ref reader) : default; + if (reader.HasMore) throw new InvalidOperationException($"Unexpected data at the end of composite key after {count} items"); + } + + public override IKeyEncoding Encoding => this; + + #region IKeyEncoding... 
+ + IDynamicKeyEncoder IKeyEncoding.GetDynamicKeyEncoder() => throw new NotSupportedException(); + + IKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() + { + if (typeof(T1B) != typeof(T1) && typeof(T2B) != typeof(T2)) throw new NotSupportedException(); + return (ICompositeKeyEncoder) (object) this; + } + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + #endregion + + } + + public sealed class CodecCompositeKeyEncoder : CompositeKeyEncoder, IKeyEncoding + { + private readonly IOrderedTypeCodec m_codec1; + private readonly IOrderedTypeCodec m_codec2; + private readonly IOrderedTypeCodec m_codec3; + + public CodecCompositeKeyEncoder(IOrderedTypeCodec codec1, IOrderedTypeCodec codec2, IOrderedTypeCodec codec3) + { + m_codec1 = codec1; + m_codec2 = codec2; + m_codec3 = codec3; + } + + public override void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3) items) + { + Contract.Requires(count > 0 && count <= 3); + if (count >= 1) m_codec1.EncodeOrderedSelfTerm(ref writer, items.Item1); + if (count >= 2) m_codec2.EncodeOrderedSelfTerm(ref writer, items.Item2); + if (count >= 3) m_codec3.EncodeOrderedSelfTerm(ref writer, items.Item3); + } + + public override void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3) items) + { + Contract.Requires(count > 0); + + items.Item1 = count >= 1 ? m_codec1.DecodeOrderedSelfTerm(ref reader) : default; + items.Item2 = count >= 2 ? m_codec2.DecodeOrderedSelfTerm(ref reader) : default; + items.Item3 = count >= 3 ? m_codec3.DecodeOrderedSelfTerm(ref reader) : default; + if (reader.HasMore) throw new InvalidOperationException($"Unexpected data at the end of composite key after {count} items"); + } + + public override IKeyEncoding Encoding => this; + + #region IKeyEncoding... 
+ + IDynamicKeyEncoder IKeyEncoding.GetDynamicKeyEncoder() => throw new NotSupportedException(); + + IKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() + { + if (typeof(T1B) != typeof(T1) && typeof(T2B) != typeof(T2) && typeof(T3B) != typeof(T3)) throw new NotSupportedException(); + return (ICompositeKeyEncoder) (object) this; + } + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + #endregion + } + + public sealed class CodecCompositeKeyEncoder : CompositeKeyEncoder, IKeyEncoding + { + private readonly IOrderedTypeCodec m_codec1; + private readonly IOrderedTypeCodec m_codec2; + private readonly IOrderedTypeCodec m_codec3; + private readonly IOrderedTypeCodec m_codec4; + + public CodecCompositeKeyEncoder(IOrderedTypeCodec codec1, IOrderedTypeCodec codec2, IOrderedTypeCodec codec3, IOrderedTypeCodec codec4) + { + m_codec1 = codec1; + m_codec2 = codec2; + m_codec3 = codec3; + m_codec4 = codec4; + } + + public override void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4) items) + { + Contract.Requires(count > 0 && count <= 4); + if (count >= 1) m_codec1.EncodeOrderedSelfTerm(ref writer, items.Item1); + if (count >= 2) m_codec2.EncodeOrderedSelfTerm(ref writer, items.Item2); + if (count >= 3) m_codec3.EncodeOrderedSelfTerm(ref writer, items.Item3); + if (count >= 4) m_codec4.EncodeOrderedSelfTerm(ref writer, items.Item4); + } + + public override void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4) items) + { + Contract.Requires(count > 0); + items.Item1 = count >= 1 ? m_codec1.DecodeOrderedSelfTerm(ref reader) : default; + items.Item2 = count >= 2 ? m_codec2.DecodeOrderedSelfTerm(ref reader) : default; + items.Item3 = count >= 3 ? 
m_codec3.DecodeOrderedSelfTerm(ref reader) : default; + items.Item4 = count >= 4 ? m_codec4.DecodeOrderedSelfTerm(ref reader) : default; + if (reader.HasMore) throw new InvalidOperationException($"Unexpected data at the end of composite key after {count} items"); + } + + public override IKeyEncoding Encoding => this; + + #region IKeyEncoding... + + IDynamicKeyEncoder IKeyEncoding.GetDynamicKeyEncoder() => throw new NotSupportedException(); + + IKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() + { + if (typeof(T1B) != typeof(T1) && typeof(T2B) != typeof(T2) && typeof(T3B) != typeof(T3) && typeof(T4B) != typeof(T4)) throw new NotSupportedException(); + return (ICompositeKeyEncoder) (object) this; + } + + #endregion + } + + /// Create a simple encoder from a codec + [NotNull] + public static IKeyEncoder Bind([NotNull] IOrderedTypeCodec codec) + { + Contract.NotNull(codec, nameof(codec)); + + return new OrderedKeyEncoder(codec); + } + + /// Create a composite encoder from a pair of codecs + [NotNull] + public static ICompositeKeyEncoder Bind([NotNull] IOrderedTypeCodec codec1, [NotNull] IOrderedTypeCodec codec2) + { + Contract.NotNull(codec1, nameof(codec1)); + Contract.NotNull(codec2, nameof(codec2)); + + return new CodecCompositeKeyEncoder(codec1, codec2); + } + + /// Create a composite encoder from a triplet of codecs + [NotNull] + public static ICompositeKeyEncoder Bind([NotNull] IOrderedTypeCodec codec1, [NotNull] IOrderedTypeCodec codec2, [NotNull] IOrderedTypeCodec codec3) + { + Contract.NotNull(codec1, nameof(codec1)); + Contract.NotNull(codec2, nameof(codec2)); + Contract.NotNull(codec3, nameof(codec3)); + + return new CodecCompositeKeyEncoder(codec1, codec2, codec3); + } + + public static void 
Partial(ref SliceWriter writer, IOrderedTypeCodec codec1, T1 value1) + { + Contract.Assert(codec1 != null); + codec1.EncodeOrderedSelfTerm(ref writer, value1); + } + + public static void Encode(ref SliceWriter writer, [NotNull] IOrderedTypeCodec codec1, T1 value1, [NotNull] IOrderedTypeCodec codec2, T2 value2) + { + Contract.Assert(codec1 != null && codec2 != null); + codec1.EncodeOrderedSelfTerm(ref writer, value1); + codec2.EncodeOrderedSelfTerm(ref writer, value2); + } + + public static void Encode(ref SliceWriter writer, [NotNull] IOrderedTypeCodec codec1, T1 value1, [NotNull] IOrderedTypeCodec codec2, T2 value2, [NotNull] IOrderedTypeCodec codec3, T3 value3) + { + Contract.Assert(codec1 != null && codec2 != null && codec3 != null); + codec1.EncodeOrderedSelfTerm(ref writer, value1); + codec2.EncodeOrderedSelfTerm(ref writer, value2); + codec3.EncodeOrderedSelfTerm(ref writer, value3); + } + + } + + } + +} diff --git a/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Tuples.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Tuples.cs new file mode 100644 index 000000000..acb13ea7b --- /dev/null +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Tuples.cs @@ -0,0 +1,86 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Serialization.Encoders +{ + using System; + using Doxense.Collections.Tuples.Encoding; + using JetBrains.Annotations; + + /// Helper class for all key/value encoders + public static partial class KeyValueEncoders + { + + /// Encoders that use the Tuple Encoding, suitable for keys + [PublicAPI] + public static class Tuples + { + + #region Keys + + [NotNull] + public static IKeyEncoder Key() + { + return TupleEncoder.Encoder.Default; + } + + [NotNull] + public static ICompositeKeyEncoder CompositeKey() + { + return TupleEncoder.CompositeEncoder.Default; + } + + [NotNull] + public static ICompositeKeyEncoder CompositeKey() + { + return TupleEncoder.CompositeEncoder.Default; + } + + [NotNull] + public static ICompositeKeyEncoder CompositeKey() + { + return TupleEncoder.CompositeEncoder.Default; + } + + #endregion + + #region Values... 
+ + [NotNull] + public static IValueEncoder Value() + { + return TupleEncoder.Encoder.Default; + } + + #endregion + + } + + } + +} diff --git a/FoundationDB.Client/Linq/IFdbAsyncEnumerable.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Unordered.cs similarity index 62% rename from FoundationDB.Client/Linq/IFdbAsyncEnumerable.cs rename to FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Unordered.cs index e426f8a8d..2cea05e1e 100644 --- a/FoundationDB.Client/Linq/IFdbAsyncEnumerable.cs +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Unordered.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,23 +26,38 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Serialization.Encoders { - using FoundationDB.Async; - using JetBrains.Annotations; using System; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; - public interface IFdbAsyncEnumerable : IAsyncEnumerable + /// Helper class for all key/value encoders + public static partial class KeyValueEncoders { - /// - /// Gets an asynchronous enumerator over the sequence. - /// - /// Defines how the enumerator will be used by the caller. The source provider can use the mode to optimize how the results are produced. - /// Enumerator for asynchronous enumeration over the sequence. 
- [NotNull] - IFdbAsyncEnumerator GetEnumerator(FdbAsyncMode mode = FdbAsyncMode.Default); + /// Encoders that produce compact but unordered slices, suitable for keys that don't benefit from having lexicographical ordering + [PublicAPI] + public static class Unordered + { + + /// Create a simple encoder from a codec + [NotNull] + public static IKeyEncoder Bind([NotNull] IUnorderedTypeCodec codec) + { + Contract.NotNull(codec, nameof(codec)); + + // ReSharper disable once SuspiciousTypeConversion.Global + if (codec is IKeyEncoder encoder) return encoder; + + return new Singleton( + (value) => codec.EncodeUnordered(value), + (encoded) => codec.DecodeUnordered(encoded) + ); + } + + } } -} \ No newline at end of file +} diff --git a/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Values.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Values.cs new file mode 100644 index 000000000..2c3ab0e05 --- /dev/null +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.Values.cs @@ -0,0 +1,133 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Serialization.Encoders +{ + using System; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; + + /// Helper class for all key/value encoders + public static partial class KeyValueEncoders + { + + /// Encoders that produce compact but unordered slices, suitable for values + [PublicAPI] + public static class Values + { + private static readonly GenericEncoder s_default = new GenericEncoder(); + + [NotNull] + public static IValueEncoder BinaryEncoder => s_default; + + [NotNull] + public static IValueEncoder StringEncoder => s_default; + + [NotNull] + public static IValueEncoder Int32Encoder => s_default; + + [NotNull] + public static IValueEncoder Int64Encoder => s_default; + + [NotNull] + public static IValueEncoder GuidEncoder => s_default; + + /// Create a simple encoder from a codec + [NotNull] + public static IValueEncoder Bind([NotNull] IUnorderedTypeCodec codec) + { + Contract.NotNull(codec, nameof(codec)); + + if (codec is IValueEncoder encoder) return encoder; + + return new Singleton( + (value) => codec.EncodeUnordered(value), + (encoded) => codec.DecodeUnordered(encoded) + ); + } + + internal sealed class GenericEncoder : IValueEncoder, IValueEncoder, IValueEncoder, IValueEncoder, 
IValueEncoder + { + + public Slice EncodeValue(Slice value) + { + return value; + } + + Slice IValueEncoder.DecodeValue(Slice encoded) + { + return encoded; + } + + public Slice EncodeValue(string value) + { + return Slice.FromString(value); + } + + string IValueEncoder.DecodeValue(Slice encoded) + { + return encoded.ToUnicode(); + } + + public Slice EncodeValue(int value) + { + return Slice.FromInt32(value); + } + + int IValueEncoder.DecodeValue(Slice encoded) + { + return encoded.ToInt32(); + } + + public Slice EncodeValue(long value) + { + return Slice.FromInt64(value); + } + + long IValueEncoder.DecodeValue(Slice encoded) + { + return encoded.ToInt64(); + } + + public Slice EncodeValue(Guid value) + { + return Slice.FromGuid(value); + } + + Guid IValueEncoder.DecodeValue(Slice encoded) + { + return encoded.ToGuid(); + } + + } + + } + + } + +} diff --git a/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.cs new file mode 100644 index 000000000..a3b8c84b6 --- /dev/null +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/KeyValueEncoders.cs @@ -0,0 +1,169 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Serialization.Encoders +{ + using System; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using JetBrains.Annotations; + + /// Helper class for all key/value encoders + [PublicAPI] + public static partial class KeyValueEncoders + { + /// Identity function for binary slices + public static readonly IdentityEncoder Binary = new IdentityEncoder(); + + #region Nested Classes + + /// Identity encoder + public sealed class IdentityEncoder : IKeyEncoder, IValueEncoder, IKeyEncoding + { + + internal IdentityEncoder() { } + + #region IKeyEncoder... 
+ + public IKeyEncoding Encoding => this; + + public void WriteKeyTo(ref SliceWriter writer, Slice key) + { + writer.WriteBytes(key); + } + + public void ReadKeyFrom(ref SliceReader reader, out Slice value) + { + value = reader.ReadToEnd(); + } + + public Slice EncodeValue(Slice value) + { + return value; + } + + public Slice DecodeValue(Slice encoded) + { + return encoded; + } + + #endregion + + IKeyEncoder IKeyEncoding.GetKeyEncoder() + { + if (typeof(T1) != typeof(Slice)) throw new NotSupportedException(); + return (IKeyEncoder) (object) this; + } + + IDynamicKeyEncoder IKeyEncoding.GetDynamicKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + } + + /// Wrapper for encoding and decoding a singleton with lambda functions + internal sealed class Singleton : IKeyEncoder, IValueEncoder, IKeyEncoding + { + private readonly Func m_encoder; + private readonly Func m_decoder; + + public Singleton(Func encoder, Func decoder) + { + Contract.Requires(encoder != null && decoder != null); + + m_encoder = encoder; + m_decoder = decoder; + } + + public Type[] GetTypes() + { + return new[] { typeof(T) }; + } + + public void WriteKeyTo(ref SliceWriter writer, T value) + { + writer.WriteBytes(m_encoder(value)); + } + + public void ReadKeyFrom(ref SliceReader reader, out T value) + { + value = m_decoder(reader.ReadToEnd()); + } + + public Slice EncodeValue(T value) + { + return m_encoder(value); + } + + public T DecodeValue(Slice encoded) + { + return m_decoder(encoded); + } + + public IKeyEncoding Encoding => this; + + IKeyEncoder IKeyEncoding.GetKeyEncoder() + { + if (typeof(T1) != typeof(T)) throw new NotSupportedException(); + return (IKeyEncoder) (object) this; + } + + IDynamicKeyEncoder 
IKeyEncoding.GetDynamicKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + + ICompositeKeyEncoder IKeyEncoding.GetKeyEncoder() => throw new NotSupportedException(); + } + + #endregion + + #region Keys... + + /// Binds a pair of lambda functions to a key encoder + /// Type of the key to encode + /// Lambda function called to encode a key into a binary slice + /// Lambda function called to decode a binary slice into a key + /// Key encoder usable by any Layer that works on keys of type + [NotNull] + public static IKeyEncoder Bind([NotNull] Func encoder, [NotNull] Func decoder) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(decoder, nameof(decoder)); + return new Singleton(encoder, decoder); + } + + #endregion + + } +} diff --git a/FoundationDB.Client/Shared/TypeSystem/Encoders/ValueEncoderExtensions.cs b/FoundationDB.Client/Shared/TypeSystem/Encoders/ValueEncoderExtensions.cs new file mode 100644 index 000000000..e64f59a77 --- /dev/null +++ b/FoundationDB.Client/Shared/TypeSystem/Encoders/ValueEncoderExtensions.cs @@ -0,0 +1,133 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +using System; + +namespace Doxense.Serialization.Encoders +{ + using System; + using System.Collections.Generic; + using System.Linq; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; + + public static class ValueEncoderExtensions + { + + /// Convert an array of s into an array of slices, using a serializer (or the default serializer if none is provided) + [NotNull] + public static Slice[] EncodeValues([NotNull] this IValueEncoder encoder, [NotNull] params T[] values) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(values, nameof(values)); + + var slices = new Slice[values.Length]; + for (int i = 0; i < values.Length; i++) + { + slices[i] = encoder.EncodeValue(values[i]); + } + + return slices; + } + + /// Transform a sequence of s into a sequence of slices, using a serializer (or the default serializer if none is provided) + [NotNull] + public static IEnumerable EncodeValues([NotNull] this IValueEncoder encoder, [NotNull] IEnumerable values) + { + Contract.NotNull(encoder, nameof(encoder)); + 
Contract.NotNull(values, nameof(values)); + + // note: T=>Slice usually is used for writing batches as fast as possible, which means that keys will be consumed immediately and don't need to be streamed + + var array = values as T[]; + if (array != null) + { // optimized path for arrays + return EncodeValues(encoder, array); + } + + var coll = values as ICollection; + if (coll != null) + { // optimized path when we know the count + var slices = new List(coll.Count); + foreach (var value in coll) + { + slices.Add(encoder.EncodeValue(value)); + } + return slices; + } + + return values.Select(value => encoder.EncodeValue(value)); + } + + /// Convert an array of slices back into an array of s, using a serializer (or the default serializer if none is provided) + [NotNull] + public static T[] DecodeValues([NotNull] this IValueEncoder encoder, [NotNull] params Slice[] slices) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(slices, nameof(slices)); + + var values = new T[slices.Length]; + for (int i = 0; i < slices.Length; i++) + { + values[i] = encoder.DecodeValue(slices[i]); + } + + return values; + } + + /// Convert the values of an array of key value pairs of slices back into an array of s, using a serializer (or the default serializer if none is provided) + [NotNull] + public static T[] DecodeValues([NotNull] this IValueEncoder encoder, [NotNull] KeyValuePair[] items) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(items, nameof(items)); + + var values = new T[items.Length]; + for (int i = 0; i < items.Length; i++) + { + values[i] = encoder.DecodeValue(items[i].Value); + } + + return values; + } + + /// Transform a sequence of slices back into a sequence of s, using a serializer (or the default serializer if none is provided) + [NotNull] + public static IEnumerable DecodeValues([NotNull] this IValueEncoder encoder, [NotNull] IEnumerable slices) + { + Contract.NotNull(encoder, nameof(encoder)); + Contract.NotNull(slices, 
nameof(slices)); + + // Slice=>T may be filtered in LINQ queries, so we should probably stream the values (so no optimization needed) + + return slices.Select(slice => encoder.DecodeValue(slice)); + } + + } + +} diff --git a/FoundationDB.Client/TypeSystem/IFdbKeyEncoding.cs b/FoundationDB.Client/Shared/TypeSystem/IKeyEncoding.cs similarity index 80% rename from FoundationDB.Client/TypeSystem/IFdbKeyEncoding.cs rename to FoundationDB.Client/Shared/TypeSystem/IKeyEncoding.cs index 28ac7c288..0d7e4a3b6 100644 --- a/FoundationDB.Client/TypeSystem/IFdbKeyEncoding.cs +++ b/FoundationDB.Client/Shared/TypeSystem/IKeyEncoding.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,27 +26,31 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -using System; -using JetBrains.Annotations; - -namespace FoundationDB.Client //REVIEW: what namespace? +namespace Doxense.Serialization.Encoders { - /// Type system that handles values of arbitrary sizes and types - public interface IFdbKeyEncoding + using System; + using JetBrains.Annotations; + + /// Type system that handles encoding and decoding of differnt types of keys + /// + /// An implementation of this interface knows to create different types of Key Encoders that will all use the same "binary format" to encode and decode keys of various shapes. + /// A good analogy for values would be a 'JSON' encoding, or 'XML' encoding. + /// + public interface IKeyEncoding //REVIEW: rename to "IKeyEncodingScheme"? "IKeyTypeSystem"? 
{ /// Returns an encoder which can process keys of any size and types /// Encoder that encodes dynamic keys /// If this encoding does not support dynamic keys [NotNull] - IDynamicKeyEncoder GetDynamicEncoder(); + IDynamicKeyEncoder GetDynamicKeyEncoder(); /// Returns an encoder which can process keys composed of a single element of a fixed type /// Type of the element to encode /// Key encoder /// If this encoding does not support static keys [NotNull] - IKeyEncoder GetEncoder(); + IKeyEncoder GetKeyEncoder(); /// Returns an encoder which can process keys composed of a two elements of fixed types /// Type of the first element to encode @@ -54,7 +58,7 @@ public interface IFdbKeyEncoding /// Composite key encoder /// If this encoding does not support static keys of size 2 [NotNull] - ICompositeKeyEncoder GetEncoder(); + ICompositeKeyEncoder GetKeyEncoder(); /// Returns an encoder which can process keys composed of a three elements of fixed types /// Type of the first element to encode @@ -63,7 +67,7 @@ public interface IFdbKeyEncoding /// Composite key encoder /// If this encoding does not support static keys of size 3 [NotNull] - ICompositeKeyEncoder GetEncoder(); + ICompositeKeyEncoder GetKeyEncoder(); /// Returns an encoder which can process keys composed of a four elements of fixed types /// Type of the first element to encode @@ -73,8 +77,7 @@ public interface IFdbKeyEncoding /// Composite key encoder /// If this encoding does not support static keys of size 4 [NotNull] - ICompositeKeyEncoder GetEncoder(); + ICompositeKeyEncoder GetKeyEncoder(); } - -} \ No newline at end of file +} diff --git a/FoundationDB.Client/TypeSystem/IOrderedTypeCodec.cs b/FoundationDB.Client/Shared/TypeSystem/IOrderedTypeCodec.cs similarity index 94% rename from FoundationDB.Client/TypeSystem/IOrderedTypeCodec.cs rename to FoundationDB.Client/Shared/TypeSystem/IOrderedTypeCodec.cs index c2b1edd44..c792e6b75 100644 --- a/FoundationDB.Client/TypeSystem/IOrderedTypeCodec.cs +++ 
b/FoundationDB.Client/Shared/TypeSystem/IOrderedTypeCodec.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,16 +26,20 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client + +namespace Doxense.Serialization.Encoders { using System; + using Doxense.Memory; public interface IOrderedTypeCodec { void EncodeOrderedSelfTerm(ref SliceWriter output, T value); + T DecodeOrderedSelfTerm(ref SliceReader input); Slice EncodeOrdered(T value); + T DecodeOrdered(Slice input); } diff --git a/FoundationDB.Client/Linq/IFdbAsyncEnumerator.cs b/FoundationDB.Client/Shared/TypeSystem/ITypeSystem.cs similarity index 87% rename from FoundationDB.Client/Linq/IFdbAsyncEnumerator.cs rename to FoundationDB.Client/Shared/TypeSystem/ITypeSystem.cs index 166b83334..ccb10d5cf 100644 --- a/FoundationDB.Client/Linq/IFdbAsyncEnumerator.cs +++ b/FoundationDB.Client/Shared/TypeSystem/ITypeSystem.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,13 +26,14 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq +namespace Doxense.Serialization.Encoders { - using FoundationDB.Async; using System; - public interface IFdbAsyncEnumerator : IAsyncEnumerator + using JetBrains.Annotations; + + public interface ITypeSystem : IKeyEncoding, IValueEncoding { - //TODO: add method specific to FDB here + [NotNull] + string Name { get; } } - } diff --git a/FoundationDB.Client/TypeSystem/IUnorderedTypeCodec.cs b/FoundationDB.Client/Shared/TypeSystem/IUnorderedTypeCodec.cs similarity index 94% rename from FoundationDB.Client/TypeSystem/IUnorderedTypeCodec.cs rename to FoundationDB.Client/Shared/TypeSystem/IUnorderedTypeCodec.cs index a51a126ac..d6c15f524 100644 --- a/FoundationDB.Client/TypeSystem/IUnorderedTypeCodec.cs +++ b/FoundationDB.Client/Shared/TypeSystem/IUnorderedTypeCodec.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,16 +26,19 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Serialization.Encoders { using System; + using Doxense.Memory; public interface IUnorderedTypeCodec { void EncodeUnorderedSelfTerm(ref SliceWriter output, T value); + T DecodeUnorderedSelfTerm(ref SliceReader input); Slice EncodeUnordered(T value); + T DecodeUnordered(Slice input); } diff --git a/FoundationDB.Client/TypeSystem/IKeyValueEncoder.cs b/FoundationDB.Client/Shared/TypeSystem/IValueEncoding.cs similarity index 78% rename from FoundationDB.Client/TypeSystem/IKeyValueEncoder.cs rename to FoundationDB.Client/Shared/TypeSystem/IValueEncoding.cs index b8d561bfd..069517dfc 100644 --- a/FoundationDB.Client/TypeSystem/IKeyValueEncoder.cs +++ b/FoundationDB.Client/Shared/TypeSystem/IValueEncoding.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,16 +26,19 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace Doxense.Serialization.Encoders { using System; + using JetBrains.Annotations; - public interface IKeyEncoder + public interface IValueEncoding { - /// Encode a single value - Slice EncodeKey(T1 value); + /// Returns an encoder which can process values of a fixed type + /// Type of the element to encode + /// Value encoder + [NotNull] + IValueEncoder GetValueEncoder(); - /// Decode a single value - T1 DecodeKey(Slice encoded); + //TODO: DynamicValueValue! 
} } diff --git a/FoundationDB.Client/TypeSystem/FdbTypeCodec`1.cs b/FoundationDB.Client/Shared/TypeSystem/TypeCodec`1.cs similarity index 89% rename from FoundationDB.Client/TypeSystem/FdbTypeCodec`1.cs rename to FoundationDB.Client/Shared/TypeSystem/TypeCodec`1.cs index d323b24d1..76527f00f 100644 --- a/FoundationDB.Client/TypeSystem/FdbTypeCodec`1.cs +++ b/FoundationDB.Client/Shared/TypeSystem/TypeCodec`1.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,14 +26,12 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -using System.ComponentModel; -using System.Runtime.CompilerServices; - -namespace FoundationDB.Client +namespace Doxense.Serialization.Encoders { using System; + using Doxense.Memory; - public abstract class FdbTypeCodec : IOrderedTypeCodec, IUnorderedTypeCodec + public abstract class TypeCodec : IOrderedTypeCodec, IUnorderedTypeCodec { public abstract void EncodeOrderedSelfTerm(ref SliceWriter output, T value); @@ -42,7 +40,7 @@ public abstract class FdbTypeCodec : IOrderedTypeCodec, IUnorderedTypeCode public virtual Slice EncodeOrdered(T value) { - var writer = SliceWriter.Empty; + var writer = default(SliceWriter); EncodeOrderedSelfTerm(ref writer, value); return writer.ToSlice(); } @@ -65,7 +63,7 @@ public virtual T DecodeUnorderedSelfTerm(ref SliceReader input) public virtual Slice EncodeUnordered(T value) { - var writer = SliceWriter.Empty; + var writer = default(SliceWriter); EncodeUnorderedSelfTerm(ref writer, value); return writer.ToSlice(); } diff --git a/FoundationDB.Client/Utils/Uuid128.cs b/FoundationDB.Client/Shared/Uuid128.cs similarity index 50% rename from FoundationDB.Client/Utils/Uuid128.cs rename to FoundationDB.Client/Shared/Uuid128.cs index 8a7249fd1..e71429f27 100644 --- a/FoundationDB.Client/Utils/Uuid128.cs +++ b/FoundationDB.Client/Shared/Uuid128.cs 
@@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,17 +26,23 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client +namespace System { - using JetBrains.Annotations; using System; + using System.Collections.Generic; using System.ComponentModel; + using System.Diagnostics; + using System.Runtime.CompilerServices; using System.Runtime.InteropServices; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using JetBrains.Annotations; - /// RFC 4122 compliant 128-bit UUID - /// You should use this type if you are primarily exchanged UUIDs with non-.NET platforms, that use the RFC 4122 byte ordering (big endian). The type System.Guid uses the Microsoft encoding (little endian) and is not compatible. + /// Represents an RFC 4122 compliant 128-bit UUID + /// You should use this type if you are primarily exchanging UUIDs with non-.NET platforms, that use the RFC 4122 byte ordering (big endian). The type System.Guid uses the Microsoft encoding (little endian) and is not compatible. + [DebuggerDisplay("[{ToString(),nq}]")] [ImmutableObject(true), StructLayout(LayoutKind.Explicit), Serializable] - public struct Uuid128 : IFormattable, IComparable, IEquatable, IComparable, IEquatable + public readonly struct Uuid128 : IFormattable, IComparable, IEquatable, IComparable, IEquatable { // This is just a wrapper struct on System.Guid that makes sure that ToByteArray() and Parse(byte[]) and new(byte[]) will parse according to RFC 4122 (http://www.ietf.org/rfc/rfc4122.txt) // For performance reasons, we will store the UUID as a System.GUID (Microsoft in-memory format), and swap the bytes when needed. @@ -48,7 +54,7 @@ public struct Uuid128 : IFormattable, IComparable, IEquatable, ICompara // Significant Byte first (known as network byte order). 
Note that the // field names, particularly for multiplexed fields, follow historical // practice. - + // 0 1 2 3 // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ @@ -93,6 +99,7 @@ public struct Uuid128 : IFormattable, IComparable, IEquatable, ICompara #region Constructors... + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Uuid128(Guid guid) : this() { @@ -103,33 +110,56 @@ public Uuid128(string value) : this(new Guid(value)) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Uuid128(Slice slice) : this() { m_packed = Convert(slice); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Uuid128(byte[] bytes) - : this(Slice.Create(bytes)) - { } + : this() + { + m_packed = Convert(bytes.AsSlice()); + } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Uuid128(int a, short b, short c, byte[] d) : this(new Guid(a, b, c, d)) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Uuid128(int a, short b, short c, byte d, byte e, byte f, byte g, byte h, byte i, byte j, byte k) : this(new Guid(a, b, c, d, e, f, g, h, i, j, k)) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Uuid128(uint a, ushort b, ushort c, byte d, byte e, byte f, byte g, byte h, byte i, byte j, byte k) : this(new Guid(a, b, c, d, e, f, g, h, i, j, k)) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid128(Uuid64 a, Uuid64 b) + : this() + { + m_packed = Convert(a, b); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid128(Uuid64 a, uint b, uint c) + : this() + { + m_packed = Convert(a, b, c); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static explicit operator Guid(Uuid128 uuid) { return uuid.m_packed; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static explicit operator Uuid128(Guid guid) { return new Uuid128(guid); @@ -137,28 +167,67 @@ public static explicit operator 
Uuid128(Guid guid) public static readonly Uuid128 Empty = default(Uuid128); + /// Size is 16 bytes + public const int SizeOf = 16; + public static Uuid128 NewUuid() { return new Uuid128(Guid.NewGuid()); } - internal static Guid Convert(Slice input) + public static Guid Convert(Slice input) { - if (input.Count <= 0) return default(Guid); + input.EnsureSliceIsValid(); + if (input.Count == 0) return default(Guid); + if (input.Count != 16) throw new ArgumentException("Slice for UUID must be exactly 16 bytes long"); - if (input.Array == null) throw new ArgumentNullException("input"); - if (input.Count == 16) + unsafe { - unsafe + fixed (byte* buf = &input.DangerousGetPinnableReference()) { - fixed (byte* buf = input.Array) - { - return Read(buf + input.Offset); - } + return ReadUnsafe(buf); } } + } + + public static unsafe Guid Convert(byte* buffer, int count) + { + if (count == 0) return default(Guid); + if (count != 16) throw new ArgumentException("Slice for UUID must be exactly 16 bytes long"); - throw new ArgumentException("Slice for UUID must be exactly 16 bytes long"); + return ReadUnsafe(buffer); + } + + public static Guid Convert(Uuid64 a, Uuid64 b) + { + unsafe + { + byte* buf = stackalloc byte[16]; + a.WriteToUnsafe(buf); + b.WriteToUnsafe(buf + 8); + return ReadUnsafe(buf); + } + } + + public static Guid Convert(Uuid64 a, uint b, uint c) + { + unsafe + { + byte* buf = stackalloc byte[16]; + a.WriteToUnsafe(buf); + + buf[8] = (byte) b; + buf[9] = (byte)(b >> 8); + buf[10] = (byte)(b >> 16); + buf[11] = (byte)(b >> 24); + + buf[12] = (byte) c; + buf[13] = (byte)(c >> 8); + buf[14] = (byte)(c >> 16); + buf[15] = (byte)(c >> 24); + + return ReadUnsafe(buf); + } } public static Uuid128 Parse([NotNull] string input) @@ -173,8 +242,7 @@ public static Uuid128 ParseExact([NotNull] string input, string format) public static bool TryParse(string input, out Uuid128 result) { - Guid guid; - if (!Guid.TryParse(input, out guid)) + if (!Guid.TryParse(input, out Guid guid)) { 
result = default(Uuid128); return false; @@ -185,8 +253,7 @@ public static bool TryParse(string input, out Uuid128 result) public static bool TryParseExact(string input, string format, out Uuid128 result) { - Guid guid; - if (!Guid.TryParseExact(input, format, out guid)) + if (!Guid.TryParseExact(input, format, out Guid guid)) { result = default(Uuid128); return false; @@ -199,6 +266,7 @@ public static bool TryParseExact(string input, string format, out Uuid128 result public long Timestamp { + [Pure] get { long ts = m_timeLow; @@ -210,6 +278,7 @@ public long Timestamp public int Version { + [Pure] get { return m_timeHiAndVersion >> 12; @@ -218,6 +287,7 @@ public int Version public int ClockSequence { + [Pure] get { int clk = m_clkSeqLow; @@ -228,6 +298,7 @@ public int ClockSequence public long Node { + [Pure] get { long node; @@ -236,15 +307,17 @@ public long Node node |= ((long)m_node2) << 24; node |= ((long)m_node3) << 16; node |= ((long)m_node4) << 8; - node |= (long)m_node5; + node |= m_node5; return node; } } - #region Conversion... + #region Unsafe I/O... 
- internal unsafe static Guid Read(byte* src) + [Pure] + public static unsafe Guid ReadUnsafe([NotNull] byte* src) { + Contract.Requires(src != null); Guid tmp; if (BitConverter.IsLittleEndian) @@ -275,11 +348,30 @@ internal unsafe static Guid Read(byte* src) return tmp; } - internal unsafe static void Write(Guid value, byte* ptr) + public static Guid ReadUnsafe([NotNull] byte[] buffer, int offset) + { + Contract.Requires(buffer != null && offset >= 0 && offset + 15 < buffer.Length); + unsafe + { + fixed (byte* ptr = &buffer[offset]) + { + return ReadUnsafe(ptr); + } + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static unsafe void WriteUnsafe(Guid value, [NotNull] byte* ptr) { + WriteUnsafe(&value, ptr); + } + + internal static unsafe void WriteUnsafe([NotNull] Guid* value, [NotNull] byte* ptr) + { + Contract.Requires(value != null && ptr != null); if (BitConverter.IsLittleEndian) { - byte* src = (byte*)&value; + byte* src = (byte*) value; // Data1: 32 bits, must swap ptr[0] = src[3]; @@ -297,19 +389,91 @@ internal unsafe static void Write(Guid value, byte* ptr) } else { - long* src = (long*)&value; + long* src = (long*) value; *(long*)(ptr) = src[0]; *(long*)(ptr + 8) = src[1]; } } - internal unsafe void WriteTo(byte* ptr) + public static void WriteUnsafe(Guid value, [NotNull] byte[] buffer, int offset) { - Write(m_packed, ptr); + Contract.Requires(buffer != null && offset >= 0 && offset + 15 < buffer.Length); + unsafe + { + fixed (byte* ptr = &buffer[offset]) + { + WriteUnsafe(value, ptr); + } + } } - [Pure] + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public unsafe void WriteToUnsafe([NotNull] byte* ptr) + { + WriteUnsafe(m_packed, ptr); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteToUnsafe([NotNull] byte[] buffer, int offset) + { + WriteUnsafe(m_packed, buffer, offset); + } + + #endregion + + #region Decomposition... 
+ + /// Split this 128-bit UUID into two 64-bit UUIDs + /// Receives the first 8 bytes (in network order) of this UUID + /// Receives the last 8 bytes (in network order) of this UUID + public void Split(out Uuid64 high, out Uuid64 low) + { + unsafe + { + byte* buffer = stackalloc byte[16]; + WriteUnsafe(m_packed, buffer); + high = new Uuid64(Uuid64.ReadUnsafe(buffer)); + low = new Uuid64(Uuid64.ReadUnsafe(buffer + 8)); + } + } + + /// Split this 128-bit UUID into two 64-bit numbers + /// Receives the first 8 bytes (in network order) of this UUID + /// Receives the last 8 bytes (in network order) of this UUID + public void Split(out ulong high, out ulong low) + { + unsafe + { + byte* buffer = stackalloc byte[16]; + WriteUnsafe(m_packed, buffer); + high = Uuid64.ReadUnsafe(buffer); + low = Uuid64.ReadUnsafe(buffer + 8); + } + } + + /// Split this 128-bit UUID into two 64-bit numbers + /// Receives the first 8 bytes (in network order) of this UUID + /// Receives the middle 4 bytes (in network order) of this UUID + /// Receives the last 4 bytes (in network order) of this UUID + public void Split(out ulong high, out uint mid, out uint low) + { + unsafe + { + byte* buffer = stackalloc byte[16]; + WriteUnsafe(m_packed, buffer); + high = Uuid64.ReadUnsafe(buffer); + var id = Uuid64.ReadUnsafe(buffer + 8); + mid = (uint) (id >> 32); + low = (uint) id; + } + } + + #endregion + + #region Conversion... + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] public Guid ToGuid() { return m_packed; @@ -324,14 +488,15 @@ public byte[] ToByteArray() unsafe { fixed (byte* ptr = res) + fixed (Uuid128* self = &this) { - Write(m_packed, ptr); + WriteUnsafe((Guid*) self, ptr); } } return res; } - [Pure] + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] public Slice ToSlice() { //TODO: optimize this ? 
@@ -353,6 +518,57 @@ public string ToString(string format, IFormatProvider provider) return m_packed.ToString(format, provider); } + /// Increment the value of this UUID + /// Positive value + /// Incremented UUID + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid128 Increment([Positive] int value) + { + Contract.Requires(value >= 0); + return Increment(checked((ulong)value)); + } + + /// Increment the value of this UUID + /// Positive value + /// Incremented UUID + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid128 Increment([Positive] long value) + { + Contract.Requires(value >= 0); + return Increment(checked((ulong)value)); + } + + /// Increment the value of this UUID + /// Value to add to this UUID + /// Incremented UUID + [Pure] + public Uuid128 Increment(ulong value) + { + unsafe + { + fixed (Uuid128* self = &this) + { + // serialize GUID into High Endian format + byte* buf = stackalloc byte[16]; + WriteUnsafe((Guid*)self, buf); + + // Add the low 64 bits (in HE) + ulong lo = UnsafeHelpers.LoadUInt64BE(buf + 8); + ulong sum = lo + value; + if (sum < value) + { // overflow occured, we must carry to the high 64 bits (in HE) + ulong hi = UnsafeHelpers.LoadUInt64BE(buf); + UnsafeHelpers.StoreUInt64BE(buf, unchecked(hi + 1)); + } + UnsafeHelpers.StoreUInt64BE(buf + 8, sum); + // deserialize back to GUID + return new Uuid128(ReadUnsafe(buf)); + } + } + } + + //TODO: Decrement + #endregion #region Equality / Comparison ... @@ -360,46 +576,55 @@ public string ToString(string format, IFormatProvider provider) public override bool Equals(object obj) { if (obj == null) return false; - if (obj is Uuid128) return m_packed == ((Uuid128)obj); - if (obj is Guid) return m_packed == ((Guid)obj); + if (obj is Uuid128 u128) return m_packed == u128.m_packed; + if (obj is Guid g) return m_packed == g; + //TODO: Slice? string? 
return false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Equals(Uuid128 other) { return m_packed == other.m_packed; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Equals(Guid other) { return m_packed == other; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator ==(Uuid128 a, Uuid128 b) { return a.m_packed == b.m_packed; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator !=(Uuid128 a, Uuid128 b) { return a.m_packed != b.m_packed; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator ==(Uuid128 a, Guid b) { return a.m_packed == b; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator !=(Uuid128 a, Guid b) { return a.m_packed != b; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator ==(Guid a, Uuid128 b) { return a == b.m_packed; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator !=(Guid a, Uuid128 b) { return a != b.m_packed; @@ -410,6 +635,7 @@ public override int GetHashCode() return m_packed.GetHashCode(); } + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] public int CompareTo(Uuid128 other) { return m_packed.CompareTo(other.m_packed); @@ -417,16 +643,42 @@ public int CompareTo(Uuid128 other) public int CompareTo(object obj) { - if (obj == null) return 1; - - if (obj is Uuid128) - return m_packed.CompareTo(((Uuid128)obj).m_packed); - else - return m_packed.CompareTo(obj); + switch (obj) + { + case null: return 1; + case Uuid128 u128: return m_packed.CompareTo(u128.m_packed); + case Guid g: return m_packed.CompareTo(g); + } + return m_packed.CompareTo(obj); } #endregion + /// Instance of this times can be used to test Uuid128 for equality and ordering + public sealed class Comparer : IEqualityComparer, IComparer + { + + public static readonly Comparer Default = new Comparer(); + + private Comparer() + { } + + public bool 
Equals(Uuid128 x, Uuid128 y) + { + return x.m_packed.Equals(y.m_packed); + } + + public int GetHashCode(Uuid128 obj) + { + return obj.m_packed.GetHashCode(); + } + + public int Compare(Uuid128 x, Uuid128 y) + { + return x.m_packed.CompareTo(y.m_packed); + } + } + } } diff --git a/FoundationDB.Client/Shared/Uuid64.cs b/FoundationDB.Client/Shared/Uuid64.cs new file mode 100644 index 000000000..04c1e0a40 --- /dev/null +++ b/FoundationDB.Client/Shared/Uuid64.cs @@ -0,0 +1,1241 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +using System.Security.Cryptography; + +namespace System +{ + using System; + using System.Collections.Generic; + using System.ComponentModel; + using System.Diagnostics; + using System.Globalization; + using System.Runtime.CompilerServices; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using JetBrains.Annotations; + + /// Represents a 64-bit UUID that is stored in high-endian format on the wire + [DebuggerDisplay("[{ToString(),nq}]")] + [ImmutableObject(true), Serializable] + public readonly struct Uuid64 : IFormattable, IEquatable, IComparable + { + public static readonly Uuid64 Empty = default(Uuid64); + + /// Size is 8 bytes + public const int SizeOf = 8; + + private readonly ulong m_value; + //note: this will be in host order (so probably Little-Endian) in order to simplify parsing and ordering + + #region Constructors... + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid64(ulong value) + { + m_value = value; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid64(long value) + { + m_value = (ulong)value; + } + + /// Pack two 32-bits components into a 64-bit UUID + /// Upper 32 bits (XXXXXXXX-........) + /// Lower 32 bits (........-XXXXXXXX) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid64(uint a, uint b) + { + //Contract.Requires((ulong) b < (1UL << 48)); + m_value = ((ulong) a << 32) | b; + } + + /// Pack two components into a 64-bit UUID + /// Upper 16 bits (XXXX....-........) 
+ /// Lower 48 bits (....XXXX-XXXXXXXX) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public Uuid64(ushort a, long b) + { + //Contract.Requires((ulong) b < (1UL << 48)); + m_value = ((ulong) a << 48) | ((ulong) b & ((1UL << 48) - 1)); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static Exception FailInvalidBufferSize([InvokerParameterName] string arg) + { + return ThrowHelper.ArgumentException(arg, "Value must be 8 bytes long"); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.NoInlining)] + private static Exception FailInvalidFormat() + { + return ThrowHelper.FormatException("Invalid " + nameof(Uuid64) + " format"); + } + + /// Generate a new random 64-bit UUID, using a global source of randomness. + /// Instance of a new Uuid64 that is random. + /// + ///

If you need sequential uuids, you should use a different generator (ex: FlakeID, ...)

+ ///

This method uses a cryptographic RNG under a lock to generate 8 bytes of randomness, which can be slow. If you must generate a large number of unique ids, you should use a different source.

+ ///
+ [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Uuid64 NewUuid() + { + //Note: we chould use Guid.NewGuid() as a source of randomness, but even though a guid is "guaranteed" to be unique, a substring of a guid is not.. or is it? + return Uuid64RandomGenerator.Default.NewUuid(); + } + + #endregion + + #region Decomposition... + + /// Split into two 32-bit halves + /// Most significant 32 bits + /// Least significant 32 bits + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void Split(out uint a, out uint b) + { + a = (uint) (m_value >> 32); + b = (uint) m_value; + } + + /// Split into two halves + /// Most significant 16 bits + /// Least significant 48 bits + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void Split(out ushort a, out long b) + { + a = (ushort) (m_value >> 48); + b = (long) (m_value & ~((1UL << 48) - 1)); + } + + #endregion + + #region Reading... + + /// Read a 64-bit UUID from a byte array + /// Array of exactly 0 or 8 bytes + [Pure] + public static Uuid64 Read(byte[] value) + { + Contract.NotNull(value, nameof(value)); + if (value.Length == 0) return default(Uuid64); + if (value.Length == 8) return new Uuid64(ReadUnsafe(value, 0)); + throw FailInvalidBufferSize(nameof(value)); + } + + /// Read a 64-bit UUID from part of a byte array + [Pure] + [Obsolete("Use Uuid64.Read(ReadOnlySpan) instead!")] + public static Uuid64 Read(byte[] value, int offset, int count) + { + Contract.DoesNotOverflow(value, offset, count, nameof(value)); + if (count == 0) return default(Uuid64); + if (count == 8) return new Uuid64(ReadUnsafe(value, 0)); + throw FailInvalidBufferSize(nameof(count)); + } + + /// Read a 64-bit UUID from slice of memory + /// slice of exactly 0 or 8 bytes + [Pure] + public static Uuid64 Read(Slice value) + { + Contract.NotNull(value.Array, nameof(value)); + if (value.Count == 0) return default(Uuid64); + if (value.Count == 8) return new Uuid64(ReadUnsafe(value.Array, value.Offset)); + throw 
FailInvalidBufferSize(nameof(value)); + } + + /// Read a 64-bit UUID from slice of memory + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static unsafe Uuid64 Read(byte* ptr, uint count) + { + if (count == 0) return default(Uuid64); + if (count == 8) return new Uuid64(ReadUnsafe(ptr)); + throw FailInvalidBufferSize(nameof(count)); + } + + #endregion + + #region Parsing... + +#if ENABLED_SPAN + + /// Parse a string representation of an UUid64 + /// String in either formats: "", "badc0ffe-e0ddf00d", "badc0ffee0ddf00d", "{badc0ffe-e0ddf00d}", "{badc0ffee0ddf00d}" + /// Parsing is case-insensitive. The empty string is mapped to Uuid64.Empty. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Uuid64 Parse([NotNull] string buffer) + { + Contract.NotNull(buffer, nameof(buffer)); + if (!TryParse(buffer.AsSpan(), out var value)) + { + throw FailInvalidFormat(); + } + return value; + } + + /// Parse a string representation of an UUid64 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Uuid64 Parse(ReadOnlySpan buffer) + { + if (!TryParse(buffer, out var value)) + { + throw FailInvalidFormat(); + } + return value; + } + + /// Parse a string representation of an UUid64 + [Pure] + [Obsolete("Use Uuid64.Parse(ReadOnlySpan) instead", error: true)] //TODO: remove me! 
+ public static unsafe Uuid64 Parse(char* buffer, int count) + { + if (count == 0) return default(Uuid64); + if (!TryParse(new ReadOnlySpan(buffer, count), out var value)) + { + throw FailInvalidFormat(); + } + return value; + } + + /// Parse a Base62 encoded string representation of an UUid64 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Uuid64 FromBase62([NotNull] string buffer) + { + Contract.NotNull(buffer, nameof(buffer)); + if (!TryParseBase62(buffer.AsSpan(), out var value)) + { + throw FailInvalidFormat(); + } + return value; + } + + /// Try parsing a string representation of an UUid64 + public static bool TryParse([NotNull] string buffer, out Uuid64 result) + { + Contract.NotNull(buffer, nameof(buffer)); + return TryParse(buffer.AsSpan(), out result); + } + + /// Try parsing a string representation of an UUid64 + public static bool TryParse(ReadOnlySpan s, out Uuid64 result) + { + Contract.Requires(s != null); + + // we support the following formats: "{hex8-hex8}", "{hex16}", "hex8-hex8", "hex16" and "base62" + // we don't support base10 format, because there is no way to differentiate from hex or base62 + + result = default(Uuid64); + switch (s.Length) + { + case 0: + { // empty + return true; + } + case 16: + { // xxxxxxxxxxxxxxxx + return TryDecode16Unsafe(s, separator: false, out result); + } + case 17: + { // xxxxxxxx-xxxxxxxx + if (s[8] != '-') return false; + return TryDecode16Unsafe(s, separator: true, out result); + } + case 18: + { // {xxxxxxxxxxxxxxxx} + if (s[0] != '{' || s[17] != '}') + { + return false; + } + return TryDecode16Unsafe(s.Slice(1, s.Length - 2), separator: false, out result); + } + case 19: + { // {xxxxxxxx-xxxxxxxx} + if (s[0] != '{' || s[18] != '}') + { + return false; + } + return TryDecode16Unsafe(s.Slice(1, s.Length - 2), separator: true, out result); + } + default: + { + return false; + } + } + } + + public static bool TryParseBase62(ReadOnlySpan s, out Uuid64 result) + { + if (s.Length == 0) + 
{ + result = default(Uuid64); + return true; + } + + if (s.Length <= 11 && Base62.TryDecode(s, out ulong x)) + { + result = new Uuid64(x); + return true; + } + + result = default(Uuid64); + return false; + } + +#else + + /// Parse a string representation of an UUid64 + /// String in either formats: "", "badc0ffe-e0ddf00d", "badc0ffee0ddf00d", "{badc0ffe-e0ddf00d}", "{badc0ffee0ddf00d}" + /// Parsing is case-insensitive. The empty string is mapped to Uuid64.Empty. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Uuid64 Parse([NotNull] string buffer) + { + Contract.NotNull(buffer, nameof(buffer)); + unsafe + { + fixed (char* chars = buffer) + { + if (!TryParse(chars, buffer.Length, out var value)) + { + throw FailInvalidFormat(); + } + + return value; + } + } + } + + /// Parse a string representation of an UUid64 + [Pure] + [Obsolete("Use Uuid64.Parse(ReadOnlySpan) instead", error: true)] //TODO: remove me! + public static unsafe Uuid64 Parse(char* chars, int numChars) + { + if (numChars == 0) return default(Uuid64); + if (!TryParse(chars, numChars, out var value)) + { + throw FailInvalidFormat(); + } + return value; + } + + /// Parse a Base62 encoded string representation of an UUid64 + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Uuid64 FromBase62([NotNull] string buffer) + { + Contract.NotNull(buffer, nameof(buffer)); + unsafe + { + fixed (char* chars = buffer) + { + if (!TryParseBase62(chars, buffer.Length, out var value)) + { + throw FailInvalidFormat(); + } + + return value; + } + } + } + + /// Try parsing a string representation of an UUid64 + public static bool TryParse([NotNull] string buffer, out Uuid64 result) + { + Contract.NotNull(buffer, nameof(buffer)); + unsafe + { + fixed (char* chars = buffer) + { + return TryParse(chars, buffer.Length, out result); + } + } + } + + /// Try parsing a string representation of an UUid64 + public static unsafe bool TryParse(char* chars, int numChars, out Uuid64 
result) + { + Contract.Requires(chars != null && numChars >= 0); + + // we support the following formats: "{hex8-hex8}", "{hex16}", "hex8-hex8", "hex16" and "base62" + // we don't support base10 format, because there is no way to differentiate from hex or base62 + + result = default(Uuid64); + switch (numChars) + { + case 0: + { // empty + return true; + } + case 16: + { // xxxxxxxxxxxxxxxx + return TryDecode16Unsafe(chars, numChars, false, out result); + } + case 17: + { // xxxxxxxx-xxxxxxxx + if (chars[8] != '-') return false; + return TryDecode16Unsafe(chars, numChars, true, out result); + } + case 18: + { // {xxxxxxxxxxxxxxxx} + if (chars[0] != '{' || chars[17] != '}') + { + return false; + } + return TryDecode16Unsafe(chars + 1, numChars - 2, false, out result); + } + case 19: + { // {xxxxxxxx-xxxxxxxx} + if (chars[0] != '{' || chars[18] != '}') + { + return false; + } + return TryDecode16Unsafe(chars + 1, numChars - 2, true, out result); + } + default: + { + return false; + } + } + } + + public static unsafe bool TryParseBase62(char* chars, int numChars, out Uuid64 result) + { + if (numChars == 0) + { + result = default(Uuid64); + return true; + } + + if (numChars <= 11 && Base62.TryDecode(chars, numChars, out ulong x)) + { + result = new Uuid64(x); + return true; + } + + result = default(Uuid64); + return false; + + } +#endif + + + #endregion + + #region Casting... 
+ + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator Uuid64(ulong value) + { + return new Uuid64(value); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static explicit operator ulong(Uuid64 value) + { + return value.m_value; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static implicit operator Uuid64(long value) + { + return new Uuid64(value); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static explicit operator long(Uuid64 value) + { + return (long) value.m_value; + } + + #endregion + + #region IFormattable... + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public long ToInt64() + { + return (long) m_value; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public ulong ToUInt64() + { + return m_value; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice ToSlice() + { + return Slice.FromFixedU64BE(m_value); + } + + [Pure, NotNull] + public byte[] ToByteArray() + { + var bytes = Slice.FromFixedU64BE(m_value).Array; + Contract.Ensures(bytes != null && bytes.Length == 8); // HACKHACK: for perf reasons, we rely on the fact that Slice.FromFixedU64BE() allocates a new 8-byte array that we can return without copying + return bytes; + } + + /// Returns a string representation of the value of this instance. + /// String using the format "xxxxxxxx-xxxxxxxx", where 'x' is a lower-case hexadecimal digit + /// Strings returned by this method will always to 17 characters long. + public override string ToString() + { + return ToString("D", null); + } + + /// Returns a string representation of the value of this instance, according to the provided format specifier. + /// A single format specifier that indicates how to format the value of this Guid. The format parameter can be "D", "B", "X", "G", "Z" or "N". If format is null or an empty string (""), "D" is used. 
+ /// The value of this , using the specified format. + /// See for a description of the different formats + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public string ToString(string format) + { + return ToString(format, null); + } + + /// Returns a string representation of the value of this instance of the class, according to the provided format specifier and culture-specific format information. + /// A single format specifier that indicates how to format the value of this Guid. The format parameter can be "D", "N", "Z", "R", "X" or "B". If format is null or an empty string (""), "D" is used. + /// An object that supplies culture-specific formatting information. Only used for the "R" format. + /// The value of this , using the specified format. + /// + ///

The D format encodes the value as two groups of 8 hexadecimal digits, separated by an hyphen: "01234567-89abcdef" (17 characters).

+ ///

The X format encodes the value as a single group of 16 hexadecimal digits: "0123456789abcdef" (16 characters).

+ ///

The B format is equivalent to the D format, but surrounded with '{' and '}': "{01234567-89abcdef}" (19 characters).

+ ///

The R format encodes the value as a decimal number "1234567890" (1 to 20 characters) which can be parsed as an UInt64 without loss.

+ ///

The C format uses a compact base-62 encoding that preserves lexicographical ordering, composed of digits, uppercase alpha and lowercase alpha, suitable for compact representation that can fit in a querystring.

+ ///

The Z format is equivalent to the C format, but with extra padding so that the string is always 11 characters long.

+ ///
+ public string ToString(string format, IFormatProvider formatProvider) + { + if (string.IsNullOrEmpty(format)) format = "D"; + + switch(format) + { + case "D": + { // Default format is "xxxxxxxx-xxxxxxxx" + return Encode16(m_value, separator: true, quotes: false, upper: true); + } + case "d": + { // Default format is "xxxxxxxx-xxxxxxxx" + return Encode16(m_value, separator: true, quotes: false, upper: false); + } + + case "C": + case "c": + { // base 62, compact, no padding + return Base62.Encode(m_value, padded: false); + } + case "Z": + case "z": + { // base 62, padded with '0' up to 11 chars + return Base62.Encode(m_value, padded: true); + } + + case "R": + case "r": + { // Integer: "1234567890" + return m_value.ToString(null, formatProvider ?? CultureInfo.InvariantCulture); + } + + case "X": //TODO: Guid.ToString("X") returns "{0x.....,0x.....,...}" + case "N": + { // "XXXXXXXXXXXXXXXX" + return Encode16(m_value, separator: false, quotes: false, upper: true); + } + case "x": //TODO: Guid.ToString("X") returns "{0x.....,0x.....,...}" + case "n": + { // "xxxxxxxxxxxxxxxx" + return Encode16(m_value, separator: false, quotes: false, upper: false); + } + + case "B": + { // "{xxxxxxxx-xxxxxxxx}" + return Encode16(m_value, separator: true, quotes: true, upper: true); + } + case "b": + { // "{xxxxxxxx-xxxxxxxx}" + return Encode16(m_value, separator: true, quotes: true, upper: false); + } + default: + { + throw new FormatException("Invalid " + nameof(Uuid64) + " format specification."); + } + } + } + + #endregion + + #region IEquatable / IComparable... + + public override bool Equals(object obj) + { + switch (obj) + { + case Uuid64 u64: return Equals(u64); + case ulong ul: return m_value == ul; + case long l: return m_value == (ulong) l; + //TODO: string format ? Slice ? 
+ } + return false; + } + + public override int GetHashCode() + { + return ((int) m_value) ^ (int) (m_value >> 32); + } + + public bool Equals(Uuid64 other) + { + return m_value == other.m_value; + } + + public int CompareTo(Uuid64 other) + { + return m_value.CompareTo(other.m_value); + } + + #endregion + + #region Base16 encoding... + + [Pure] + private static char HexToLowerChar(int a) + { + a &= 0xF; + return a > 9 ? (char)(a - 10 + 'a') : (char)(a + '0'); + } + + [NotNull] + private static unsafe char* HexsToLowerChars([NotNull] char* ptr, int a) + { + Contract.Requires(ptr != null); + ptr[0] = HexToLowerChar(a >> 28); + ptr[1] = HexToLowerChar(a >> 24); + ptr[2] = HexToLowerChar(a >> 20); + ptr[3] = HexToLowerChar(a >> 16); + ptr[4] = HexToLowerChar(a >> 12); + ptr[5] = HexToLowerChar(a >> 8); + ptr[6] = HexToLowerChar(a >> 4); + ptr[7] = HexToLowerChar(a); + return ptr + 8; + } + + [Pure] + private static char HexToUpperChar(int a) + { + a &= 0xF; + return a > 9 ? (char)(a - 10 + 'A') : (char)(a + '0'); + } + + [NotNull] + private static unsafe char* HexsToUpperChars([NotNull] char* ptr, int a) + { + Contract.Requires(ptr != null); + ptr[0] = HexToUpperChar(a >> 28); + ptr[1] = HexToUpperChar(a >> 24); + ptr[2] = HexToUpperChar(a >> 20); + ptr[3] = HexToUpperChar(a >> 16); + ptr[4] = HexToUpperChar(a >> 12); + ptr[5] = HexToUpperChar(a >> 8); + ptr[6] = HexToUpperChar(a >> 4); + ptr[7] = HexToUpperChar(a); + return ptr + 8; + } + + [Pure, NotNull] + private static unsafe string Encode16(ulong value, bool separator, bool quotes, bool upper) + { + int size = 16 + (separator ? 1 : 0) + (quotes ? 2 : 0); + char* buffer = stackalloc char[24]; // max 19 mais on arrondi a 24 + + char* ptr = buffer; + if (quotes) *ptr++ = '{'; + ptr = upper + ? HexsToUpperChars(ptr, (int)(value >> 32)) + : HexsToLowerChars(ptr, (int)(value >> 32)); + if (separator) *ptr++ = '-'; + ptr = upper + ? 
HexsToUpperChars(ptr, (int)(value & 0xFFFFFFFF)) + : HexsToLowerChars(ptr, (int)(value & 0xFFFFFFFF)); + if (quotes) *ptr++ = '}'; + + Contract.Ensures(ptr == buffer + size); + return new string(buffer, 0, size); + } + + private const int INVALID_CHAR = -1; + + [Pure] + private static int CharToHex(char c) + { + if (c <= '9') + { + return c >= '0' ? (c - 48) : INVALID_CHAR; + } + if (c <= 'F') + { + return c >= 'A' ? (c - 55) : INVALID_CHAR; + } + if (c <= 'f') + { + return c >= 'a' ? (c - 87) : INVALID_CHAR; + } + return INVALID_CHAR; + } + +#if ENABLE_SPAN + + private static bool TryCharsToHexsUnsafe(ReadOnlySpan chars, out uint result) + { + int word = 0; + for (int i = 0; i < 8; i++) + { + int a = CharToHex(chars[i]); + if (a == INVALID_CHAR) + { + result = 0; + return false; + } + word = (word << 4) | a; + } + result = (uint)word; + return true; + } + + private static bool TryDecode16Unsafe(ReadOnlySpan chars, bool separator, out Uuid64 result) + { + if ((!separator || chars[8] == '-') + && TryCharsToHexsUnsafe(chars, out uint hi) + && TryCharsToHexsUnsafe(chars.Slice(separator ? 9 : 8), out uint lo)) + { + result = new Uuid64(((ulong)hi << 32) | lo); + return true; + } + result = default(Uuid64); + return false; + } + +#else + + private static unsafe bool TryCharsToHexsUnsafe(char* chars, int numChars, out uint result) + { + int word = 0; + for (int i = 0; i < 8; i++) + { + int a = CharToHex(chars[i]); + if (a == INVALID_CHAR) + { + result = 0; + return false; + } + word = (word << 4) | a; + } + result = (uint)word; + return true; + } + + private static unsafe bool TryDecode16Unsafe(char* chars, int numChars, bool separator, out Uuid64 result) + { + if ((!separator || chars[8] == '-') + && TryCharsToHexsUnsafe(chars, numChars, out uint hi) + && TryCharsToHexsUnsafe(chars + (separator ? 9 : 8), numChars - (separator ? 
9 : 8), out uint lo)) + { + result = new Uuid64(((ulong)hi << 32) | lo); + return true; + } + result = default(Uuid64); + return false; + } + +#endif + + #endregion + + #region Base62 encoding... + + //NOTE: this version of base62 encoding puts the digits BEFORE the letters, to ensure that the string representation of a UUID64 is in the same order as its byte[] or ulong version. + // => This scheme use the "0-9A-Za-z" ordering, while most other base62 encoder use "a-zA-Z0-9" + + private static class Base62 + { + //note: nested static class, so that we only allocate the internal buffers if Base62 encoding is actually used + + private static readonly char[] Base62LexicographicChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz".ToCharArray(); + + private static readonly int[] Base62Values = new int[3 * 32] + { + /* 32.. 63 */ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, + /* 64.. 95 */ -1, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, -1, -1, -1, -1, -1, + /* 96..127 */ -1, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, + }; + + /// Encode a 64-bit value into a base-62 string + /// 64-bit value to encode + /// If true, keep the leading '0' to return a string of length 11. If false, discards all extra leading '0' digits. + /// String that contains only digits, lower and upper case letters. The string will be lexicographically ordered, which means that sorting by string will give the same order as sorting by value. + /// + /// Encode62(0, false) => "0" + /// Encode62(0, true) => "00000000000" + /// Encode62(0xDEADBEEF) => "" + /// + public static string Encode(ulong value, bool padded) + { + // special case for default(Uuid64) which may be more frequent than others + if (value == 0) return padded ? 
"00000000000" : "0"; + + // encoding a 64 bits value in Base62 yields 10.75 "digits", which is rounded up to 11 chars. + const int MAX_SIZE = 11; + + unsafe + { + // The maximum size is 11 chars, but we will allocate 64 bytes on the stack to keep alignment. + char* chars = stackalloc char[16]; + char[] bc = Base62LexicographicChars; + + // start from the last "digit" + char* pc = chars + (MAX_SIZE - 1); + + while (pc >= chars) + { + ulong r = value % 62L; + value /= 62L; + *pc-- = bc[(int) r]; + if (!padded && value == 0) + { // the rest will be all zeroes + break; + } + } + + ++pc; + int count = MAX_SIZE - (int) (pc - chars); + Contract.Assert(count > 0 && count <= 11); + return count <= 0 ? String.Empty : new string(pc, 0, count); + } + } + +#if ENABLE_SPAN + + public static bool TryDecode(char[] s, out ulong value) + { + if (s == null) { value = 0; return false; } + return TryDecode(new ReadOnlySpan(s), out value); + } + + public static bool TryDecode(ReadOnlySpan s, out ulong value) + { + if (s == null || s.Length == 0 || s.Length > 11) + { // fail: too small/too big + value = 0; + return false; + } + + // we know that the original value is exactly 64bits, and any missing digit is '0' + ulong factor = 1UL; + ulong acc = 0UL; + int p = s.Length - 1; + int[] bv = Base62Values; + while (p >= 0) + { + // read digit + int a = s[p]; + // decode base62 digit + a = a >= 32 && a < 128 ? 
bv[a - 32] : -1; + if (a == -1) + { // fail: invalid character + value = 0; + return false; + } + // accumulate, while checking for overflow + acc = checked(acc + ((ulong) a * factor)); + if (p-- > 0) factor *= 62; + } + value = acc; + return true; + } + +#else + + + public static bool TryDecode(char[] s, out ulong value) + { + if (s == null) { value = 0; return false; } + + unsafe + { + fixed (char* chars = s) + { + return TryDecode(chars, s.Length, out value); + } + } + } + + public static unsafe bool TryDecode(char* chars, int numChars, out ulong value) + { + if (chars == null || numChars == 0 || numChars > 11) + { // fail: too small/too big + value = 0; + return false; + } + + // we know that the original value is exactly 64bits, and any missing digit is '0' + ulong factor = 1UL; + ulong acc = 0UL; + int p = numChars - 1; + int[] bv = Base62Values; + while (p >= 0) + { + // read digit + int a = chars[p]; + // decode base62 digit + a = a >= 32 && a < 128 ? bv[a - 32] : -1; + if (a == -1) + { // fail: invalid character + value = 0; + return false; + } + // accumulate, while checking for overflow + acc = checked(acc + ((ulong) a * factor)); + if (p-- > 0) factor *= 62; + } + value = acc; + return true; + } + +#endif + + } + + #endregion + + #region Unsafe I/O... 
+ + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static unsafe ulong ReadUnsafe([NotNull] byte* src) + { + //Contract.Requires(src != null); + return UnsafeHelpers.LoadUInt64BE(src); + } + +#if ENABLE_SPAN + internal static unsafe ulong ReadUnsafe(ReadOnlySpan src) + { + //Contract.Requires(src.Length >= 0); + fixed (byte* ptr = &MemoryMarshal.GetReference(src)) + { + return UnsafeHelpers.LoadUInt64BE(ptr); + } + } +#endif + + [Pure] + public static ulong ReadUnsafe([NotNull] byte[] buffer, int offset) + { + //Contract.Requires(buffer != null && offset >= 0 && offset + 7 < buffer.Length); + // buffer contains the bytes in Big Endian + unsafe + { + fixed (byte* ptr = &buffer[offset]) + { + return UnsafeHelpers.LoadUInt64BE(ptr); + } + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static unsafe void WriteUnsafe(ulong value, byte* ptr) + { + //Contract.Requires(ptr != null); + UnsafeHelpers.StoreUInt64BE(ptr, value); + } + + public static void WriteUnsafe(ulong value, [NotNull] byte[] buffer, int offset) + { + //Contract.Requires(buffer != null && offset >= 0 && offset + 7 < buffer.Length); + unsafe + { + fixed (byte* ptr = &buffer[offset]) + { + UnsafeHelpers.StoreUInt64BE(ptr, value); + } + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public unsafe void WriteToUnsafe([NotNull] byte* ptr) + { + WriteUnsafe(m_value, ptr); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void WriteToUnsafe([NotNull] byte[] buffer, int offset) + { + WriteUnsafe(m_value, buffer, offset); + } + +#if ENABLE_SPAN + public void WriteTo(byte[] buffer, int offset) + { + WriteTo(buffer.AsSpan(offset)); + } + + public void WriteTo(Span destination) + { + if (destination.Length < 8) throw FailInvalidBufferSize(nameof(destination)); + unsafe + { + fixed (byte* ptr = &MemoryMarshal.GetReference(destination)) + { + WriteUnsafe(m_value, ptr); + } + } + } + + public bool TryWriteTo(Span destination) + { + if 
(destination.Length < 8) return false; + unsafe + { + fixed (byte* ptr = &MemoryMarshal.GetReference(destination)) + { + WriteUnsafe(m_value, ptr); + return true; + } + } + } +#else + public void WriteTo(byte[] buffer, int offset) + { + WriteTo(buffer.AsSlice(offset)); + } + + public void WriteTo(Slice destination) + { + if (destination.Count < 8) throw FailInvalidBufferSize(nameof(destination)); + unsafe + { + fixed (byte* ptr = &destination.DangerousGetPinnableReference()) + { + WriteUnsafe(m_value, ptr); + } + } + } + + public bool TryWriteTo(Slice destination) + { + if (destination.Count < 8) return false; + unsafe + { + fixed (byte* ptr = &destination.DangerousGetPinnableReference()) + { + WriteUnsafe(m_value, ptr); + return true; + } + } + } +#endif + + #endregion + + #region Operators... + + public static bool operator ==(Uuid64 left, Uuid64 right) + { + return left.m_value == right.m_value; + } + + public static bool operator !=(Uuid64 left, Uuid64 right) + { + return left.m_value != right.m_value; + } + + public static bool operator >(Uuid64 left, Uuid64 right) + { + return left.m_value > right.m_value; + } + + public static bool operator >=(Uuid64 left, Uuid64 right) + { + return left.m_value >= right.m_value; + } + + public static bool operator <(Uuid64 left, Uuid64 right) + { + return left.m_value < right.m_value; + } + + public static bool operator <=(Uuid64 left, Uuid64 right) + { + return left.m_value <= right.m_value; + } + + // Comparing an Uuid64 to a 64-bit integer can have sense for "if (id == 0)" or "if (id != 0)" ? 
+ + public static bool operator ==(Uuid64 left, long right) + { + return left.m_value == (ulong)right; + } + + public static bool operator ==(Uuid64 left, ulong right) + { + return left.m_value == right; + } + + public static bool operator !=(Uuid64 left, long right) + { + return left.m_value != (ulong)right; + } + + public static bool operator !=(Uuid64 left, ulong right) + { + return left.m_value != right; + } + + /// Add a value from this instance + public static Uuid64 operator +(Uuid64 left, long right) + { + //TODO: how to handle overflow ? negative values ? + ulong v = (ulong)right; + return new Uuid64(checked(left.m_value + v)); + } + + /// Add a value from this instance + public static Uuid64 operator +(Uuid64 left, ulong right) + { + return new Uuid64(checked(left.m_value + right)); + } + + /// Subtract a value from this instance + public static Uuid64 operator -(Uuid64 left, long right) + { + //TODO: how to handle overflow ? negative values ? + ulong v = (ulong)right; + return new Uuid64(checked(left.m_value - v)); + } + + /// Subtract a value from this instance + public static Uuid64 operator -(Uuid64 left, ulong right) + { + return new Uuid64(checked(left.m_value - right)); + } + + /// Increments the value of this instance + public static Uuid64 operator ++(Uuid64 value) + { + return new Uuid64(checked(value.m_value + 1)); + } + + /// Decrements the value of this instance + public static Uuid64 operator --(Uuid64 value) + { + return new Uuid64(checked(value.m_value - 1)); + } + + #endregion + + /// Instance of this times can be used to test Uuid64 for equality and ordering + public sealed class Comparer : IEqualityComparer, IComparer + { + + public static readonly Comparer Default = new Comparer(); + + private Comparer() + { } + + public bool Equals(Uuid64 x, Uuid64 y) + { + return x.m_value == y.m_value; + } + + public int GetHashCode(Uuid64 obj) + { + return obj.m_value.GetHashCode(); + } + + public int Compare(Uuid64 x, Uuid64 y) + { + return 
x.m_value.CompareTo(y.m_value); + } + } + + } + + /// Generates 64-bit UUIDs using a secure random number generator + /// Methods of this type are thread-safe. + public sealed class Uuid64RandomGenerator + { + + /// Default instance of a random generator + /// Using this instance will introduce a global lock in your application. You can create specific instances for worker threads, if you require concurrency. + [NotNull] + public static readonly Uuid64RandomGenerator Default = new Uuid64RandomGenerator(); + + [NotNull] + private RandomNumberGenerator Rng { get; } + + [NotNull] + private readonly byte[] Scratch = new byte[8]; + + /// Create a new instance of a random UUID generator + public Uuid64RandomGenerator() + : this(null) + { } + + /// Create a new instance of a random UUID generator, using a specific random number generator + public Uuid64RandomGenerator(RandomNumberGenerator generator) + { + this.Rng = generator ?? RandomNumberGenerator.Create(); + } + + /// Return a new random 64-bit UUID + /// Uuid64 that contains 64 bits worth of randomness. + /// + ///

This method needs to acquire a lock. If multiple threads need to generate ids concurrently, you may need to create an instance of this class for each thread.

+ ///

The uniqueness of the generated uuids depends on the quality of the random number generator. If you cannot tolerate collisions, you either have to check whether a newly generated uuid already exists, or use a different kind of generator.

+ ///
+ [Pure] + public Uuid64 NewUuid() + { + //REVIEW: OPTIMIZE: use a per-thread instance of the rng and scratch buffer? + // => right now, NewUuid() is a Global Lock for the whole process! + lock (this.Rng) + { + // get 8 bytes of randomness (0 allowed) + this.Rng.GetBytes(this.Scratch); + //note: do *NOT* call GetBytes(byte[], int, int) because it creates creates a temp buffer, calls GetBytes(byte[]) and copy the result back! (as of .NET 4.7.1) + //TODO: PERF: use Span APIs once (if?) they become available! + return Uuid64.Read(this.Scratch); + } + } + + } + +} diff --git a/FoundationDB.Client/Status/FdbSystemStatus.cs b/FoundationDB.Client/Status/FdbSystemStatus.cs index be7b698b9..d3526813e 100644 --- a/FoundationDB.Client/Status/FdbSystemStatus.cs +++ b/FoundationDB.Client/Status/FdbSystemStatus.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,6 +26,7 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion +// ReSharper disable UnusedMember.Global namespace FoundationDB.Client.Status { using FoundationDB.Client.Utils; @@ -36,42 +37,37 @@ namespace FoundationDB.Client.Status using System.Globalization; /// Snapshot of the state of a FoundationDB cluster + [PublicAPI] public sealed class FdbSystemStatus : MetricsBase { - private readonly ClientStatus m_client; - private readonly ClusterStatus m_cluster; - private readonly long m_readVersion; - private readonly string m_raw; - internal FdbSystemStatus(Dictionary doc, long readVersion, string raw) : base(doc) { - m_client = new ClientStatus(TinyJsonParser.GetMapField(doc, "client")); - m_cluster = new ClusterStatus(TinyJsonParser.GetMapField(doc, "cluster")); - m_readVersion = readVersion; - m_raw = raw; + this.Client = new ClientStatus(TinyJsonParser.GetMapField(doc, "client")); + this.Cluster = new ClusterStatus(TinyJsonParser.GetMapField(doc, "cluster")); + this.ReadVersion = readVersion; + this.RawText = raw; } /// Details about the local Client - public ClientStatus Client { get { return m_client; } } + public ClientStatus Client { get; } /// Details about the remote Cluster - public ClusterStatus Cluster { get { return m_cluster; } } + public ClusterStatus Cluster { get; } /// Read Version of the snapshot - public long ReadVersion { get { return m_readVersion; } } + public long ReadVersion { get; } /// Raw JSON text of this snapshot. /// This is the same value that is returned by running 'status json' in fdbcli - public string RawText { get { return m_raw; } } - + public string RawText { get; } } #region Common... 
/// Details about a notification, alert or error, as reported by a component of a FoundationDB cluster [DebuggerDisplay("{Name}")] - public struct Message + public readonly struct Message { /// Code for this message public readonly string Name; @@ -87,8 +83,8 @@ internal Message(string name, string description) internal static Message From(Dictionary data, string field) { - var kvp = TinyJsonParser.GetStringPair(TinyJsonParser.GetMapField(data, field), "name", "description"); - return new Message(kvp.Key ?? String.Empty, kvp.Value ?? String.Empty); + (var key, var value) = TinyJsonParser.GetStringPair(TinyJsonParser.GetMapField(data, field), "name", "description"); + return new Message(key ?? string.Empty, value ?? string.Empty); } internal static Message[] FromArray(Dictionary data, string field) @@ -98,15 +94,15 @@ internal static Message[] FromArray(Dictionary data, string fiel for (int i = 0; i < res.Length; i++) { var obj = (Dictionary)array[i]; - var kvp = TinyJsonParser.GetStringPair(obj, "name", "description"); - res[i] = new Message(kvp.Key, kvp.Value); + (var key, var value) = TinyJsonParser.GetStringPair(obj, "name", "description"); + res[i] = new Message(key, value); } return res; } public override string ToString() { - return String.Format("[{0}] {1}", this.Name, this.Description); + return $"[{this.Name}] {this.Description}"; } public override int GetHashCode() @@ -116,7 +112,7 @@ public override int GetHashCode() public override bool Equals(object obj) { - return (obj is Message) && Equals((Message)obj); + return obj is Message message && Equals(message); } public bool Equals(Message other) @@ -128,7 +124,7 @@ public bool Equals(Message other) /// Measured quantity that changes over time [DebuggerDisplay("Counter={Counter}, Hz={Hz}, Roughness={Roughness}")] - public struct LoadCounter + public readonly struct LoadCounter { /// Absolute value, since the start (ex: "UNIT") public readonly long Counter; @@ -236,51 +232,29 @@ internal 
ClientStatus(Dictionary data) : base(data) { } private Message[] m_messages; /// Path to the '.cluster' file used by the client to connect to the cluster - public string ClusterFilePath - { - get { return GetString("cluster_file", "path"); } - } + public string ClusterFilePath => GetString("cluster_file", "path"); /// Indicates if the content of the '.cluster' file is up to date with the current topology of the cluster - public bool ClusterFileUpToDate - { - get { return GetBoolean("cluster_file", "up_to_date") ?? false; } - } + public bool ClusterFileUpToDate => GetBoolean("cluster_file", "up_to_date") ?? false; /// Liste of active messages for the client /// The most common client messages are listed in . - public Message[] Messages - { - [NotNull] - get { return m_messages ?? (m_messages = Message.FromArray(m_data, "messages")); } - } + [NotNull] + public Message[] Messages => m_messages ?? (m_messages = Message.FromArray(m_data, "messages")); /// Timestamp of the local client (unix time) /// Number of seconds since 1970-01-01Z, using the local system clock - public long Timestamp - { - get { return GetInt64("timestamp") ?? 0; } - } + public long Timestamp => GetInt64("timestamp") ?? 0; /// Local system time on the client - public DateTime SystemTime - { - get { return new DateTime(checked(621355968000000000L + this.Timestamp * TimeSpan.TicksPerSecond), DateTimeKind.Utc); } - } + public DateTime SystemTime => new DateTime(checked(621355968000000000L + this.Timestamp * TimeSpan.TicksPerSecond), DateTimeKind.Utc); /// Specifies if the local client was able to connect to the cluster - public bool DatabaseAvailable - { - get { return GetBoolean("database_status", "available") ?? false; } - } + public bool DatabaseAvailable => GetBoolean("database_status", "available") ?? false; /// Specifies if the database is currently healthy //REVIEW: what does it mean if available=true, but healthy=false ? 
- public bool DatabaseHealthy - { - get { return GetBoolean("database_status", "healthy") ?? false; } - } - + public bool DatabaseHealthy => GetBoolean("database_status", "healthy") ?? false; } /// List of well known client messages @@ -320,58 +294,31 @@ internal ClusterStatus(Dictionary data) /// Unix time of the cluster controller /// Number of seconds since the Unix epoch (1970-01-01Z) - public long ClusterControllerTimestamp - { - get { return GetInt64("cluster_controller_timestamp") ?? 0; } - } + public long ClusterControllerTimestamp => GetInt64("cluster_controller_timestamp") ?? 0; /// License string of the cluster - public string License - { - [NotNull] - get { return GetString("license") ?? String.Empty; } - } + [NotNull] + public string License => GetString("license") ?? String.Empty; /// List of currently active messages /// Includes notifications, warnings, errors, ... - public Message[] Messages - { - [NotNull] - get { return m_messages ?? (m_messages = Message.FromArray(m_data, "messages")); } - } + [NotNull] + public Message[] Messages => m_messages ?? (m_messages = Message.FromArray(m_data, "messages")); /// Recovery state of the cluster - public Message RecoveryState - { - get { return Message.From(m_data, "recovery_state"); } - } + public Message RecoveryState => Message.From(m_data, "recovery_state"); - public ClusterConfiguration Configuration - { - get { return m_configuration ?? (m_configuration = new ClusterConfiguration(GetMap("configuration"))); } - } + public ClusterConfiguration Configuration => m_configuration ?? (m_configuration = new ClusterConfiguration(GetMap("configuration"))); - public DataMetrics Data - { - get { return m_dataMetrics ?? (m_dataMetrics = new DataMetrics(GetMap("data"))); } - } + public DataMetrics Data => m_dataMetrics ?? (m_dataMetrics = new DataMetrics(GetMap("data"))); - public LatencyMetrics Latency - { - get { return m_latency ?? 
(m_latency = new LatencyMetrics(GetMap("latency_probe"))); } - } + public LatencyMetrics Latency => m_latency ?? (m_latency = new LatencyMetrics(GetMap("latency_probe"))); /// QoS metrics - public QosMetrics Qos - { - get { return m_qos ?? (m_qos = new QosMetrics(GetMap("qos"))); } - } + public QosMetrics Qos => m_qos ?? (m_qos = new QosMetrics(GetMap("qos"))); /// Workload metrics - public WorkloadMetrics Workload - { - get { return m_workload ?? (m_workload = new WorkloadMetrics(GetMap("workload"))); } - } + public WorkloadMetrics Workload => m_workload ?? (m_workload = new WorkloadMetrics(GetMap("workload"))); /// List of the processes that are currently active in the cluster public IReadOnlyDictionary Processes @@ -444,11 +391,11 @@ internal ClusterConfiguration(Dictionary data) private string[] m_excludedServers; - public int CoordinatorsCount { get; private set; } + public int CoordinatorsCount { get; } - public string StorageEngine { get; private set; } + public string StorageEngine { get; } - public string RedundancyFactor { get; private set; } + public string RedundancyFactor { get; } public IReadOnlyList ExcludedServers { @@ -481,11 +428,11 @@ internal LatencyMetrics(Dictionary data) this.TransactionStartSeconds = GetDouble("transaction_start_seconds") ?? 0; } - public double CommitSeconds { get; private set; } + public double CommitSeconds { get; } - public double ReadSeconds { get; private set; } + public double ReadSeconds { get; set; } - public double TransactionStartSeconds { get; private set; } + public double TransactionStartSeconds { get; } } /// Details about the volume of data stored in the cluster @@ -493,56 +440,25 @@ public sealed class DataMetrics : MetricsBase { internal DataMetrics(Dictionary data) : base(data) { } - public long AveragePartitionSizeBytes - { - get { return GetInt64("average_partition_size_bytes") ?? 0; } - } + public long AveragePartitionSizeBytes => GetInt64("average_partition_size_bytes") ?? 
0; - public long LeastOperatingSpaceBytesLogServer - { - get { return GetInt64("least_operating_space_bytes_log_server") ?? 0; } - } - - public long LeastOperatingSpaceBytesStorageServer - { - get { return GetInt64("least_operating_space_bytes_storage_server") ?? 0; } - } + public long LeastOperatingSpaceBytesLogServer => GetInt64("least_operating_space_bytes_log_server") ?? 0; - public long MovingDataInFlightBytes - { - get { return GetInt64("moving_data", "in_flight_bytes") ?? 0; } - } + public long LeastOperatingSpaceBytesStorageServer => GetInt64("least_operating_space_bytes_storage_server") ?? 0; - public long MovingDataInQueueBytes - { - get { return GetInt64("moving_data", "in_queue_bytes") ?? 0; } - } + public long MovingDataInFlightBytes => GetInt64("moving_data", "in_flight_bytes") ?? 0; - public long PartitionsCount - { - get { return GetInt64("partitions_count") ?? 0; } - } + public long MovingDataInQueueBytes => GetInt64("moving_data", "in_queue_bytes") ?? 0; - public long TotalDiskUsedBytes - { - get { return GetInt64("total_disk_used_bytes") ?? 0; } - } + public long PartitionsCount => GetInt64("partitions_count") ?? 0; - public long TotalKVUsedBytes - { - get { return GetInt64("total_kv_size_bytes") ?? 0; } - } + public long TotalDiskUsedBytes => GetInt64("total_disk_used_bytes") ?? 0; - public bool StateHealthy - { - get { return GetBoolean("state", "healthy") ?? false; } - } + public long TotalKVUsedBytes => GetInt64("total_kv_size_bytes") ?? 0; - public string StateName - { - get { return GetString("state", "name"); } - } + public bool StateHealthy => GetBoolean("state", "healthy") ?? 
false; + public string StateName => GetString("state", "name"); } /// Details about the quality of service offered by the cluster @@ -551,23 +467,13 @@ public sealed class QosMetrics : MetricsBase internal QosMetrics(Dictionary data) : base(data) { } /// Current limiting factor for the performance of the cluster - public Message PerformanceLimitedBy - { - get { return Message.From(m_data, "performance_limited_by"); } - } + public Message PerformanceLimitedBy => Message.From(m_data, "performance_limited_by"); //REVIEW: what is this? - public long WorstQueueBytesLogServer - { - get { return GetInt64("worst_queue_bytes_log_server") ?? 0; } - } + public long WorstQueueBytesLogServer => GetInt64("worst_queue_bytes_log_server") ?? 0; //REVIEW: what is this? - public long WorstQueueBytesStorageServer - { - get { return GetInt64("worst_queue_bytes_storage_server") ?? 0; } - } - + public long WorstQueueBytesStorageServer => GetInt64("worst_queue_bytes_storage_server") ?? 0; } /// Details about the current wokrload of the cluster @@ -580,22 +486,13 @@ internal WorkloadMetrics(Dictionary data) : base(data) { } private WorkloadTransactionsMetrics m_transactions; /// Performance counters for the volume of data processed by the database - public WorkloadBytesMetrics Bytes - { - get { return m_bytes ?? (m_bytes = new WorkloadBytesMetrics(GetMap("bytes"))); } - } + public WorkloadBytesMetrics Bytes => m_bytes ?? (m_bytes = new WorkloadBytesMetrics(GetMap("bytes"))); /// Performance counters for the operations on the keys in the database - public WorkloadOperationsMetrics Operations - { - get { return m_operations ?? (m_operations = new WorkloadOperationsMetrics(GetMap("operations"))); } - } + public WorkloadOperationsMetrics Operations => m_operations ?? (m_operations = new WorkloadOperationsMetrics(GetMap("operations"))); /// Performance counters for the transactions. - public WorkloadTransactionsMetrics Transactions - { - get { return m_transactions ?? 
(m_transactions = new WorkloadTransactionsMetrics(GetMap("transactions"))); } - } + public WorkloadTransactionsMetrics Transactions => m_transactions ?? (m_transactions = new WorkloadTransactionsMetrics(GetMap("transactions"))); } /// Throughput of a FoundationDB cluster @@ -609,7 +506,7 @@ internal WorkloadBytesMetrics(Dictionary data) /// Bytes written //REVIEW: this looks like the size of writes in transactions, NOT the number of bytes written to the disk! - public LoadCounter Written { get; private set; } + public LoadCounter Written { get; } } @@ -624,10 +521,10 @@ internal WorkloadOperationsMetrics(Dictionary data) } /// Details about read operations - public LoadCounter Reads { get; private set; } + public LoadCounter Reads { get; } /// Details about write operations - public LoadCounter Writes { get; private set; } + public LoadCounter Writes { get; } } /// Transaction workload of a FoundationDB cluster @@ -641,11 +538,11 @@ internal WorkloadTransactionsMetrics(Dictionary data) this.Started = LoadCounter.From(data, "started"); } - public LoadCounter Committed { get; private set; } + public LoadCounter Committed { get; } - public LoadCounter Conflicted { get; private set; } + public LoadCounter Conflicted { get; } - public LoadCounter Started { get; private set; } + public LoadCounter Started { get; } } #endregion @@ -669,83 +566,54 @@ internal ProcessStatus(Dictionary data, string id) private ProcessCpuMetrics m_cpu; private ProcessDiskMetrics m_disk; private ProcessMemoryMetrics m_memory; - private KeyValuePair[] m_roles; + private (string Id, string Role)[] m_roles; /// Unique identifier for this process. //TODO: is it stable accross reboots? what are the conditions for a process to change its ID ? - public string Id { [NotNull] get; private set; } + [NotNull] + public string Id { get; } /// Identifier of the machine that is hosting this process /// All processes that have the same MachineId are running on the same (physical) machine. 
- public string MachineId - { - [NotNull] - get { return m_machineId ?? (m_machineId = GetString("machine_id") ?? String.Empty); } - } + [NotNull] + public string MachineId => m_machineId ?? (m_machineId = GetString("machine_id") ?? String.Empty); /// Version of this process /// "3.0.4" - public string Version - { - [NotNull] - get { return GetString("version") ?? String.Empty; } - } + [NotNull] + public string Version => GetString("version") ?? String.Empty; /// Address and port of this process, with syntax "IP_ADDRESS:port" /// "10.1.2.34:4500" - public string Address - { - [NotNull] - get { return m_address ?? (m_address = GetString("address") ?? String.Empty); } - } + [NotNull] + public string Address => m_address ?? (m_address = GetString("address") ?? String.Empty); /// Command line that was used to start this process - public string CommandLine - { - [NotNull] - get { return GetString("command_line") ?? String.Empty; } - } + [NotNull] + public string CommandLine => GetString("command_line") ?? String.Empty; /// If true, this process is currently excluded from the cluster - public bool Excluded - { - get { return GetBoolean("excluded") ?? false; } - } + public bool Excluded => GetBoolean("excluded") ?? false; /// List of messages that are currently published by this process - public Message[] Messages - { - [NotNull] - get { return m_messages ?? (m_messages = Message.FromArray(m_data, "messages")); } - } + [NotNull] + public Message[] Messages => m_messages ?? (m_messages = Message.FromArray(m_data, "messages")); /// Network performance counters - public ProcessNetworkMetrics Network - { - get { return m_network ?? (m_network = new ProcessNetworkMetrics(GetMap("network"))); } - } + public ProcessNetworkMetrics Network => m_network ?? (m_network = new ProcessNetworkMetrics(GetMap("network"))); /// CPU performance counters - public ProcessCpuMetrics Cpu - { - get { return m_cpu ?? 
(m_cpu = new ProcessCpuMetrics(GetMap("cpu"))); } - } + public ProcessCpuMetrics Cpu => m_cpu ?? (m_cpu = new ProcessCpuMetrics(GetMap("cpu"))); /// Disk performance counters - public ProcessDiskMetrics Disk - { - get { return m_disk ?? (m_disk = new ProcessDiskMetrics(GetMap("disk"))); } - } + public ProcessDiskMetrics Disk => m_disk ?? (m_disk = new ProcessDiskMetrics(GetMap("disk"))); /// Memory performance counters - public ProcessMemoryMetrics Memory - { - get { return m_memory ?? (m_memory = new ProcessMemoryMetrics(GetMap("memory"))); } - } + public ProcessMemoryMetrics Memory => m_memory ?? (m_memory = new ProcessMemoryMetrics(GetMap("memory"))); /// List of the roles assumed by this process /// The key is the unique role ID in the cluster, and the value is the type of the role itself - public KeyValuePair[] Roles + public (string Id, string Role)[] Roles { get { @@ -754,7 +622,7 @@ public KeyValuePair[] Roles //REVIEW: should we have (K=id, V=role) or (K=role, V=id) ? var arr = GetArray("roles"); - var res = new KeyValuePair[arr.Count]; + var res = new (string, string)[arr.Count]; for (int i = 0; i < res.Length; i++) { var obj = (Dictionary)arr[i]; @@ -785,16 +653,9 @@ internal ProcessMemoryMetrics(Dictionary data) : base(data) { } - public long AvailableBytes - { - get { return GetInt64("available_bytes") ?? 0; } - } - - public long UsedBytes - { - get { return GetInt64("used_bytes") ?? 0; } - } + public long AvailableBytes => GetInt64("available_bytes") ?? 0; + public long UsedBytes => GetInt64("used_bytes") ?? 0; } /// CPU performane counters for a FoundationDB process @@ -804,11 +665,7 @@ internal ProcessCpuMetrics(Dictionary data) : base(data) { } - public double UsageCores - { - get { return GetDouble("usage_cores") ?? 0; } - } - + public double UsageCores => GetDouble("usage_cores") ?? 
0; } /// Disk performane counters for a FoundationDB process @@ -818,10 +675,7 @@ internal ProcessDiskMetrics(Dictionary data) : base(data) { } - public double Busy - { - get { return GetDouble("busy") ?? 0; } - } + public double Busy => GetDouble("busy") ?? 0; } /// Network performane counters for a FoundationDB process or machine @@ -834,9 +688,9 @@ internal ProcessNetworkMetrics(Dictionary data) this.MegabitsSent = LoadCounter.From(data, "megabits_sent"); } - public LoadCounter MegabitsReceived { get; private set; } + public LoadCounter MegabitsReceived { get; } - public LoadCounter MegabitsSent { get; private set; } + public LoadCounter MegabitsSent { get; } } @@ -860,47 +714,30 @@ internal MachineStatus(Dictionary data, string id) /// Unique identifier for this machine. //TODO: is it stable accross reboots? what are the conditions for a process to change its ID ? - public string Id { [NotNull] get; private set; } + [NotNull] + public string Id { get; } /// Identifier of the data center that is hosting this machine /// All machines that have the same DataCenterId are probably running on the same (physical) network. - public string DataCenterId - { - [NotNull] - get { return GetString("datacenter_id") ?? String.Empty; } - } + [NotNull] + public string DataCenterId => GetString("datacenter_id") ?? String.Empty; /// Address of this machine /// "10.1.2.34" - public string Address - { - [NotNull] - get { return m_address ?? (m_address = GetString("address") ?? String.Empty); } - } + [NotNull] + public string Address => m_address ?? (m_address = GetString("address") ?? String.Empty); /// If true, this process is currently excluded from the cluster - public bool Excluded - { - get { return GetBoolean("excluded") ?? false; } - } + public bool Excluded => GetBoolean("excluded") ?? false; /// Network performance counters - public MachineNetworkMetrics Network - { - get { return m_network ?? 
(m_network = new MachineNetworkMetrics(GetMap("network"))); } - } + public MachineNetworkMetrics Network => m_network ?? (m_network = new MachineNetworkMetrics(GetMap("network"))); /// CPU performance counters - public MachineCpuMetrics Cpu - { - get { return m_cpu ?? (m_cpu = new MachineCpuMetrics(GetMap("cpu"))); } - } + public MachineCpuMetrics Cpu => m_cpu ?? (m_cpu = new MachineCpuMetrics(GetMap("cpu"))); /// Memory performance counters - public MachineMemoryMetrics Memory - { - get { return m_memory ?? (m_memory = new MachineMemoryMetrics(GetMap("memory"))); } - } + public MachineMemoryMetrics Memory => m_memory ?? (m_memory = new MachineMemoryMetrics(GetMap("memory"))); } /// Memory performane counters for machine hosting one or more FoundationDB processes @@ -914,11 +751,11 @@ internal MachineMemoryMetrics(Dictionary data) this.TotalBytes = GetInt64("total_bytes") ?? 0; } - public long CommittedBytes { get; private set; } + public long CommittedBytes { get; } - public long FreeBytes { get; private set; } + public long FreeBytes { get; } - public long TotalBytes { get; private set; } + public long TotalBytes { get; } } @@ -931,7 +768,7 @@ internal MachineCpuMetrics(Dictionary data) this.LogicalCoreUtilization = GetDouble("logical_core_utilization") ?? 
0; } - public double LogicalCoreUtilization { get; private set; } + public double LogicalCoreUtilization { get; } } @@ -946,15 +783,15 @@ internal MachineNetworkMetrics(Dictionary data) this.TcpSegmentsRetransmitted = LoadCounter.From(data, "tcp_segments_retransmitted"); } - public LoadCounter MegabitsReceived { get; private set; } + public LoadCounter MegabitsReceived { get; } - public LoadCounter MegabitsSent { get; private set; } + public LoadCounter MegabitsSent { get; } - public LoadCounter TcpSegmentsRetransmitted { get; private set; } + public LoadCounter TcpSegmentsRetransmitted { get; } } #endregion -} \ No newline at end of file +} diff --git a/FoundationDB.Client/Subspaces/DynamicKeySubspace.cs b/FoundationDB.Client/Subspaces/DynamicKeySubspace.cs new file mode 100644 index 000000000..577546ba1 --- /dev/null +++ b/FoundationDB.Client/Subspaces/DynamicKeySubspace.cs @@ -0,0 +1,524 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; + + public class DynamicKeySubspace : KeySubspace, IDynamicKeySubspace + { + + /// Encoder for the keys of this subspace + public IKeyEncoding Encoding { get; } + + [NotNull] + internal IDynamicKeyEncoder KeyEncoder { get; } + + /// Create a new subspace from a binary prefix + /// Prefix of the new subspace + /// Type System used to encode keys in this subspace + internal DynamicKeySubspace(Slice prefix, [NotNull] IKeyEncoding encoding) + : base(prefix) + { + Contract.Requires(encoding != null); + this.Encoding = encoding; + this.KeyEncoder = encoding.GetDynamicKeyEncoder(); + this.Keys = new DynamicKeys(this, this.KeyEncoder); + this.Partition = new DynamicPartition(this); + } + + /// Create a new subspace from a binary prefix + /// Prefix of the new subspace + /// Encoder that will be used by this subspace + internal DynamicKeySubspace(Slice prefix, [NotNull] IDynamicKeyEncoder encoder) + : base(prefix) + { + Contract.Requires(encoder != null); + this.Encoding = encoder.Encoding; + this.KeyEncoder = encoder; + this.Keys = new DynamicKeys(this, encoder); + this.Partition = new DynamicPartition(this); + } + + /// Return a view of all the possible binary 
keys of this subspace + public DynamicKeys Keys { get; } + + /// Return a view of all the possible binary keys of this subspace + public DynamicPartition Partition { get; } + + public Slice this[[NotNull] ITuple item] + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => this.Keys.Pack(item); + } + + } + + /// Key helper for a dynamic TypeSystem + [DebuggerDisplay("{Parent.ToString(),nq)}")] + public sealed class DynamicKeys + { + + /// Parent subspace + [NotNull] + private readonly DynamicKeySubspace Parent; + + /// Encoder used to format keys in this subspace + [NotNull] + public IDynamicKeyEncoder Encoder { get; } + + internal DynamicKeys(DynamicKeySubspace parent, IDynamicKeyEncoder encoder) + { + Contract.Requires(parent != null && encoder != null); + this.Parent = parent; + this.Encoder = encoder; + } + + /// Convert a tuple into a key of this subspace + /// Tuple that will be packed and appended to the subspace prefix + [Pure] + public Slice Pack([NotNull] TTuple tuple) + where TTuple : ITuple + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + var sw = this.Parent.OpenWriter(); + this.Encoder.PackKey(ref sw, tuple); + return sw.ToSlice(); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack(ValueTuple items) + { + return Encode(items.Item1); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack((T1, T2) items) + { + return Encode(items.Item1, items.Item2); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack((T1, T2, T3) items) + { + return Encode(items.Item1, items.Item2, items.Item3); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack((T1, T2, T3, T4) items) + { + return Encode(items.Item1, items.Item2, items.Item3, items.Item4); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack((T1, T2, T3, T4, T5) items) + { + return Encode(items.Item1, items.Item2, items.Item3, 
items.Item4, items.Item5); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack((T1, T2, T3, T4, T5, T6) items) + { + return Encode(items.Item1, items.Item2, items.Item3, items.Item4, items.Item5, items.Item6); + } + + /// Unpack a key of this subspace, back into a tuple + /// Key that was produced by a previous call to + /// Original tuple + public ITuple Unpack(Slice packedKey) + { + return this.Encoder.UnpackKey(this.Parent.ExtractKey(packedKey)); + } + + #region ToRange()... + + /// Return a key range that encompass all the keys inside this subspace, according to the current key encoder + public KeyRange ToRange() + { + return this.Encoder.ToRange(this.Parent.GetPrefix()); + } + + /// Return a key range that encompass all the keys inside a partition of this subspace, according to the current key encoder + /// Tuple used as a prefix for the range + public KeyRange ToRange([NotNull] ITuple tuple) + { + return this.Encoder.ToRange(this.Parent.GetPrefix(), tuple); + } + + public KeyRange ToRange(STuple tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1); + } + + public KeyRange ToRange(STuple tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2); + } + + public KeyRange ToRange(STuple tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2, tuple.Item3); + } + + public KeyRange ToRange(STuple tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + public KeyRange ToRange(STuple tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5); + } + + public KeyRange ToRange(STuple tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5); + } + + public KeyRange ToRange(ValueTuple tuple) + { + return 
this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1); + } + + public KeyRange ToRange((T1, T2) tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2); + } + + public KeyRange ToRange((T1, T2, T3) tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2, tuple.Item3); + } + + public KeyRange ToRange((T1, T2, T3, T4) tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + public KeyRange ToRange((T1, T2, T3, T4, T5) tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5); + } + + public KeyRange ToRange((T1, T2, T3, T4, T5, T6) tuple) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5); + } + + #endregion + + #region ToKeyRange()... + + public KeyRange ToKeyRange(T1 item1) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), item1); + } + + public KeyRange ToKeyRange(T1 item1, T2 item2) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), item1, item2); + } + + public KeyRange ToKeyRange(T1 item1, T2 item2, T3 item3) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), item1, item2, item3); + } + + public KeyRange ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), item1, item2, item3, item4); + } + + public KeyRange ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), item1, item2, item3, item4, item5); + } + public KeyRange ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), item1, item2, item3, item4, item5, item6); + } + public KeyRange ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) + { + 
return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), item1, item2, item3, item4, item5, item6, item7); + } + public KeyRange ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) + { + return this.Encoder.ToKeyRange(this.Parent.GetPrefix(), item1, item2, item3, item4, item5, item6, item7, item8); + } + + #endregion + + #region Encode... + + /// Encode a key which is composed of a single element + public Slice Encode(T1 item1) + { + var sw = this.Parent.OpenWriter(); + this.Encoder.EncodeKey(ref sw, item1); + return sw.ToSlice(); + } + + /// Encode a batch of keys, each one composed of a single element + public Slice[] EncodeMany(IEnumerable items) + { + return Batched.Convert( + this.Parent.OpenWriter(), + items, + (ref SliceWriter writer, T item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, item), + this.Encoder + ); + } + + /// Encode a batch of keys, each one composed of a single value extracted from each elements + public Slice[] EncodeMany(IEnumerable items, Func selector) + { + return Batched.Convert( + this.Parent.OpenWriter(), + items, + (ref SliceWriter writer, TSource item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, selector(item)), + this.Encoder + ); + } + + /// Encode a key which is composed of a two elements + public Slice Encode(T1 item1, T2 item2) + { + var sw = this.Parent.OpenWriter(); + this.Encoder.EncodeKey(ref sw, item1, item2); + return sw.ToSlice(); + } + + /// Encode a key which is composed of three elements + public Slice Encode(T1 item1, T2 item2, T3 item3) + { + var sw = this.Parent.OpenWriter(); + this.Encoder.EncodeKey(ref sw, item1, item2, item3); + return sw.ToSlice(); + } + + /// Encode a key which is composed of four elements + public Slice Encode(T1 item1, T2 item2, T3 item3, T4 item4) + { + var sw = this.Parent.OpenWriter(); + this.Encoder.EncodeKey(ref sw, item1, item2, item3, item4); + return sw.ToSlice(); + } + + /// Encode a key which is composed of 
five elements + public Slice Encode(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + var sw = this.Parent.OpenWriter(); + this.Encoder.EncodeKey(ref sw, item1, item2, item3, item4, item5); + return sw.ToSlice(); + } + + /// Encode a key which is composed of six elements + public Slice Encode(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + var sw = this.Parent.OpenWriter(); + this.Encoder.EncodeKey(ref sw, item1, item2, item3, item4, item5, item6); + return sw.ToSlice(); + } + + #endregion + + #region Decode... + + /// Decode a key of this subspace, composed of a single element + public T1 Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + /// Decode a key of this subspace, composed of exactly two elements + public STuple Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + /// Decode a key of this subspace, composed of exactly three elements + public STuple Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + /// Decode a key of this subspace, composed of exactly four elements + public STuple Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + /// Decode a key of this subspace, composed of exactly five elements + public STuple Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + public STuple Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + /// Decode a key of this subspace, and return only the first element without decoding the rest the key. + /// This method is faster than unpacking the complete key and reading only the first element. 
+ public TFirst DecodeFirst(Slice packedKey) + { + return this.Encoder.DecodeKeyFirst(this.Parent.ExtractKey(packedKey)); + } + + /// Decode a key of this subspace, and return only the last element without decoding the rest. + /// This method is faster than unpacking the complete key and reading only the last element. + public TLast DecodeLast(Slice packedKey) + { + return this.Encoder.DecodeKeyLast(this.Parent.ExtractKey(packedKey)); + } + + #endregion + + /// Return a user-friendly string representation of a key of this subspace + public string Dump(Slice packedKey) + { + //TODO: defer to the encoding itself? + var key = this.Parent.ExtractKey(packedKey); + try + { + var tuple = TuPack.Unpack(key); + return tuple.ToString(); + } + catch (FormatException) + { + // this is not a tuple??? + } + return key.PrettyPrint(); + } + + } + + public sealed class DynamicPartition + { + + [NotNull] + public IDynamicKeySubspace Subspace { get; } + + + internal DynamicPartition([NotNull] DynamicKeySubspace subspace) + { + Contract.Requires(subspace != null); + this.Subspace = subspace; + } + + public IDynamicKeySubspace this[Slice binarySuffix] + { + [Pure, NotNull] + get => new DynamicKeySubspace(this.Subspace[binarySuffix], this.Subspace.Encoding); + } + + public IDynamicKeySubspace this[ITuple suffix] + { + [Pure, NotNull] + get => new DynamicKeySubspace(this.Subspace.Keys.Pack(suffix), this.Subspace.Encoding); + } + + /// Partition this subspace into a child subspace + /// Type of the child subspace key + /// Value of the child subspace + /// New subspace that is logically contained by the current subspace + /// Subspace([Foo, ]).Partition(Bar) is equivalent to Subspace([Foo, Bar, ]) + /// + /// new FdbSubspace(["Users", ]).Partition("Contacts") == new FdbSubspace(["Users", "Contacts", ]) + /// + [Pure, NotNull] + public IDynamicKeySubspace ByKey(T value) + { + return new DynamicKeySubspace(this.Subspace.Keys.Encode(value), this.Subspace.Encoding); + } + + /// Partition this 
subspace into a child subspace + /// Type of the first subspace key + /// Type of the second subspace key + /// Value of the first subspace key + /// Value of the second subspace key + /// New subspace that is logically contained by the current subspace + /// Subspace([Foo, ]).Partition(Bar, Baz) is equivalent to Subspace([Foo, Bar, Baz]) + /// + /// new FdbSubspace(["Users", ]).Partition("Contacts", "Friends") == new FdbSubspace(["Users", "Contacts", "Friends", ]) + /// + [Pure, NotNull] + public IDynamicKeySubspace ByKey(T1 value1, T2 value2) + { + return new DynamicKeySubspace(this.Subspace.Keys.Encode(value1, value2), this.Subspace.Encoding); + } + + /// Partition this subspace into a child subspace + /// Type of the first subspace key + /// Type of the second subspace key + /// Type of the third subspace key + /// Value of the first subspace key + /// Value of the second subspace key + /// Value of the third subspace key + /// New subspace that is logically contained by the current subspace + /// + /// new FdbSubspace(["Users", ]).Partition("John Smith", "Contacts", "Friends") == new FdbSubspace(["Users", "John Smith", "Contacts", "Friends", ]) + /// + [Pure, NotNull] + public IDynamicKeySubspace ByKey(T1 value1, T2 value2, T3 value3) + { + return new DynamicKeySubspace(this.Subspace.Keys.Encode(value1, value2, value3), this.Subspace.Encoding); + } + + /// Partition this subspace into a child subspace + /// Type of the first subspace key + /// Type of the second subspace key + /// Type of the third subspace key + /// Type of the fourth subspace key + /// Value of the first subspace key + /// Value of the second subspace key + /// Value of the third subspace key + /// Value of the fourth subspace key + /// New subspace that is logically contained by the current subspace + /// + /// new FdbSubspace(["Users", ]).Partition("John Smith", "Contacts", "Friends", "Messages") == new FdbSubspace(["Users", "John Smith", "Contacts", "Friends", "Messages", ]) + /// + 
[Pure, NotNull] + public IDynamicKeySubspace ByKey(T1 value1, T2 value2, T3 value3, T4 value4) + { + return new DynamicKeySubspace(this.Subspace.Keys.Encode(value1, value2, value3, value4), this.Subspace.Encoding); + } + + } + +} diff --git a/FoundationDB.Client/Subspaces/Fdb.Directory.cs b/FoundationDB.Client/Subspaces/Fdb.Directory.cs index 61b50b0d5..a4bd51168 100644 --- a/FoundationDB.Client/Subspaces/Fdb.Directory.cs +++ b/FoundationDB.Client/Subspaces/Fdb.Directory.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,31 +28,31 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client { - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Directories; - using FoundationDB.Linq; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Threading; using System.Threading.Tasks; - using SystemIO = System.IO; + using Doxense.Linq; + using FoundationDB.Layers.Directories; + using JetBrains.Annotations; public static partial class Fdb { /// Static helper class to open named partitions + [PublicAPI] public static class Directory { /// Open a named partition of the default cluster /// Path of the named partition to open - /// Token used to cancel this operation + /// Token used to cancel this operation /// Returns a new database instance that will only be able to read and write inside the specified partition. 
If the partition does not exist, it will be automatically created - public static Task OpenNamedPartitionAsync([NotNull] IEnumerable path, CancellationToken cancellationToken) + [ItemNotNull] + public static Task OpenNamedPartitionAsync([NotNull] IEnumerable path, CancellationToken ct) { - return OpenNamedPartitionAsync(clusterFile: null, dbName: null, path: path, readOnly: false, cancellationToken: cancellationToken); + return OpenNamedPartitionAsync(clusterFile: null, dbName: null, path: path, readOnly: false, ct: ct); } /// Open a named partition of a specific cluster @@ -60,41 +60,42 @@ public static Task OpenNamedPartitionAsync([NotNull] IEnumerableName of the database, or "DB" if not specified. /// Path of the named partition to open /// If true, the database instance will only allow read operations - /// Token used to cancel this operation + /// Token used to cancel this operation /// Returns a new database instance that will only be able to read and write inside the specified partition. 
If the partition does not exist, it will be automatically created - public static async Task OpenNamedPartitionAsync(string clusterFile, string dbName, [NotNull] IEnumerable path, bool readOnly, CancellationToken cancellationToken) + [ItemNotNull] + public static async Task OpenNamedPartitionAsync(string clusterFile, string dbName, [NotNull] IEnumerable path, bool readOnly, CancellationToken ct) { - if (path == null) throw new ArgumentNullException("path"); + if (path == null) throw new ArgumentNullException(nameof(path)); var partitionPath = path.ToList(); - if (partitionPath.Count == 0) throw new ArgumentException("The path to the named partition cannot be empty", "path"); + if (partitionPath.Count == 0) throw new ArgumentException("The path to the named partition cannot be empty", nameof(path)); // looks at the global partition table for the specified named partition // By convention, all named databases will be under the "/Databases" folder FdbDatabase db = null; - var rootSpace = FdbSubspace.Empty; + var rootSpace = KeySubspace.Empty; try { - db = await Fdb.OpenInternalAsync(clusterFile, dbName, rootSpace, readOnly: false, cancellationToken: cancellationToken).ConfigureAwait(false); + db = (FdbDatabase) (await Fdb.OpenInternalAsync(clusterFile, dbName, rootSpace, readOnly: false, ct: ct).ConfigureAwait(false)); var rootLayer = FdbDirectoryLayer.Create(rootSpace); - if (Logging.On) Logging.Verbose(typeof(Fdb.Directory), "OpenNamedPartitionAsync", String.Format("Opened root layer of database {0} using cluster file '{1}'", db.Name, db.Cluster.Path)); + if (Logging.On) Logging.Verbose(typeof(Fdb.Directory), "OpenNamedPartitionAsync", $"Opened root layer of database {db.Name} using cluster file '{db.Cluster.Path}'"); // look up in the root layer for the named partition - var descriptor = await rootLayer.CreateOrOpenAsync(db, partitionPath, layer: FdbDirectoryPartition.LayerId, cancellationToken: cancellationToken).ConfigureAwait(false); - if (Logging.On) 
Logging.Verbose(typeof(Fdb.Directory), "OpenNamedPartitionAsync", String.Format("Found named partition '{0}' at prefix {1}", descriptor.FullName, descriptor)); + var descriptor = await rootLayer.CreateOrOpenAsync(db, partitionPath, layer: FdbDirectoryPartition.LayerId, ct: ct).ConfigureAwait(false); + if (Logging.On) Logging.Verbose(typeof(Fdb.Directory), "OpenNamedPartitionAsync", $"Found named partition '{descriptor.FullName}' at prefix {descriptor}"); // we have to chroot the database to the new prefix, and create a new DirectoryLayer with a new '/' - rootSpace = FdbSubspace.Copy(descriptor); //note: create a copy of the key + rootSpace = descriptor.Copy(); //note: create a copy of the key //TODO: find a nicer way to do that! db.ChangeRoot(rootSpace, FdbDirectoryLayer.Create(rootSpace, partitionPath), readOnly); - if (Logging.On) Logging.Info(typeof(Fdb.Directory), "OpenNamedPartitionAsync", String.Format("Opened partition {0} at {1}, using directory layer at {2}", descriptor.FullName, db.GlobalSpace, db.Directory.DirectoryLayer.NodeSubspace)); + if (Logging.On) Logging.Info(typeof(Fdb.Directory), "OpenNamedPartitionAsync", $"Opened partition {descriptor.FullName} at {db.GlobalSpace}, using directory layer at {db.Directory.DirectoryLayer.NodeSubspace}"); return db; } catch(Exception e) { - if (db != null) db.Dispose(); + db?.Dispose(); if (Logging.On) Logging.Exception(typeof(Fdb.Directory), "OpenNamedPartitionAsync", e); throw; } @@ -103,12 +104,13 @@ public static async Task OpenNamedPartitionAsync(string clusterFil /// List and open the sub-directories of the given directory /// Database used for the operation /// Parent directory - /// Token used to cancel this operation + /// Token used to cancel this operation /// Dictionary of all the sub directories of the directory. 
- public static async Task> BrowseAsync([NotNull] IFdbDatabase db, [NotNull] IFdbDirectory parent, CancellationToken cancellationToken) + [ItemNotNull] + public static async Task> BrowseAsync([NotNull] IFdbDatabase db, [NotNull] IFdbDirectory parent, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (parent == null) throw new ArgumentNullException("parent"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (parent == null) throw new ArgumentNullException(nameof(parent)); return await db.ReadAsync(async (tr) => { @@ -118,13 +120,13 @@ public static async Task> BrowseAsync([ // open all the subdirectories var folders = await names .ToAsyncEnumerable() - .SelectAsync((name, ct) => parent.OpenAsync(tr, name)) + .SelectAsync((name, _) => parent.OpenAsync(tr, name)) .ToListAsync(); // map the result return folders.ToDictionary(ds => ds.Name); - }, cancellationToken).ConfigureAwait(false); + }, ct).ConfigureAwait(false); } } diff --git a/FoundationDB.Client/Subspaces/FdbDatabasePartition.cs b/FoundationDB.Client/Subspaces/FdbDatabasePartition.cs index a53728466..5bf238fd0 100644 --- a/FoundationDB.Client/Subspaces/FdbDatabasePartition.cs +++ b/FoundationDB.Client/Subspaces/FdbDatabasePartition.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,89 +28,63 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client { - using FoundationDB.Layers.Directories; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; using System.Threading; using System.Threading.Tasks; + using FoundationDB.Layers.Directories; + using JetBrains.Annotations; /// View of a database that is bound to a specific Partition [DebuggerDisplay("Database={Database.Name}, Partition={Directory.FullName}, Prefix={Database.GlobalSpace}")] public sealed class FdbDatabasePartition : IFdbDirectory { - private readonly IFdbDatabase m_database; - private readonly IFdbDirectory m_directory; /// Wrap an existing database with a root directory public FdbDatabasePartition(IFdbDatabase database, IFdbDirectory directory) { - if (database == null) throw new ArgumentNullException("database"); - if (directory == null) throw new ArgumentNullException("directory"); - - m_database = database; - m_directory = directory; + this.Database = database ?? throw new ArgumentNullException(nameof(database)); + this.Directory = directory ?? throw new ArgumentNullException(nameof(directory)); } /// Wrapped Directory instance - public IFdbDirectory Directory - { - [NotNull] - get { return m_directory; } - } + [NotNull] + public IFdbDirectory Directory { get; } /// Wrapped Database instance - public IFdbDatabase Database - { - [NotNull] - get { return m_database; } - } + [NotNull] + public IFdbDatabase Database { get; } /// Name of this Partition. /// This returns the last part of the path - public string Name - { - get { return m_directory.Name; } - } + public string Name => this.Directory.Name; /// Formatted path of this Partition /// This returns the formatted path, using '/' as the separator - public string FullName - { - [NotNull] - get { return m_directory.FullName; } - } + [NotNull] + public string FullName => this.Directory.FullName; /// Gets the path represented by this Partition. 
/// Returns an empty list for the root partition of the database, or a non empty list for a sub-partition - public IReadOnlyList Path - { - [NotNull] - get { return m_directory.Path; } - } + [NotNull] + public IReadOnlyList Path => this.Directory.Path; /// Get the DirectoryLayer that was used to create this partition. - public FdbDirectoryLayer DirectoryLayer - { - [NotNull] - get { return m_directory.DirectoryLayer; } - } + [NotNull] + public FdbDirectoryLayer DirectoryLayer => this.Directory.DirectoryLayer; #region Layer... /// Returns "partition" (ASCII) /// This should be equal to - public Slice Layer - { - get { return m_directory.Layer; } - } + public Slice Layer => this.Directory.Layer; void IFdbDirectory.CheckLayer(Slice layer) { if (layer.IsPresent && layer != this.Layer) { - throw new InvalidOperationException(String.Format("The directory {0} is a partition which is not compatible with layer {1}.", this.FullName, layer.ToAsciiOrHexaString())); + throw new InvalidOperationException($"The directory {this.FullName} is a partition which is not compatible with layer {layer:P}."); } } @@ -127,41 +101,41 @@ Task IFdbDirectory.ChangeLayerAsync(IFdbTransaction trans, /// If the subdirectory does not exist, it is created (creating intermediate subdirectories if necessary). /// If layer is specified, it is checked against the layer of an existing subdirectory or set as the layer of a new subdirectory. /// - public Task CreateOrOpenAsync([NotNull] string name, CancellationToken cancellationToken) + public Task CreateOrOpenAsync([NotNull] string name, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.CreateOrOpenAsync(tr, new [] { name }, Slice.Nil), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.CreateOrOpenAsync(tr, new [] { name }, Slice.Nil), ct); } /// Opens a subdirectory with the given path. /// If the subdirectory does not exist, it is created (creating intermediate subdirectories if necessary). 
/// If layer is specified, it is checked against the layer of an existing subdirectory or set as the layer of a new subdirectory. /// - public Task CreateOrOpenAsync([NotNull] string name, Slice layer, CancellationToken cancellationToken) + public Task CreateOrOpenAsync([NotNull] string name, Slice layer, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.CreateOrOpenAsync(tr, new[] { name }, layer), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.CreateOrOpenAsync(tr, new[] { name }, layer), ct); } /// Opens a subdirectory with the given path. /// If the subdirectory does not exist, it is created (creating intermediate subdirectories if necessary). /// If layer is specified, it is checked against the layer of an existing subdirectory or set as the layer of a new subdirectory. /// - public Task CreateOrOpenAsync([NotNull] IEnumerable path, CancellationToken cancellationToken) + public Task CreateOrOpenAsync([NotNull] IEnumerable path, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.CreateOrOpenAsync(tr, path, Slice.Nil), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.CreateOrOpenAsync(tr, path, Slice.Nil), ct); } /// Opens a subdirectory with the given path. /// If the subdirectory does not exist, it is created (creating intermediate subdirectories if necessary). /// If layer is specified, it is checked against the layer of an existing subdirectory or set as the layer of a new subdirectory. 
/// - public Task CreateOrOpenAsync([NotNull] IEnumerable path, Slice layer, CancellationToken cancellationToken) + public Task CreateOrOpenAsync([NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.CreateOrOpenAsync(tr, path, layer), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.CreateOrOpenAsync(tr, path, layer), ct); } Task IFdbDirectory.CreateOrOpenAsync(IFdbTransaction trans, IEnumerable subPath, Slice layer) { - return m_directory.CreateOrOpenAsync(trans, subPath, layer); + return this.Directory.CreateOrOpenAsync(trans, subPath, layer); } #endregion @@ -172,9 +146,9 @@ Task IFdbDirectory.CreateOrOpenAsync(IFdbTransaction trans /// An exception is thrown if the subdirectory does not exist, or if a layer is specified and a different layer was specified when the subdirectory was created. /// /// Name of the subdirectory to open - public Task OpenAsync([NotNull] string name, CancellationToken cancellationToken) + public Task OpenAsync([NotNull] string name, CancellationToken ct) { - return m_database.ReadAsync((tr) => m_directory.OpenAsync(tr, new [] { name }, Slice.Nil), cancellationToken); + return this.Database.ReadAsync((tr) => this.Directory.OpenAsync(tr, new [] { name }, Slice.Nil), ct); } /// Opens a subdirectory with the given . @@ -182,18 +156,18 @@ public Task OpenAsync([NotNull] string name, CancellationT /// /// Name of the subdirectory to open /// Expected layer id for the subdirectory (optional) - public Task OpenAsync([NotNull] string name, Slice layer, CancellationToken cancellationToken) + public Task OpenAsync([NotNull] string name, Slice layer, CancellationToken ct) { - return m_database.ReadAsync((tr) => m_directory.OpenAsync(tr, new[] { name }, layer), cancellationToken); + return this.Database.ReadAsync((tr) => this.Directory.OpenAsync(tr, new[] { name }, layer), ct); } /// Opens a subdirectory with the given . 
/// An exception is thrown if the subdirectory does not exist, or if a layer is specified and a different layer was specified when the subdirectory was created. /// /// Relative path of the subdirectory to open - public Task OpenAsync([NotNull] IEnumerable path, CancellationToken cancellationToken) + public Task OpenAsync([NotNull] IEnumerable path, CancellationToken ct) { - return m_database.ReadAsync((tr) => m_directory.OpenAsync(tr, path, Slice.Nil), cancellationToken); + return this.Database.ReadAsync((tr) => this.Directory.OpenAsync(tr, path, Slice.Nil), ct); } /// Opens a subdirectory with the given . @@ -201,14 +175,14 @@ public Task OpenAsync([NotNull] IEnumerable path, /// /// Relative path of the subdirectory to open /// Expected layer id for the subdirectory (optional) - public Task OpenAsync([NotNull] IEnumerable path, Slice layer, CancellationToken cancellationToken) + public Task OpenAsync([NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - return m_database.ReadAsync((tr) => m_directory.OpenAsync(tr, path, layer), cancellationToken); + return this.Database.ReadAsync((tr) => this.Directory.OpenAsync(tr, path, layer), ct); } Task IFdbDirectory.OpenAsync(IFdbReadOnlyTransaction trans, IEnumerable path, Slice layer) { - return m_directory.OpenAsync(trans, path, layer); + return this.Directory.OpenAsync(trans, path, layer); } #endregion @@ -220,9 +194,9 @@ Task IFdbDirectory.OpenAsync(IFdbReadOnlyTransaction trans /// /// Name of the subdirectory to open /// Returns the directory if it exists, or null if it was not found - public Task TryOpenAsync([NotNull] string name, CancellationToken cancellationToken) + public Task TryOpenAsync([NotNull] string name, CancellationToken ct) { - return m_database.ReadAsync((tr) => m_directory.TryOpenAsync(tr, new [] { name }, Slice.Nil), cancellationToken); + return this.Database.ReadAsync((tr) => this.Directory.TryOpenAsync(tr, new [] { name }, Slice.Nil), ct); } /// Opens a subdirectory with the given . 
@@ -231,9 +205,9 @@ public Task TryOpenAsync([NotNull] string name, Cancellati /// Name of the subdirectory to open /// Expected layer id for the subdirectory (optional) /// Returns the directory if it exists, or null if it was not found - public Task TryOpenAsync([NotNull] string name, Slice layer, CancellationToken cancellationToken) + public Task TryOpenAsync([NotNull] string name, Slice layer, CancellationToken ct) { - return m_database.ReadAsync((tr) => m_directory.TryOpenAsync(tr, new[] { name }, layer), cancellationToken); + return this.Database.ReadAsync((tr) => this.Directory.TryOpenAsync(tr, new[] { name }, layer), ct); } /// Opens a subdirectory with the given . @@ -241,9 +215,9 @@ public Task TryOpenAsync([NotNull] string name, Slice laye /// /// Relative path of the subdirectory to open /// Returns the directory if it exists, or null if it was not found - public Task TryOpenAsync([NotNull] IEnumerable path, CancellationToken cancellationToken) + public Task TryOpenAsync([NotNull] IEnumerable path, CancellationToken ct) { - return m_database.ReadAsync((tr) => m_directory.TryOpenAsync(tr, path, Slice.Nil), cancellationToken); + return this.Database.ReadAsync((tr) => this.Directory.TryOpenAsync(tr, path, Slice.Nil), ct); } /// Opens a subdirectory with the given . 
@@ -252,72 +226,72 @@ public Task TryOpenAsync([NotNull] IEnumerable pat /// Relative path of the subdirectory to open /// Expected layer id for the subdirectory (optional) /// Returns the directory if it exists, or null if it was not found - public Task TryOpenAsync([NotNull] IEnumerable path, Slice layer, CancellationToken cancellationToken) + public Task TryOpenAsync([NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - return m_database.ReadAsync((tr) => m_directory.TryOpenAsync(tr, path, layer), cancellationToken); + return this.Database.ReadAsync((tr) => this.Directory.TryOpenAsync(tr, path, layer), ct); } Task IFdbDirectory.TryOpenAsync(IFdbReadOnlyTransaction trans, IEnumerable path, Slice layer) { - return m_directory.TryOpenAsync(trans, path, layer); + return this.Directory.TryOpenAsync(trans, path, layer); } #endregion #region Create... - public Task CreateAsync([NotNull] string name, CancellationToken cancellationToken) + public Task CreateAsync([NotNull] string name, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.CreateAsync(tr, new[] { name }, Slice.Nil), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.CreateAsync(tr, new[] { name }, Slice.Nil), ct); } - public Task CreateAsync([NotNull] string name, Slice layer, CancellationToken cancellationToken) + public Task CreateAsync([NotNull] string name, Slice layer, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.CreateAsync(tr, new [] { name }, layer), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.CreateAsync(tr, new [] { name }, layer), ct); } - public Task CreateAsync([NotNull] IEnumerable path, CancellationToken cancellationToken) + public Task CreateAsync([NotNull] IEnumerable path, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.CreateAsync(tr, path, Slice.Nil), cancellationToken); + return this.Database.ReadWriteAsync((tr) => 
this.Directory.CreateAsync(tr, path, Slice.Nil), ct); } - public Task CreateAsync([NotNull] IEnumerable path, Slice layer, CancellationToken cancellationToken) + public Task CreateAsync([NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.CreateAsync(tr, path, layer), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.CreateAsync(tr, path, layer), ct); } Task IFdbDirectory.CreateAsync(IFdbTransaction trans, IEnumerable path, Slice layer) { - return m_directory.CreateAsync(trans, path, layer); + return this.Directory.CreateAsync(trans, path, layer); } #endregion #region TryCreate... - public Task TryCreateAsync([NotNull] string name, CancellationToken cancellationToken) + public Task TryCreateAsync([NotNull] string name, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.TryCreateAsync(tr, new [] { name }, Slice.Nil), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryCreateAsync(tr, new [] { name }, Slice.Nil), ct); } - public Task TryCreateAsync([NotNull] string name, Slice layer, CancellationToken cancellationToken) + public Task TryCreateAsync([NotNull] string name, Slice layer, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.TryCreateAsync(tr, new[] { name }, layer), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryCreateAsync(tr, new[] { name }, layer), ct); } - public Task TryCreateAsync([NotNull] IEnumerable path, CancellationToken cancellationToken) + public Task TryCreateAsync([NotNull] IEnumerable path, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.TryCreateAsync(tr, path, Slice.Nil), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryCreateAsync(tr, path, Slice.Nil), ct); } - public Task TryCreateAsync([NotNull] IEnumerable path, Slice layer, CancellationToken 
cancellationToken) + public Task TryCreateAsync([NotNull] IEnumerable path, Slice layer, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.TryCreateAsync(tr, path, layer), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryCreateAsync(tr, path, layer), ct); } Task IFdbDirectory.TryCreateAsync(IFdbTransaction trans, IEnumerable path, Slice layer) { - return m_directory.TryCreateAsync(trans, path, layer); + return this.Directory.TryCreateAsync(trans, path, layer); } #endregion @@ -328,51 +302,51 @@ Task IFdbDirectory.TryCreateAsync(IFdbTransaction trans, I /// Name of the directory to create /// If is specified, it is recorded with the directory and will be checked by future calls to open. /// The directory will be created with the given physical prefix; otherwise a prefix is allocated automatically. - public Task RegisterAsync([NotNull] string name, Slice layer, Slice prefix, CancellationToken cancellationToken) + public Task RegisterAsync([NotNull] string name, Slice layer, Slice prefix, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.RegisterAsync(tr, new[] { name }, layer, prefix), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.RegisterAsync(tr, new[] { name }, layer, prefix), ct); } /// Registers an existing prefix as a directory with the given (creating parent directories if necessary). This method is only indented for advanced use cases. /// Path of the directory to create /// If is specified, it is recorded with the directory and will be checked by future calls to open. /// The directory will be created with the given physical prefix; otherwise a prefix is allocated automatically. 
- public Task RegisterAsync([NotNull] IEnumerable path, Slice layer, Slice prefix, CancellationToken cancellationToken) + public Task RegisterAsync([NotNull] IEnumerable path, Slice layer, Slice prefix, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.RegisterAsync(tr, path, layer, prefix), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.RegisterAsync(tr, path, layer, prefix), ct); } Task IFdbDirectory.RegisterAsync(IFdbTransaction trans, IEnumerable path, Slice layer, Slice prefix) { - return m_directory.RegisterAsync(trans, path, layer, prefix); + return this.Directory.RegisterAsync(trans, path, layer, prefix); } #endregion #region Move... - public Task MoveAsync([NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath, CancellationToken cancellationToken) + public Task MoveAsync([NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.MoveAsync(tr, oldPath, newPath), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.MoveAsync(tr, oldPath, newPath), ct); } Task IFdbDirectory.MoveAsync(IFdbTransaction trans, IEnumerable oldPath, IEnumerable newPath) { - return m_directory.MoveAsync(trans, oldPath, newPath); + return this.Directory.MoveAsync(trans, oldPath, newPath); } #endregion #region TryMove... 
- public Task TryMoveAsync([NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath, CancellationToken cancellationToken) + public Task TryMoveAsync([NotNull] IEnumerable oldPath, [NotNull] IEnumerable newPath, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.TryMoveAsync(tr, oldPath, newPath), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryMoveAsync(tr, oldPath, newPath), ct); } Task IFdbDirectory.TryMoveAsync(IFdbTransaction trans, IEnumerable oldPath, IEnumerable newPath) { - return m_directory.TryMoveAsync(trans, oldPath, newPath); + return this.Directory.TryMoveAsync(trans, oldPath, newPath); } #endregion @@ -397,57 +371,57 @@ public Task TryMoveToAsync(IFdbTransaction trans, IEnumera #region Remove... - public Task RemoveAsync([NotNull] string name, CancellationToken cancellationToken) + public Task RemoveAsync([NotNull] string name, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.RemoveAsync(tr, new string[] { name }), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.RemoveAsync(tr, new string[] { name }), ct); } - public Task RemoveAsync(IEnumerable path, CancellationToken cancellationToken) + public Task RemoveAsync(IEnumerable path, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.RemoveAsync(tr, path), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.RemoveAsync(tr, path), ct); } Task IFdbDirectory.RemoveAsync(IFdbTransaction trans, IEnumerable path) { - return m_directory.RemoveAsync(trans, path); + return this.Directory.RemoveAsync(trans, path); } #endregion #region TryRemove... 
- public Task TryRemoveAsync([NotNull] string name, CancellationToken cancellationToken) + public Task TryRemoveAsync([NotNull] string name, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.TryRemoveAsync(tr, new string[] { name }), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryRemoveAsync(tr, new [] { name }), ct); } - public Task TryRemoveAsync(IEnumerable path, CancellationToken cancellationToken) + public Task TryRemoveAsync(IEnumerable path, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.TryRemoveAsync(tr, path), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryRemoveAsync(tr, path), ct); } Task IFdbDirectory.TryRemoveAsync(IFdbTransaction trans, IEnumerable path) { - return m_directory.TryRemoveAsync(trans, path); + return this.Directory.TryRemoveAsync(trans, path); } #endregion #region Exists... - public Task ExistsAsync([NotNull] string name, CancellationToken cancellationToken) + public Task ExistsAsync([NotNull] string name, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.ExistsAsync(tr, new string[] { name }), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.ExistsAsync(tr, new [] { name }), ct); } - public Task ExistsAsync(IEnumerable path, CancellationToken cancellationToken) + public Task ExistsAsync(IEnumerable path, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.ExistsAsync(tr, path), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.ExistsAsync(tr, path), ct); } Task IFdbDirectory.ExistsAsync(IFdbReadOnlyTransaction trans, IEnumerable path) { - return m_directory.ExistsAsync(trans, path); + return this.Directory.ExistsAsync(trans, path); } #endregion @@ -455,26 +429,26 @@ Task IFdbDirectory.ExistsAsync(IFdbReadOnlyTransaction trans, IEnumerable< #region List... 
/// Returns the list of all the top level directories of this database instance. - public Task> ListAsync(CancellationToken cancellationToken) + public Task> ListAsync(CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.ListAsync(tr), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.ListAsync(tr), ct); } /// Returns the list of all the top level directories of this database instance. - public Task> ListAsync([NotNull] string name, CancellationToken cancellationToken) + public Task> ListAsync([NotNull] string name, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.ListAsync(tr, new string[] { name }), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.ListAsync(tr, new string[] { name }), ct); } /// Returns the list of all the top level directories of this database instance. - public Task> ListAsync(IEnumerable path, CancellationToken cancellationToken) + public Task> ListAsync(IEnumerable path, CancellationToken ct) { - return m_database.ReadWriteAsync((tr) => m_directory.ListAsync(tr, path), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.ListAsync(tr, path), ct); } Task> IFdbDirectory.ListAsync(IFdbReadOnlyTransaction trans, IEnumerable path) { - return m_directory.ListAsync(trans, path); + return this.Directory.ListAsync(trans, path); } #endregion @@ -482,32 +456,32 @@ Task> IFdbDirectory.ListAsync(IFdbReadOnlyTransaction trans, IEnume #region TryList... /// Returns the list of all the top level directories of this database instance. - public Task> TryListAsync(CancellationToken cancellationToken) + public Task> TryListAsync(CancellationToken ct) { //REVIEW: is it possible for this method to fail on a top-level db partition? // => it not, should be removed because it is a duplicate of ListAsync(..) 
- return m_database.ReadWriteAsync((tr) => m_directory.TryListAsync(tr), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryListAsync(tr), ct); } /// Returns the list of all the top level directories of this database instance. - public Task> TryListAsync([NotNull] string name, CancellationToken cancellationToken) + public Task> TryListAsync([NotNull] string name, CancellationToken ct) { //REVIEW: is it possible for this method to fail on a top-level db partition? // => it not, should be removed because it is a duplicate of ListAsync(..) - return m_database.ReadWriteAsync((tr) => m_directory.TryListAsync(tr, new string[] { name }), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryListAsync(tr, new string[] { name }), ct); } /// Returns the list of all the top level directories of this database instance. - public Task> TryListAsync(IEnumerable path, CancellationToken cancellationToken) + public Task> TryListAsync(IEnumerable path, CancellationToken ct) { //REVIEW: is it possible for this method to fail on a top-level db partition? // => it not, should be removed because it is a duplicate of ListAsync(..) - return m_database.ReadWriteAsync((tr) => m_directory.TryListAsync(tr, path), cancellationToken); + return this.Database.ReadWriteAsync((tr) => this.Directory.TryListAsync(tr, path), ct); } Task> IFdbDirectory.TryListAsync(IFdbReadOnlyTransaction trans, IEnumerable path) { - return m_directory.TryListAsync(trans, path); + return this.Directory.TryListAsync(trans, path); } #endregion diff --git a/FoundationDB.Client/Subspaces/FdbDynamicSubspace.cs b/FoundationDB.Client/Subspaces/FdbDynamicSubspace.cs deleted file mode 100644 index 610e1a430..000000000 --- a/FoundationDB.Client/Subspaces/FdbDynamicSubspace.cs +++ /dev/null @@ -1,80 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using System.Diagnostics; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - public class FdbDynamicSubspace : FdbSubspace, IFdbDynamicSubspace - { - /// Encoder for the keys of this subspace - private readonly IDynamicKeyEncoder m_encoder; - - /// Create a new subspace from a binary prefix - /// Prefix of the new subspace - /// If true, take a copy of the prefix - /// Type System used to encode keys in this subspace (optional, will use Tuple Encoding by default) - internal FdbDynamicSubspace(Slice rawPrefix, bool copy, IDynamicKeyEncoder encoder) - : base (rawPrefix, copy) - { - this.m_encoder = encoder ?? TypeSystem.Default.GetDynamicEncoder(); - } - - public FdbDynamicSubspace(Slice rawPrefix, IDynamicKeyEncoder encoder) - : this(rawPrefix, true, encoder) - { } - - protected override IFdbSubspace CreateChildren(Slice suffix) - { - return new FdbDynamicSubspace(ConcatKey(suffix), m_encoder); - } - - public IDynamicKeyEncoder Encoder - { - get { return m_encoder; } - } - - /// Return a view of all the possible binary keys of this subspace - public FdbDynamicSubspaceKeys Keys - { - [DebuggerStepThrough] - get { return new FdbDynamicSubspaceKeys(this, m_encoder); } - } - - /// Returns an helper object that knows how to create sub-partitions of this subspace - public FdbDynamicSubspacePartition Partition - { - //note: not cached, because this is probably not be called frequently (except in the init path) - [DebuggerStepThrough] - get { return new FdbDynamicSubspacePartition(this, m_encoder); } - } - - } -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbDynamicSubspaceKeys.cs b/FoundationDB.Client/Subspaces/FdbDynamicSubspaceKeys.cs deleted file mode 100644 index 271f98355..000000000 --- a/FoundationDB.Client/Subspaces/FdbDynamicSubspaceKeys.cs +++ /dev/null @@ -1,600 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using System.Collections; -using System.Collections.Generic; -using System.Linq; -using FoundationDB.Client.Utils; -using FoundationDB.Layers.Tuples; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - - internal static class Batched - { - - public delegate void Handler(ref SliceWriter writer, TValue item, TState state); - - [NotNull] - public static Slice[] Convert(SliceWriter writer, [NotNull, ItemNotNull] IEnumerable values, Handler handler, TState state) - { - Contract.Requires(values != null && handler != null); - - //Note on performance: - // - we will reuse the same buffer for each temp key, and copy them into a slice buffer - // - doing it this way adds a memory copy (writer => buffer) but reduce the number of byte[] allocations (and reduce the GC overhead) - - int start = writer.Position; - - var buffer = new SliceBuffer(); - - var coll = values as ICollection; - if (coll != null) - { // pre-allocate the final array with the correct size - var res = new Slice[coll.Count]; - int p = 0; - foreach (var tuple in coll) - { - // reset position to just after the subspace prefix - writer.Position = start; - - handler(ref writer, tuple, state); - - // copy full key in the buffer - res[p++] = buffer.Intern(writer.ToSlice()); - } - Contract.Assert(p == res.Length); - return res; - } - else - { // we won't now the array size until the end... - var res = new List(); - foreach (var tuple in values) - { - // reset position to just after the subspace prefix - writer.Position = start; - - handler(ref writer, tuple, state); - - // copy full key in the buffer - res.Add(buffer.Intern(writer.ToSlice())); - } - return res.ToArray(); - } - } - } - - /// Key helper for a dynamic TypeSystem - public struct FdbDynamicSubspaceKeys - { - //NOTE: everytime an IFdbTuple is used here, it is as a container (vector of objects), and NOT as the Tuple Encoding scheme ! 
(separate concept) - - /// Parent subspace - [NotNull] - public readonly IFdbSubspace Subspace; - - /// Encoder used to format keys in this subspace - [NotNull] - public readonly IDynamicKeyEncoder Encoder; - - public FdbDynamicSubspaceKeys([NotNull] IFdbSubspace subspace, [NotNull] IDynamicKeyEncoder encoder) - { - Contract.Requires(subspace != null && encoder != null); - this.Subspace = subspace; - this.Encoder = encoder; - } - - /// Return a key range that encompass all the keys inside this subspace, according to the current key encoder - public FdbKeyRange ToRange() - { - return this.Encoder.ToRange(this.Subspace.Key); - } - - /// Return a key range that encompass all the keys inside a partition of this subspace, according to the current key encoder - /// Tuple used as a prefix for the range - public FdbKeyRange ToRange([NotNull] IFdbTuple tuple) - { - return this.Encoder.ToRange(Pack(tuple)); - } - - /// Return a key range that encompass all the keys inside a partition of this subspace, according to the current key encoder - /// Convertible item used as a prefix for the range - public FdbKeyRange ToRange([NotNull] ITupleFormattable item) - { - return this.Encoder.ToRange(Pack(item)); - } - - /// Convert a tuple into a key of this subspace - /// Tuple that will be packed and appended to the subspace prefix - /// This is a shortcut for - public Slice this[[NotNull] IFdbTuple tuple] - { - get { return Pack(tuple); } - } - - /// Convert an item into a key of this subspace - /// Convertible item that will be packed and appended to the subspace prefix - /// This is a shortcut for - public Slice this[[NotNull] ITupleFormattable item] - { - get { return Pack(item); } - } - - /// Convert a tuple into a key of this subspace - /// Tuple that will be packed and appended to the subspace prefix - public Slice Pack([NotNull] IFdbTuple tuple) - { - if (tuple == null) throw new ArgumentNullException("tuple"); - - var writer = this.Subspace.GetWriter(); - 
this.Encoder.PackKey(ref writer, tuple); - return writer.ToSlice(); - } - - /// Convert a batch of tuples into keys of this subspace, in an optimized way. - /// Sequence of tuple that will be packed and appended to the subspace prefix - public Slice[] PackMany([NotNull, ItemNotNull] IEnumerable tuples) - { - if (tuples == null) throw new ArgumentNullException("tuples"); - - return Batched.Convert( - this.Subspace.GetWriter(), - tuples, - (ref SliceWriter writer, IFdbTuple tuple, IDynamicKeyEncoder encoder) => encoder.PackKey(ref writer, tuple), - this.Encoder - ); - } - - /// Convert an item into a key of this subspace - /// Convertible item that will be packed and appended to the subspace prefix - public Slice Pack([NotNull] ITupleFormattable item) - { - if (item == null) throw new ArgumentNullException("item"); - - return Pack(item.ToTuple()); - } - - /// Convert a batch of items into keys of this subspace, in an optimized way. - /// Sequence of convertible items that will be packed and appended to the subspace prefix - public Slice[] PackMany([NotNull, ItemNotNull] IEnumerable items) - { - if (items == null) throw new ArgumentNullException("items"); - - return Batched.Convert( - this.Subspace.GetWriter(), - items.Select(item => item.ToTuple()), - (ref SliceWriter writer, IFdbTuple tuple, IDynamicKeyEncoder encoder) => encoder.PackKey(ref writer, tuple), - this.Encoder - ); - } - - /// Encode a key which is composed of a single element - public Slice Encode(T item1) - { - var writer = this.Subspace.GetWriter(); - this.Encoder.EncodeKey(ref writer, item1); - return writer.ToSlice(); - } - - /// Encode a batch of keys, each one composed of a single element - public Slice[] EncodeMany(IEnumerable items) - { - return Batched.Convert( - this.Subspace.GetWriter(), - items, - (ref SliceWriter writer, T item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, item), - this.Encoder - ); - } - - /// Encode a batch of keys, each one composed of a single value 
extracted from each elements - public Slice[] EncodeMany(IEnumerable items, Func selector) - { - return Batched.Convert( - this.Subspace.GetWriter(), - items, - (ref SliceWriter writer, TSource item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, selector(item)), - this.Encoder - ); - } - - /// Encode a key which is composed of a two elements - public Slice Encode(T1 item1, T2 item2) - { - var writer = this.Subspace.GetWriter(); - this.Encoder.EncodeKey(ref writer, item1, item2); - return writer.ToSlice(); - } - - /// Encode a batch of keys, each one composed of two values extracted from each elements - public Slice[] EncodeMany(IEnumerable items, Func selector1, Func selector2) - { - return Batched.Convert( - this.Subspace.GetWriter(), - items, - (ref SliceWriter writer, TItem item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, selector1(item), selector2(item)), - this.Encoder - ); - } - - /// Encode a key which is composed of three elements - public Slice Encode(T1 item1, T2 item2, T3 item3) - { - var writer = this.Subspace.GetWriter(); - this.Encoder.EncodeKey(ref writer, item1, item2, item3); - return writer.ToSlice(); - } - - /// Encode a batch of keys, each one composed of three values extracted from each elements - public Slice[] EncodeMany(IEnumerable items, Func selector1, Func selector2, Func selector3) - { - return Batched.Convert( - this.Subspace.GetWriter(), - items, - (ref SliceWriter writer, TItem item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, selector1(item), selector2(item), selector3(item)), - this.Encoder - ); - } - - /// Encode a key which is composed of four elements - public Slice Encode(T1 item1, T2 item2, T3 item3, T4 item4) - { - var writer = this.Subspace.GetWriter(); - this.Encoder.EncodeKey(ref writer, item1, item2, item3, item4); - return writer.ToSlice(); - } - - /// Encode a batch of keys, each one composed of four values extracted from each elements - public Slice[] 
EncodeMany(IEnumerable items, Func selector1, Func selector2, Func selector3, Func selector4) - { - return Batched.Convert( - this.Subspace.GetWriter(), - items, - (ref SliceWriter writer, TItem item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, selector1(item), selector2(item), selector3(item), selector4(item)), - this.Encoder - ); - } - - /// Encode a key which is composed of five elements - public Slice Encode(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) - { - var writer = this.Subspace.GetWriter(); - this.Encoder.EncodeKey(ref writer, item1, item2, item3, item4, item5); - return writer.ToSlice(); - } - - /// Encode a batch of keys, each one composed of five values extracted from each elements - public Slice[] EncodeMany(IEnumerable items, Func selector1, Func selector2, Func selector3, Func selector4, Func selector5) - { - return Batched.Convert( - this.Subspace.GetWriter(), - items, - (ref SliceWriter writer, TItem item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, selector1(item), selector2(item), selector3(item), selector4(item), selector5(item)), - this.Encoder - ); - } - - /// Encode a key which is composed of six elements - public Slice Encode(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) - { - var writer = this.Subspace.GetWriter(); - this.Encoder.EncodeKey(ref writer, item1, item2, item3, item4, item5, item6); - return writer.ToSlice(); - } - - /// Encode a batch of keys, each one composed of six values extracted from each elements - public Slice[] EncodeMany(IEnumerable items, Func selector1, Func selector2, Func selector3, Func selector4, Func selector5, Func selector6) - { - return Batched.Convert( - this.Subspace.GetWriter(), - items, - (ref SliceWriter writer, TItem item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, selector1(item), selector2(item), selector3(item), selector4(item), selector5(item), selector6(item)), - this.Encoder - ); - } - - /// Encode a key which is composed of 
seven elements - public Slice Encode(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) - { - var writer = this.Subspace.GetWriter(); - this.Encoder.EncodeKey(ref writer, item1, item2, item3, item4, item5, item6, item7); - return writer.ToSlice(); - } - - /// Encode a batch of keys, each one composed of seven values extracted from each elements - public Slice[] EncodeMany(IEnumerable items, Func selector1, Func selector2, Func selector3, Func selector4, Func selector5, Func selector6, Func selector7) - { - return Batched.Convert( - this.Subspace.GetWriter(), - items, - (ref SliceWriter writer, TItem item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, selector1(item), selector2(item), selector3(item), selector4(item), selector5(item), selector6(item), selector7(item)), - this.Encoder - ); - } - - /// Encode a key which is composed of eight elements - public Slice Encode(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) - { - var writer = this.Subspace.GetWriter(); - this.Encoder.EncodeKey(ref writer, item1, item2, item3, item4, item5, item6, item7, item8); - return writer.ToSlice(); - } - - /// Encode a batch of keys, each one composed of eight values extracted from each elements - public Slice[] EncodeMany(IEnumerable items, Func selector1, Func selector2, Func selector3, Func selector4, Func selector5, Func selector6, Func selector7, Func selector8) - { - return Batched.Convert( - this.Subspace.GetWriter(), - items, - (ref SliceWriter writer, TItem item, IDynamicKeyEncoder encoder) => encoder.EncodeKey(ref writer, selector1(item), selector2(item), selector3(item), selector4(item), selector5(item), selector6(item), selector7(item), selector8(item)), - this.Encoder - ); - } - - /// Unpack a key of this subspace, back into a tuple - /// Key that was produced by a previous call to - /// Original tuple - public IFdbTuple Unpack(Slice packed) - { - return 
this.Encoder.UnpackKey(this.Subspace.ExtractKey(packed)); - } - - private static T[] BatchDecode(IEnumerable packed, IFdbSubspace subspace, IDynamicKeyEncoder encoder, Func decode) - { - var coll = packed as ICollection; - if (coll != null) - { - var res = new T[coll.Count]; - int p = 0; - foreach (var data in packed) - { - res[p++] = decode(subspace.ExtractKey(data), encoder); - } - Contract.Assert(p == res.Length); - return res; - } - else - { - var res = new List(); - foreach (var data in packed) - { - res.Add(decode(subspace.ExtractKey(data), encoder)); - } - return res.ToArray(); - } - } - - /// Unpack a batch of keys of this subspace, back into an array of tuples - /// Sequence of keys that were produced by a previous call to or - /// Array containing the original tuples - public IFdbTuple[] UnpackMany(IEnumerable packed) - { - return BatchDecode(packed, this.Subspace, this.Encoder, (data, encoder) => encoder.UnpackKey(data)); - } - - /// Decode a key of this subspace, composed of a single element - public T1 Decode(Slice packed) - { - return this.Encoder.DecodeKey(this.Subspace.ExtractKey(packed)); - } - - /// Decode a batch of keys of this subspace, each one composed of a single element - public IEnumerable DecodeMany(IEnumerable packed) - { - return BatchDecode(packed, this.Subspace, this.Encoder, (data, encoder) => encoder.DecodeKey(data)); - } - - /// Decode a key of this subspace, composed of exactly two elements - public FdbTuple Decode(Slice packed) - { - return this.Encoder.DecodeKey(this.Subspace.ExtractKey(packed)); - } - - /// Decode a batch of keys of this subspace, each one composed of exactly two elements - public IEnumerable> DecodeMany(IEnumerable packed) - { - return BatchDecode(packed, this.Subspace, this.Encoder, (data, encoder) => encoder.DecodeKey(data)); - } - - /// Decode a key of this subspace, composed of exactly three elements - public FdbTuple Decode(Slice packed) - { - return 
this.Encoder.DecodeKey(this.Subspace.ExtractKey(packed)); - } - - /// Decode a batch of keys of this subspace, each one composed of exactly three elements - public IEnumerable> DecodeMany(IEnumerable packed) - { - return BatchDecode(packed, this.Subspace, this.Encoder, (data, encoder) => encoder.DecodeKey(data)); - } - - /// Decode a key of this subspace, composed of exactly four elements - public FdbTuple Decode(Slice packed) - { - return this.Encoder.DecodeKey(this.Subspace.ExtractKey(packed)); - } - - /// Decode a batch of keys of this subspace, each one composed of exactly four elements - public IEnumerable> DecodeMany(IEnumerable packed) - { - return BatchDecode(packed, this.Subspace, this.Encoder, (data, encoder) => encoder.DecodeKey(data)); - } - - /// Decode a key of this subspace, composed of exactly five elements - public FdbTuple Decode(Slice packed) - { - return this.Encoder.DecodeKey(this.Subspace.ExtractKey(packed)); - } - - /// Decode a batch of keys of this subspace, each one composed of exactly five elements - public IEnumerable> DecodeMany(IEnumerable packed) - { - return BatchDecode(packed, this.Subspace, this.Encoder, (data, encoder) => encoder.DecodeKey(data)); - } - - /// Decode a key of this subspace, and return only the first element without decoding the rest the key. - /// This method is faster than unpacking the complete key and reading only the first element. - public T DecodeFirst(Slice packed) - { - return this.Encoder.DecodeKeyFirst(this.Subspace.ExtractKey(packed)); - } - - /// Decode a batch of keys of this subspace, and for each one, return only the first element without decoding the rest of the key. - /// This method is faster than unpacking the complete key and reading only the first element. 
- public IEnumerable DecodeFirstMany(IEnumerable packed) - { - return BatchDecode(packed, this.Subspace, this.Encoder, (data, encoder) => encoder.DecodeKeyFirst(data)); - } - - /// Decode a key of this subspace, and return only the last element without decoding the rest. - /// This method is faster than unpacking the complete key and reading only the last element. - public T DecodeLast(Slice packed) - { - return this.Encoder.DecodeKeyLast(this.Subspace.ExtractKey(packed)); - } - - /// Decode a batch of keys of this subspace, and for each one, return only the last element without decoding the rest of the key. - /// This method is faster than unpacking the complete key and reading only the last element. - public IEnumerable DecodeLastMany(Slice[] packed) - { - return BatchDecode(packed, this.Subspace, this.Encoder, (data, encoder) => encoder.DecodeKeyLast(data)); - } - - #region Append: Subspace => Tuple - - /// Return an empty tuple that is attached to this subspace - /// Empty tuple that can be extended, and whose packed representation will always be prefixed by the subspace key - [NotNull] - public IFdbTuple ToTuple() - { - return new FdbPrefixedTuple(this.Subspace.Key, FdbTuple.Empty); - } - - /// Attach a tuple to an existing subspace. - /// Tuple whose items will be appended at the end of the current subspace - /// Tuple that wraps the items of and whose packed representation will always be prefixed by the subspace key. - [NotNull] - public IFdbTuple Concat([NotNull] IFdbTuple tuple) - { - return new FdbPrefixedTuple(this.Subspace.Key, tuple); - } - - /// Convert a formattable item into a tuple that is attached to this subspace. - /// Item that can be converted into a tuple - /// Tuple that is the logical representation of the item, and whose packed representation will always be prefixed by the subspace key. 
- /// This is the equivalent of calling 'subspace.Create(formattable.ToTuple())' - [NotNull] - public IFdbTuple Concat([NotNull] ITupleFormattable formattable) - { - if (formattable == null) throw new ArgumentNullException("formattable"); - var tuple = formattable.ToTuple(); - if (tuple == null) throw new InvalidOperationException("Formattable item cannot return an empty tuple"); - return new FdbPrefixedTuple(this.Subspace.Key, tuple); - } - - /// Create a new 1-tuple that is attached to this subspace - /// Type of the value to append - /// Value that will be appended - /// Tuple of size 1 that contains , and whose packed representation will always be prefixed by the subspace key. - /// This is the equivalent of calling 'subspace.Create(FdbTuple.Create<T>(value))' - [NotNull] - public IFdbTuple Append(T value) - { - return new FdbPrefixedTuple(this.Subspace.Key, FdbTuple.Create(value)); - } - - /// Create a new 2-tuple that is attached to this subspace - /// Type of the first value to append - /// Type of the second value to append - /// First value that will be appended - /// Second value that will be appended - /// Tuple of size 2 that contains and , and whose packed representation will always be prefixed by the subspace key. - /// This is the equivalent of calling 'subspace.Create(FdbTuple.Create<T1, T2>(item1, item2))' - [NotNull] - public IFdbTuple Append(T1 item1, T2 item2) - { - return new FdbPrefixedTuple(this.Subspace.Key, FdbTuple.Create(item1, item2)); - } - - /// Create a new 3-tuple that is attached to this subspace - /// Type of the first value to append - /// Type of the second value to append - /// Type of the third value to append - /// First value that will be appended - /// Second value that will be appended - /// Third value that will be appended - /// Tuple of size 3 that contains , and , and whose packed representation will always be prefixed by the subspace key. 
- /// This is the equivalent of calling 'subspace.Create(FdbTuple.Create<T1, T2, T3>(item1, item2, item3))' - [NotNull] - public IFdbTuple Append(T1 item1, T2 item2, T3 item3) - { - return new FdbPrefixedTuple(this.Subspace.Key, FdbTuple.Create(item1, item2, item3)); - } - - /// Create a new 4-tuple that is attached to this subspace - /// Type of the first value to append - /// Type of the second value to append - /// Type of the third value to append - /// Type of the fourth value to append - /// First value that will be appended - /// Second value that will be appended - /// Third value that will be appended - /// Fourth value that will be appended - /// Tuple of size 4 that contains , , and , and whose packed representation will always be prefixed by the subspace key. - /// This is the equivalent of calling 'subspace.Create(FdbTuple.Create<T1, T2, T3, T4>(item1, item2, item3, item4))' - [NotNull] - public IFdbTuple Append(T1 item1, T2 item2, T3 item3, T4 item4) - { - return new FdbPrefixedTuple(this.Subspace.Key, FdbTuple.Create(item1, item2, item3, item4)); - } - - /// Create a new 5-tuple that is attached to this subspace - /// Type of the first value to append - /// Type of the second value to append - /// Type of the third value to append - /// Type of the fourth value to append - /// Type of the fifth value to append - /// First value that will be appended - /// Second value that will be appended - /// Third value that will be appended - /// Fourth value that will be appended - /// Fifth value that will be appended - /// Tuple of size 5 that contains , , , and , and whose packed representation will always be prefixed by the subspace key. 
- /// This is the equivalent of calling 'subspace.Create(FdbTuple.Create<T1, T2, T3, T4, T5>(item1, item2, item3, item4, item5))' - [NotNull] - public IFdbTuple Append(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) - { - return new FdbPrefixedTuple(this.Subspace.Key, FdbTuple.Create(item1, item2, item3, item4, item5)); - } - - #endregion - - } -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbDynamicSubspacePartition.cs b/FoundationDB.Client/Subspaces/FdbDynamicSubspacePartition.cs deleted file mode 100644 index 03f7489e3..000000000 --- a/FoundationDB.Client/Subspaces/FdbDynamicSubspacePartition.cs +++ /dev/null @@ -1,195 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client -{ - using FoundationDB.Layers.Tuples; - using JetBrains.Annotations; - using System; - - public struct FdbDynamicSubspacePartition - { - [NotNull] - public readonly IFdbDynamicSubspace Subspace; - - [NotNull] - public readonly IDynamicKeyEncoder Encoder; - - public FdbDynamicSubspacePartition([NotNull] IFdbDynamicSubspace subspace, [NotNull] IDynamicKeyEncoder encoder) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); - this.Subspace = subspace; - this.Encoder = encoder; - } - - /// Returns the same view but using a different Type System - /// Type System that will code keys in this new view - /// Review that will partition this subspace using a different Type System - /// - /// This should only be used for one-off usages where creating a new subspace just to encode one key would be overkill. - /// If you are calling this in a loop, consider creating a new subspace using that encoding. 
- /// - public FdbDynamicSubspacePartition Using([NotNull] IFdbKeyEncoding encoding) - { - if (encoding == null) throw new ArgumentNullException("encoding"); - var encoder = encoding.GetDynamicEncoder(); - return UsingEncoder(encoder); - } - - /// Returns the same view but using a different Type System - /// Type System that will code keys in this new view - /// Review that will partition this subspace using a different Type System - /// - /// This should only be used for one-off usages where creating a new subspace just to encode one key would be overkill. - /// If you are calling this in a loop, consider creating a new subspace using that encoder. - /// - public FdbDynamicSubspacePartition UsingEncoder([NotNull] IDynamicKeyEncoder encoder) - { - return new FdbDynamicSubspacePartition(this.Subspace, encoder); - } - - /// Create a new subspace by appdending a suffix to the current subspace - /// Suffix of the new subspace - /// New subspace with prefix equal to the current subspace's prefix, followed by - public IFdbDynamicSubspace this[Slice suffix] - { - [NotNull] - get - { - if (suffix.IsNull) throw new ArgumentException("Partition suffix cannot be null", "suffix"); - //TODO: find a way to limit the number of copies of the key? 
- return new FdbDynamicSubspace(this.Subspace.ConcatKey(suffix), false, this.Encoder); - } - } - - /// Create a new subspace by adding a to the current subspace's prefix - /// Key that will be appended to the current prefix - /// New subspace whose prefix is the concatenation of the parent prefix, and the packed representation of - public IFdbDynamicSubspace this[IFdbKey key] - { - [ContractAnnotation("null => halt; notnull => notnull")] - get - { - if (key == null) throw new ArgumentNullException("key"); - var packed = key.ToFoundationDbKey(); - return this[packed]; - } - } - - public IFdbDynamicSubspace this[IFdbTuple tuple] - { - [ContractAnnotation("null => halt; notnull => notnull")] - get - { - if (tuple == null) throw new ArgumentNullException("tuple"); - //TODO: find a way to limit the number of copies of the packed tuple? - return new FdbDynamicSubspace(this.Subspace.Keys.Pack(tuple), false, this.Encoder); - } - } - - public IFdbDynamicSubspace this[ITupleFormattable item] - { - [ContractAnnotation("null => halt; notnull => notnull")] - get - { - if (item == null) throw new ArgumentNullException("item"); - var tuple = item.ToTuple(); - if (tuple == null) throw new InvalidOperationException("Formattable item returned an empty tuple"); - return this[tuple]; - } - } - - /// Partition this subspace into a child subspace - /// Type of the child subspace key - /// Value of the child subspace - /// New subspace that is logically contained by the current subspace - /// Subspace([Foo, ]).Partition(Bar) is equivalent to Subspace([Foo, Bar, ]) - /// - /// new FdbSubspace(["Users", ]).Partition("Contacts") == new FdbSubspace(["Users", "Contacts", ]) - /// - [NotNull] - public IFdbDynamicSubspace ByKey(T value) - { - return new FdbDynamicSubspace(this.Subspace.Keys.Encode(value), false, this.Encoder); - } - - /// Partition this subspace into a child subspace - /// Type of the first subspace key - /// Type of the second subspace key - /// Value of the first subspace key 
- /// Value of the second subspace key - /// New subspace that is logically contained by the current subspace - /// Subspace([Foo, ]).Partition(Bar, Baz) is equivalent to Subspace([Foo, Bar, Baz]) - /// - /// new FdbSubspace(["Users", ]).Partition("Contacts", "Friends") == new FdbSubspace(["Users", "Contacts", "Friends", ]) - /// - [NotNull] - public IFdbDynamicSubspace ByKey(T1 value1, T2 value2) - { - return new FdbDynamicSubspace(this.Subspace.Keys.Encode(value1, value2), false, this.Encoder); - } - - /// Partition this subspace into a child subspace - /// Type of the first subspace key - /// Type of the second subspace key - /// Type of the third subspace key - /// Value of the first subspace key - /// Value of the second subspace key - /// Value of the third subspace key - /// New subspace that is logically contained by the current subspace - /// - /// new FdbSubspace(["Users", ]).Partition("John Smith", "Contacts", "Friends") == new FdbSubspace(["Users", "John Smith", "Contacts", "Friends", ]) - /// - [NotNull] - public IFdbDynamicSubspace ByKey(T1 value1, T2 value2, T3 value3) - { - return new FdbDynamicSubspace(this.Subspace.Keys.Encode(value1, value2, value3), false, this.Encoder); - } - - /// Partition this subspace into a child subspace - /// Type of the first subspace key - /// Type of the second subspace key - /// Type of the third subspace key - /// Type of the fourth subspace key - /// Value of the first subspace key - /// Value of the second subspace key - /// Value of the third subspace key - /// Value of the fourth subspace key - /// New subspace that is logically contained by the current subspace - /// - /// new FdbSubspace(["Users", ]).Partition("John Smith", "Contacts", "Friends", "Messages") == new FdbSubspace(["Users", "John Smith", "Contacts", "Friends", "Messages", ]) - /// - [NotNull] - public IFdbDynamicSubspace ByKey(T1 value1, T2 value2, T3 value3, T4 value4) - { - return new FdbDynamicSubspace(this.Subspace.Keys.Encode(value1, value2, 
value3, value4), false, this.Encoder); - } - - } -} diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`1.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`1.cs deleted file mode 100644 index bc507e1b6..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`1.cs +++ /dev/null @@ -1,86 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using System.Collections.Generic; -using System.Diagnostics.Contracts; -using FoundationDB.Layers.Tuples; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - public struct FdbEncoderSubspaceKeys - { - - [NotNull] - public readonly IFdbSubspace Subspace; - - [NotNull] - public readonly IKeyEncoder Encoder; - - public FdbEncoderSubspaceKeys([NotNull] IFdbSubspace subspace, [NotNull] IKeyEncoder encoder) - { - Contract.Requires(subspace != null && encoder != null); - this.Subspace = subspace; - this.Encoder = encoder; - } - - public Slice this[T value] - { - get { return Encode(value); } - } - - public Slice Encode(T value) - { - return this.Subspace.ConcatKey(this.Encoder.EncodeKey(value)); - } - - public Slice[] Encode([NotNull] IEnumerable values) - { - if (values == null) throw new ArgumentNullException("values"); - return Batched>.Convert( - this.Subspace.GetWriter(), - values, - (ref SliceWriter writer, T value, IKeyEncoder encoder) => { writer.WriteBytes(encoder.EncodeKey(value)); }, - this.Encoder - ); - } - - public T Decode(Slice packed) - { - return this.Encoder.DecodeKey(this.Subspace.ExtractKey(packed)); - } - - public FdbKeyRange ToRange(T value) - { - //REVIEW: which semantic for ToRange() should we use? - return FdbTuple.ToRange(Encode(value)); - } - - } -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`2.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`2.cs deleted file mode 100644 index 7e35b28f6..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`2.cs +++ /dev/null @@ -1,86 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using System.Collections.Generic; -using System.Diagnostics.Contracts; -using FoundationDB.Layers.Tuples; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - public struct FdbEncoderSubspaceKeys - { - - [NotNull] - public readonly IFdbSubspace Subspace; - - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public FdbEncoderSubspaceKeys([NotNull] IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - Contract.Requires(subspace != null && encoder != null); - this.Subspace = subspace; - this.Encoder = encoder; - } - - public Slice this[T1 value1, T2 value2] - { - get { return Encode(value1, value2); } - } - - public Slice Encode(T1 value1, T2 value2) - { - return this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2)); - } - - public Slice[] Encode([NotNull] IEnumerable values, [NotNull] Func selector1, [NotNull] Func selector2) - { - if (values == null) throw new ArgumentNullException("values"); - return Batched>.Convert( - this.Subspace.GetWriter(), - values, - (ref SliceWriter writer, TSource value, ICompositeKeyEncoder encoder) => writer.WriteBytes(encoder.EncodeKey(selector1(value), selector2(value))), - this.Encoder - ); - } - - public FdbTuple Decode(Slice packed) - { - return this.Encoder.DecodeKey(this.Subspace.ExtractKey(packed)); - } - - public FdbKeyRange ToRange(T1 value1, T2 value2) - { - //REVIEW: which semantic for ToRange() should we use? - return FdbTuple.ToRange(Encode(value1, value2)); - } - - } -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`3.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`3.cs deleted file mode 100644 index d175c05ad..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`3.cs +++ /dev/null @@ -1,81 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using System.Collections.Generic; -using FoundationDB.Layers.Tuples; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - public struct FdbEncoderSubspaceKeys - { - - public readonly IFdbSubspace Subspace; - public readonly ICompositeKeyEncoder Encoder; - - public FdbEncoderSubspaceKeys([NotNull] IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - this.Subspace = subspace; - this.Encoder = encoder; - } - - public Slice this[T1 value1, T2 value2, T3 value3] - { - get { return Encode(value1, value2, value3); } - } - - public Slice Encode(T1 value1, T2 value2, T3 value3) - { - return this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3)); - } - - public Slice[] Encode([NotNull] IEnumerable values, [NotNull] Func selector1, [NotNull] Func selector2, [NotNull] Func selector3) - { - if (values == null) throw new ArgumentNullException("values"); - return Batched>.Convert( - this.Subspace.GetWriter(), - values, - (ref SliceWriter writer, TSource value, ICompositeKeyEncoder encoder) => writer.WriteBytes(encoder.EncodeKey(selector1(value), selector2(value), selector3(value))), - this.Encoder - ); - } - - public FdbTuple Decode(Slice packed) - { - return this.Encoder.DecodeKey(this.Subspace.ExtractKey(packed)); - } - - public FdbKeyRange ToRange(T1 value1, T2 value2, T3 value3) - { - //REVIEW: which semantic for ToRange() should we use? - return FdbTuple.ToRange(Encode(value1, value2, value3)); - } - - } -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`4.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`4.cs deleted file mode 100644 index 9983cbda0..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspaceKeys`4.cs +++ /dev/null @@ -1,87 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using System.Collections.Generic; -using System.Diagnostics.Contracts; -using FoundationDB.Layers.Tuples; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - public struct FdbEncoderSubspaceKeys - { - - [NotNull] - public readonly IFdbSubspace Subspace; - - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public FdbEncoderSubspaceKeys([NotNull] IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - Contract.Requires(subspace != null && encoder != null); - this.Subspace = subspace; - this.Encoder = encoder; - } - - public Slice this[T1 value1, T2 value2, T3 value3, T4 value4] - { - get { return Encode(value1, value2, value3, value4); } - } - - public Slice Encode(T1 value1, T2 value2, T3 value3, T4 value4) - { - return this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3, value4)); - } - - [NotNull] - public Slice[] Encode([NotNull] IEnumerable values, [NotNull] Func selector1, [NotNull] Func selector2, [NotNull] Func selector3, [NotNull] Func selector4) - { - if (values == null) throw new ArgumentNullException("values"); - return Batched>.Convert( - this.Subspace.GetWriter(), - values, - (ref SliceWriter writer, TSource value, ICompositeKeyEncoder encoder) => writer.WriteBytes(encoder.EncodeKey(selector1(value), selector2(value), selector3(value), selector4(value))), - this.Encoder - ); - } - - public FdbTuple Decode(Slice packed) - { - return this.Encoder.DecodeKey(this.Subspace.ExtractKey(packed)); - } - - public FdbKeyRange ToRange(T1 value1, T2 value2, T3 value3, T4 value4) - { - //REVIEW: which semantic for ToRange() should we use? 
- return FdbTuple.ToRange(Encode(value1, value2, value3, value4)); - } - - } -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`2.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`2.cs deleted file mode 100644 index f2653bb06..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`2.cs +++ /dev/null @@ -1,86 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - public struct FdbEncoderSubspacePartition - { - [NotNull] - public readonly IFdbSubspace Subspace; - - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public FdbEncoderSubspacePartition([NotNull] IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - this.Subspace = subspace; - this.Encoder = encoder; - } - - public IFdbSubspace this[T1 value1, T2 value2] - { - [NotNull] - get - { return ByKey(value1, value2); } - } - - [NotNull] - public IFdbSubspace ByKey(T1 value1, T2 value2) - { - return this.Subspace[this.Encoder.EncodeKey(value1, value2)]; - } - - [NotNull] - public IFdbDynamicSubspace ByKey(T1 value1, T2 value2, IFdbKeyEncoding encoding) - { - return FdbSubspace.CreateDynamic(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2)), encoding); - } - - [NotNull] - public IFdbDynamicSubspace ByKey(T1 value1, T2 value2, IDynamicKeyEncoder encoder) - { - return FdbSubspace.CreateDynamic(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2)), encoder); - } - - [NotNull] - public IFdbEncoderSubspace ByKey(T1 value1, T2 value2, IFdbKeyEncoding encoding) - { - return FdbSubspace.CreateEncoder(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2)), encoding); - } - - [NotNull] - public IFdbEncoderSubspace ByKey(T1 value1, T2 value2, IKeyEncoder encoder) - { - return FdbSubspace.CreateEncoder(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2)), encoder); - } - - } -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`3.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`3.cs deleted file mode 100644 index f2d174088..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`3.cs +++ /dev/null @@ -1,87 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using System.Diagnostics.Contracts; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - public struct FdbEncoderSubspacePartition - { - [NotNull] - public readonly IFdbSubspace Subspace; - - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public FdbEncoderSubspacePartition([NotNull] IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - Contract.Requires(subspace != null && encoder != null); - this.Subspace = subspace; - this.Encoder = encoder; - } - - public IFdbSubspace this[T1 value1, T2 value2, T3 value3] - { - [NotNull] - get { return ByKey(value1, value2, value3); } - } - - [NotNull] - public IFdbSubspace ByKey(T1 value1, T2 value2, T3 value3) - { - return this.Subspace[this.Encoder.EncodeKey(value1, value2, value3)]; - } - - [NotNull] - public IFdbDynamicSubspace ByKey(T1 value1, T2 value2, T3 value3, IFdbKeyEncoding encoding) - { - return FdbSubspace.CreateDynamic(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3)), encoding); - } - - [NotNull] - public IFdbDynamicSubspace ByKey(T1 value1, T2 value2, T3 value3, IDynamicKeyEncoder encoder) - { - return FdbSubspace.CreateDynamic(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3)), encoder); - } - - [NotNull] - public IFdbEncoderSubspace ByKey(T1 value1, T2 value2, T3 value3, IFdbKeyEncoding encoding) - { - return FdbSubspace.CreateEncoder(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3)), encoding); - } - - [NotNull] - public IFdbEncoderSubspace ByKey(T1 value1, T2 value2, T3 value3, IKeyEncoder encoder) - { - return FdbSubspace.CreateEncoder(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3)), encoder); - } - - } -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`4.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`4.cs deleted file mode 100644 index 56191ebb4..000000000 --- 
a/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`4.cs +++ /dev/null @@ -1,87 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using JetBrains.Annotations; -using FoundationDB.Client.Utils; - -namespace FoundationDB.Client -{ - public struct FdbEncoderSubspacePartition - { - [NotNull] - public readonly IFdbSubspace Subspace; - - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public FdbEncoderSubspacePartition([NotNull] IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - Contract.Requires(subspace != null && encoder != null); - this.Subspace = subspace; - this.Encoder = encoder; - } - - public IFdbSubspace this[T1 value1, T2 value2, T3 value3, T4 value4] - { - [NotNull] - get { return ByKey(value1, value2, value3, value4); } - } - - [NotNull] - public IFdbSubspace ByKey(T1 value1, T2 value2, T3 value3, T4 value4) - { - return this.Subspace[this.Encoder.EncodeKey(value1, value2, value3, value4)]; - } - - [NotNull] - public IFdbDynamicSubspace ByKey(T1 value1, T2 value2, T3 value3, T4 value4, [NotNull] IFdbKeyEncoding encoding) - { - return FdbSubspace.CreateDynamic(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3, value4)), encoding); - } - - [NotNull] - public IFdbDynamicSubspace ByKey(T1 value1, T2 value2, T3 value3, T4 value4, [NotNull] IDynamicKeyEncoder encoder) - { - return FdbSubspace.CreateDynamic(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3, value4)), encoder); - } - - [NotNull] - public IFdbEncoderSubspace ByKey(T1 value1, T2 value2, T3 value3, T4 value4, [NotNull] IFdbKeyEncoding encoding) - { - return FdbSubspace.CreateEncoder(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3, value4)), encoding); - } - - [NotNull] - public IFdbEncoderSubspace ByKey(T1 value1, T2 value2, T3 value3, T4 value4, [NotNull] IKeyEncoder encoder) - { - return FdbSubspace.CreateEncoder(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value1, value2, value3, value4)), encoder); - } - - } -} \ No newline at end of file diff --git 
a/FoundationDB.Client/Subspaces/FdbEncoderSubspace`1.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspace`1.cs deleted file mode 100644 index 645c17fc1..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspace`1.cs +++ /dev/null @@ -1,73 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - - /// Subspace that knows how to encode and decode its key - /// Type of the key handled by this subspace - public class FdbEncoderSubspace : FdbSubspace, IFdbEncoderSubspace - { - private readonly IKeyEncoder m_encoder; - - // ReSharper disable once FieldCanBeMadeReadOnly.Local - private /*readonly*/ FdbEncoderSubspaceKeys m_keys; - - public FdbEncoderSubspace(Slice rawPrefix, [NotNull] IKeyEncoder encoder) - : this(rawPrefix, true, encoder) - { } - - internal FdbEncoderSubspace(Slice rawPrefix, bool copy, [NotNull] IKeyEncoder encoder) - : base(rawPrefix, copy) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - m_encoder = encoder; - m_keys = new FdbEncoderSubspaceKeys(this, encoder); - } - - public IKeyEncoder Encoder - { - get { return m_encoder; } - } - - public FdbEncoderSubspaceKeys Keys - { - get { return m_keys; } - } - - public FdbEncoderSubspacePartition Partition - { - get { return new FdbEncoderSubspacePartition(this, m_encoder); } - } - - } - -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspace`2.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspace`2.cs deleted file mode 100644 index 64d522a43..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspace`2.cs +++ /dev/null @@ -1,81 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. 
- * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -using System; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - - /// Subspace that knows how to encode and decode its key - /// Type of the first item of the keys handled by this subspace - /// Type of the second item of the keys handled by this subspace - public class FdbEncoderSubspace : FdbSubspace, IFdbEncoderSubspace - { - private readonly ICompositeKeyEncoder m_encoder; - - // ReSharper disable once FieldCanBeMadeReadOnly.Local - private /*readonly*/ FdbEncoderSubspaceKeys m_keys; - - public FdbEncoderSubspace(Slice rawPrefix, [NotNull] ICompositeKeyEncoder encoder) - : this(rawPrefix, true, encoder) - { } - - internal FdbEncoderSubspace(Slice rawPrefix, bool copy, [NotNull] ICompositeKeyEncoder encoder) - : base(rawPrefix, copy) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - m_encoder = encoder; - m_keys = new FdbEncoderSubspaceKeys(this, encoder); - } - - private FdbEncoderSubspace m_partial; - - public IFdbEncoderSubspace Partial - { - get { return m_partial ?? 
(m_partial = new FdbEncoderSubspace(GetKeyPrefix(), false, KeyValueEncoders.Head(m_encoder))); } - } - - public ICompositeKeyEncoder Encoder - { - get { return m_encoder; } - } - - public FdbEncoderSubspaceKeys Keys - { - get { return m_keys; } - } - - public FdbEncoderSubspacePartition Partition - { - get { return new FdbEncoderSubspacePartition(this, m_encoder); } - } - - } - -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspace`3.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspace`3.cs deleted file mode 100644 index 444f91b9a..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspace`3.cs +++ /dev/null @@ -1,87 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -using System; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - - /// Subspace that knows how to encode and decode its key - /// Type of the first item of the keys handled by this subspace - /// Type of the second item of the keys handled by this subspace - /// Type of the third item of the keys handled by this subspace - public class FdbEncoderSubspace : FdbSubspace, IFdbEncoderSubspace - { - private readonly ICompositeKeyEncoder m_encoder; - - // ReSharper disable once FieldCanBeMadeReadOnly.Local - private /*readonly*/ FdbEncoderSubspaceKeys m_keys; - private FdbEncoderSubspace m_head; - private FdbEncoderSubspace m_partial; - - public FdbEncoderSubspace(Slice rawPrefix, [NotNull] ICompositeKeyEncoder encoder) - : this(rawPrefix, true, encoder) - { } - - internal FdbEncoderSubspace(Slice rawPrefix, bool copy, [NotNull] ICompositeKeyEncoder encoder) - : base(rawPrefix, copy) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - m_encoder = encoder; - m_keys = new FdbEncoderSubspaceKeys(this, encoder); - } - - public IFdbEncoderSubspace Head - { - get { return m_head ?? (m_head = new FdbEncoderSubspace(GetKeyPrefix(), false, KeyValueEncoders.Head(m_encoder))); } - } - - public IFdbEncoderSubspace Partial - { - get { return m_partial ?? 
(m_partial = new FdbEncoderSubspace(GetKeyPrefix(), false, KeyValueEncoders.Pair(m_encoder))); } - } - - public ICompositeKeyEncoder Encoder - { - get { return m_encoder; } - } - - public FdbEncoderSubspaceKeys Keys - { - get { return m_keys; } - } - - public FdbEncoderSubspacePartition Partition - { - get { return new FdbEncoderSubspacePartition(this, m_encoder); } - } - - } - -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspace`4.cs b/FoundationDB.Client/Subspaces/FdbEncoderSubspace`4.cs deleted file mode 100644 index 787cb5131..000000000 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspace`4.cs +++ /dev/null @@ -1,87 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -using System; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - - /// Subspace that knows how to encode and decode its key - /// Type of the first item of the keys handled by this subspace - /// Type of the second item of the keys handled by this subspace - /// Type of the third item of the keys handled by this subspace - public class FdbEncoderSubspace : FdbSubspace, IFdbEncoderSubspace - { - private readonly ICompositeKeyEncoder m_encoder; - - // ReSharper disable once FieldCanBeMadeReadOnly.Local - private /*readonly*/ FdbEncoderSubspaceKeys m_keys; - private FdbEncoderSubspace m_head; - private FdbEncoderSubspace m_partial; - - public FdbEncoderSubspace(Slice rawPrefix, [NotNull] ICompositeKeyEncoder encoder) - : this(rawPrefix, true, encoder) - { } - - internal FdbEncoderSubspace(Slice rawPrefix, bool copy, [NotNull] ICompositeKeyEncoder encoder) - : base(rawPrefix, copy) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - m_encoder = encoder; - m_keys = new FdbEncoderSubspaceKeys(this, encoder); - } - - public IFdbEncoderSubspace Head - { - get { return m_head ?? (m_head = new FdbEncoderSubspace(GetKeyPrefix(), false, KeyValueEncoders.Head(m_encoder))); } - } - - public IFdbEncoderSubspace Partial - { - get { return m_partial ?? 
(m_partial = new FdbEncoderSubspace(GetKeyPrefix(), false, KeyValueEncoders.Pair(m_encoder))); } - } - - public ICompositeKeyEncoder Encoder - { - get { return m_encoder; } - } - - public FdbEncoderSubspaceKeys Keys - { - get { return m_keys; } - } - - public FdbEncoderSubspacePartition Partition - { - get { return new FdbEncoderSubspacePartition(this, m_encoder); } - } - - } - -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/FdbSubspace.cs b/FoundationDB.Client/Subspaces/FdbSubspace.cs deleted file mode 100644 index d0703c3cd..000000000 --- a/FoundationDB.Client/Subspaces/FdbSubspace.cs +++ /dev/null @@ -1,618 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -using System.Linq; - -namespace FoundationDB.Client -{ - using FoundationDB.Layers.Tuples; - using JetBrains.Annotations; - using System; - using System.Collections.Generic; - using System.Diagnostics; - - /// Adds a prefix on every keys, to group them inside a common subspace - public class FdbSubspace : IFdbSubspace, IFdbKey, IEquatable, IComparable - { - /// Empty subspace, that does not add any prefix to the keys - public static readonly IFdbSubspace Empty = new FdbSubspace(Slice.Empty); - - /// Binary prefix of this subspace - private Slice m_rawPrefix; //PERF: readonly struct - - /// Returns the key of this directory subspace - /// This should only be used by methods that can use the key internally, even if it is not supposed to be exposed (as is the case for directory partitions) - protected Slice InternalKey - { - get { return m_rawPrefix; } - } - - #region Constructors... - - /// Wraps an existing subspace, without copying the prefix (if possible) - protected FdbSubspace([NotNull] IFdbSubspace copy) - { - if (copy == null) throw new ArgumentNullException("copy"); - var sub = copy as FdbSubspace; - Slice key = sub != null ? sub.m_rawPrefix : copy.ToFoundationDbKey(); - if (key.IsNull) throw new ArgumentException("The subspace key cannot be null. 
Use Slice.Empty if you want a subspace with no prefix.", "copy"); - m_rawPrefix = key; - } - - /// Create a new subspace from a binary prefix - /// Prefix of the new subspace - /// If true, take a copy of the prefix - internal FdbSubspace(Slice rawPrefix, bool copy) - { - if (rawPrefix.IsNull) throw new ArgumentException("The subspace key cannot be null. Use Slice.Empty if you want a subspace with no prefix.", "rawPrefix"); - if (copy) rawPrefix = rawPrefix.Memoize(); - m_rawPrefix = rawPrefix.Memoize(); - } - - /// Create a new subspace from a binary prefix - /// Prefix of the new subspace - public FdbSubspace(Slice rawPrefix) - : this(rawPrefix, true) - { } - - #endregion - - #region Static Prefix Helpers... - - /// Create a new Subspace using a binary key as the prefix - /// Prefix of the new subspace - /// New subspace that will use a copy of as its prefix - [NotNull] - public static IFdbSubspace Create(Slice slice) - { - return new FdbDynamicSubspace(slice, TypeSystem.Default.GetDynamicEncoder()); - } - - public static IFdbSubspace Create([NotNull] TKey key) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - return new FdbSubspace(key.ToFoundationDbKey()); - } - - /// Create a new Subspace using a binary key as the prefix - /// Prefix of the new subspace - /// Type System used to encode the keys of this subspace - /// New subspace that will use a copy of as its prefix - [NotNull] - public static IFdbDynamicSubspace CreateDynamic(Slice slice, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? 
TypeSystem.Default).GetDynamicEncoder(); - return new FdbDynamicSubspace(slice, encoder); - } - - /// Create a new Subspace using a binary key as the prefix - /// Prefix of the new subspace - /// Type System used to encode the keys of this subspace - /// New subspace that will use a copy of as its prefix - [NotNull] - public static IFdbDynamicSubspace CreateDynamic(Slice slice, [NotNull] IDynamicKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbDynamicSubspace(slice, encoder); - } - - public static IFdbDynamicSubspace CreateDynamic([NotNull] TKey key, IFdbKeyEncoding encoding = null) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - var encoder = (encoding ?? TypeSystem.Default).GetDynamicEncoder(); - return new FdbDynamicSubspace(key.ToFoundationDbKey(), encoder); - } - - public static IFdbDynamicSubspace CreateDynamic([NotNull] TKey key, IDynamicKeyEncoder encoder) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbDynamicSubspace(key.ToFoundationDbKey(), encoder); - } - - /// Create a new Subspace using a tuples as the prefix - /// Tuple that represents the prefix of the new subspace - /// Optional type encoding used by this subspace. - /// New subspace instance that will use the packed representation of as its prefix - [NotNull] - public static IFdbDynamicSubspace CreateDynamic([NotNull] IFdbTuple tuple, IFdbKeyEncoding encoding = null) - { - if (tuple == null) throw new ArgumentNullException("tuple"); - var encoder = (encoding ?? TypeSystem.Default).GetDynamicEncoder(); - return new FdbDynamicSubspace(tuple.ToSlice(), true, encoder); - } - - [NotNull] - public static IFdbEncoderSubspace CreateEncoder(Slice slice, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? 
TypeSystem.Default).GetEncoder(); - return new FdbEncoderSubspace(slice, encoder); - } - - [NotNull] - public static IFdbEncoderSubspace CreateEncoder(Slice slice, IKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbEncoderSubspace(slice, encoder); - } - - [NotNull] - public static IFdbEncoderSubspace CreateEncoder(Slice slice, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? TypeSystem.Default).GetEncoder(); - return new FdbEncoderSubspace(slice, encoder); - } - - [NotNull] - public static IFdbEncoderSubspace CreateEncoder(Slice slice, ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbEncoderSubspace(slice, encoder); - } - - [NotNull] - public static IFdbEncoderSubspace CreateEncoder(Slice slice, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? TypeSystem.Default).GetEncoder(); - return new FdbEncoderSubspace(slice, encoder); - } - - [NotNull] - public static IFdbEncoderSubspace CreateEncoder(Slice slice, ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbEncoderSubspace(slice, encoder); - } - - [NotNull] - public static IFdbEncoderSubspace CreateEncoder(Slice slice, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? TypeSystem.Default).GetEncoder(); - return new FdbEncoderSubspace(slice, encoder); - } - - [NotNull] - public static IFdbEncoderSubspace CreateEncoder(Slice slice, ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbEncoderSubspace(slice, encoder); - } - - /// Clone this subspace - /// New Subspace that uses the same prefix key - /// Hint: Cloning a special Subspace like a or will not keep all the "special abilities" of the parent. 
- [NotNull] - public static IFdbSubspace Copy([NotNull] IFdbSubspace subspace) - { - var dyn = subspace as FdbDynamicSubspace; - if (dyn != null) - { - return new FdbDynamicSubspace(dyn.InternalKey, true, dyn.Encoder); - } - - var sub = subspace as FdbSubspace; - if (sub != null) - { - //SPOILER WARNING: You didn't hear it from me, but some say that you can use this to bypass the fact that FdbDirectoryPartition.get_Key and ToRange() throws in v2.x ... If you bypass this protection and bork your database, don't come crying! - return new FdbSubspace(sub.InternalKey, true); - } - - return new FdbSubspace(subspace.Key, true); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbDynamicSubspace CopyDynamic([NotNull] IFdbSubspace subspace, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? TypeSystem.Default).GetDynamicEncoder(); - return new FdbDynamicSubspace(subspace.Key, true, encoder); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbDynamicSubspace CopyDynamic([NotNull] IFdbSubspace subspace, [NotNull] IDynamicKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbDynamicSubspace(subspace.Key, true, encoder); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbEncoderSubspace CopyEncoder([NotNull] IFdbSubspace subspace, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? 
TypeSystem.Default).GetEncoder(); - return new FdbEncoderSubspace(subspace.Key, true, encoder); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbEncoderSubspace CopyEncoder([NotNull] IFdbSubspace subspace, [NotNull] IKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbEncoderSubspace(subspace.Key, true, encoder); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbEncoderSubspace CopyEncoder([NotNull] IFdbSubspace subspace, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? TypeSystem.Default).GetEncoder(); - return new FdbEncoderSubspace(subspace.Key, true, encoder); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbEncoderSubspace CopyEncoder([NotNull] IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbEncoderSubspace(subspace.Key, true, encoder); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbEncoderSubspace CopyEncoder([NotNull] IFdbSubspace subspace, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? 
TypeSystem.Default).GetEncoder(); - return new FdbEncoderSubspace(subspace.Key, true, encoder); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbEncoderSubspace CopyEncoder([NotNull] IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbEncoderSubspace(subspace.Key, true, encoder); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbEncoderSubspace CopyEncoder([NotNull] IFdbSubspace subspace, IFdbKeyEncoding encoding = null) - { - var encoder = (encoding ?? TypeSystem.Default).GetEncoder(); - return new FdbEncoderSubspace(subspace.Key, true, encoder); - } - - /// Create a copy of a subspace, using a specific Type System - /// New Subspace that uses the same prefix key, and the provided Type System - [NotNull] - public static IFdbEncoderSubspace CopyEncoder([NotNull] IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new FdbEncoderSubspace(subspace.Key, true, encoder); - } - - #endregion - - #region IFdbKey... - - Slice IFdbKey.ToFoundationDbKey() - { - return GetKeyPrefix(); - } - - #endregion - - #region IFdbSubspace... 
- - /// Returns the raw prefix of this subspace - /// Will throw if the prefix is not publicly visible, as is the case for Directory Partitions - public Slice Key - { - get { return GetKeyPrefix(); } - } - - /// Returns the key to use when creating direct keys that are inside this subspace - /// Prefix that must be added to all keys created by this subspace - /// Subspaces that disallow the creation of keys should override this method and throw an exception - [DebuggerStepThrough] - protected virtual Slice GetKeyPrefix() - { - return m_rawPrefix; - } - - protected virtual IFdbSubspace CreateChildren(Slice suffix) - { - return new FdbSubspace(ConcatKey(suffix)); - } - - public FdbKeyRange ToRange() - { - return ToRange(Slice.Empty); - } - - public virtual FdbKeyRange ToRange(Slice suffix) - { - return FdbKeyRange.StartsWith(ConcatKey(suffix)); - } - - public virtual FdbKeyRange ToRange(TKey key) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - return FdbKeyRange.StartsWith(ConcatKey(key.ToFoundationDbKey())); - } - - public IFdbSubspace this[Slice suffix] - { - get { return CreateChildren(suffix); } - } - - public IFdbSubspace this[IFdbKey key] - { - get - { - if (key == null) throw new ArgumentNullException("key"); - return CreateChildren(key.ToFoundationDbKey()); - } - } - - /// Tests whether the specified starts with this Subspace's prefix, indicating that the Subspace logically contains . - /// The key to be tested - /// The key Slice.Nil is not contained by any Subspace, so subspace.Contains(Slice.Nil) will always return false - public virtual bool Contains(Slice key) - { - return key.HasValue && key.StartsWith(this.InternalKey); - } - - /// Append a key to the subspace key - /// This is the equivalent of calling 'subspace.Key + suffix' - public Slice ConcatKey(Slice suffix) - { - //REVIEW: what to do with Slice.Nil? 
- return GetKeyPrefix().Concat(suffix); - } - - public Slice ConcatKey(TKey key) - where TKey : IFdbKey - { - if (key == null) throw new ArgumentNullException("key"); - var suffix = key.ToFoundationDbKey(); - return GetKeyPrefix().Concat(suffix); - } - - /// Merge an array of keys with the subspace's prefix, all sharing the same buffer - /// Array of keys to pack - /// Array of slices (for all keys) that share the same underlying buffer - public Slice[] ConcatKeys(IEnumerable keys) - { - if (keys == null) throw new ArgumentNullException("keys"); - //REVIEW: what to do with keys that are Slice.Nil ? - return Slice.ConcatRange(GetKeyPrefix(), keys); - } - - /// Merge an array of keys with the subspace's prefix, all sharing the same buffer - /// Array of keys to pack - /// Array of slices (for all keys) that share the same underlying buffer - public Slice[] ConcatKeys(IEnumerable keys) - where TKey : IFdbKey - { - if (keys == null) throw new ArgumentNullException("keys"); - //REVIEW: what to do with keys that are Slice.Nil ? - return Slice.ConcatRange(GetKeyPrefix(), keys.Select(key => key.ToFoundationDbKey())); - } - - /// Remove the subspace prefix from a binary key, and only return the tail, or Slice.Nil if the key does not fit inside the namespace - /// Complete key that contains the current subspace prefix, and a binary suffix - /// If true, verify that is inside the bounds of the subspace - /// Binary suffix of the key (or Slice.Empty is the key is exactly equal to the subspace prefix). If the key is outside of the subspace, returns Slice.Nil - /// This is the inverse operation of - /// If is true and is outside the current subspace. 
- public Slice ExtractKey(Slice key, bool boundCheck = false) - { - if (key.IsNull) return Slice.Nil; - - var prefix = GetKeyPrefix(); - if (!key.StartsWith(prefix)) - { - if (boundCheck) FailKeyOutOfBound(key); - return Slice.Nil; - } - - return key.Substring(prefix.Count); - } - - /// Remove the subspace prefix from a batch of binary keys, and only return the tail, or Slice.Nil if a key does not fit inside the namespace - /// Sequence of complete keys that contains the current subspace prefix, and a binary suffix - /// If true, verify that each key in is inside the bounds of the subspace - /// Array of only the binary suffix of the keys, Slice.Empty for a key that is exactly equal to the subspace prefix, or Slice.Nil for a key that is outside of the subspace - /// If is true and at least one key in is outside the current subspace. - public Slice[] ExtractKeys(IEnumerable keys, bool boundCheck = false) - { - if (keys == null) throw new ArgumentNullException("keys"); - - var prefix = GetKeyPrefix(); - - var arr = keys as Slice[]; - if (arr != null) - { // fast-path for Sice[] (frequent for range reads) - - var res = new Slice[arr.Length]; - for (int i = 0; i < arr.Length; i++) - { - if (arr[i].StartsWith(prefix)) - { - res[i] = arr[i].Substring(prefix.Count); - } - else if (boundCheck) - { - FailKeyOutOfBound(arr[i]); - } - } - return res; - } - else - { // slow path for the rest - var coll = keys as ICollection; - var res = coll != null ? new List(coll.Count) : new List(); - foreach(var key in keys) - { - if (key.StartsWith(prefix)) - { - res.Add(key.Substring(prefix.Count)); - } - else if (boundCheck) - { - FailKeyOutOfBound(key); - } - } - return res.ToArray(); - } - } - - public SliceWriter GetWriter(int capacity = 0) - { - if (capacity < 0) throw new ArgumentOutOfRangeException("capacity"); - - var prefix = GetKeyPrefix(); - if (capacity > 0) - { - capacity += prefix.Count; - //TODO: round up to multiple of 8? 
- } - return new SliceWriter(prefix, capacity); - } - - #endregion - - #region IEquatable / IComparable... - - /// Compare this subspace with another subspace - public int CompareTo(IFdbSubspace other) - { - if (other == null) return +1; - if (object.ReferenceEquals(this, other)) return 0; - var sub = other as FdbSubspace; - if (sub != null) - return this.InternalKey.CompareTo(sub.InternalKey); - else - return this.InternalKey.CompareTo(other.ToFoundationDbKey()); - } - - /// Test if both subspaces have the same prefix - public bool Equals(IFdbSubspace other) - { - if (other == null) return false; - if (object.ReferenceEquals(this, other)) return true; - var sub = other as FdbSubspace; - if (sub != null) - return this.InternalKey.Equals(sub.InternalKey); - else - return this.InternalKey.Equals(other.ToFoundationDbKey()); - } - - /// Test if an object is a subspace with the same prefix - public override bool Equals(object obj) - { - return Equals(obj as FdbSubspace); - } - - /// Compute a hashcode based on the prefix of this subspace - /// - public override int GetHashCode() - { - return this.InternalKey.GetHashCode(); - } - - #endregion - - #region Helpers... - - /// Check that a key fits inside this subspace, and return '' or '\xFF' if it is outside the bounds - /// Key that needs to be checked - /// If true, allow keys that starts with \xFF even if this subspace is not the Empty subspace or System subspace itself. - /// The unchanged if it is contained in the namespace, Slice.Empty if it was before the subspace, or FdbKey.MaxValue if it was after. 
- public Slice BoundCheck(Slice key, bool allowSystemKeys) - { - //note: Since this is needed to make GetRange/GetKey work properly, this should work for all subspace, include directory partitions - var prefix = this.InternalKey; - - // don't touch to nil and keys inside the globalspace - if (key.IsNull || key.StartsWith(prefix)) return key; - - // let the system keys pass - if (allowSystemKeys && key.Count > 0 && key[0] == 255) return key; - - // The key is outside the bounds, and must be corrected - // > return empty if we are before - // > return \xFF if we are after - if (key < prefix) - return Slice.Empty; - else - return FdbKey.System; - } - - /// Throw an exception for a key that is out of the bounds of this subspace - /// - [ContractAnnotation("=> halt")] - protected void FailKeyOutOfBound(Slice key) - { -#if DEBUG - // only in debug mode, because have the key and subspace in the exception message could leak sensitive information - string msg = String.Format("The key {0} does not belong to subspace {1}", FdbKey.Dump(key), this.ToString()); -#else - string msg = "The specifed key does not belong to this subspace"; -#endif - throw new ArgumentException(msg, "key"); - } - - /// Return a user-friendly representation of a key from this subspace - /// Key that is contained in this subspace - /// Printable version of this key, minus the subspace prefix - [NotNull] - public virtual string DumpKey(Slice key) - { - // note: we can't use ExtractAndCheck(...) 
because it may throw in derived classes - var prefix = this.InternalKey; - if (!key.StartsWith(prefix)) FailKeyOutOfBound(key); - - return FdbKey.Dump(key.Substring(prefix.Count)); - } - - /// Printable representation of this subspace - public override string ToString() - { - return "Subspace(" + this.InternalKey.ToString() + ")"; - } - - #endregion - - } - -} diff --git a/FoundationDB.Client/Subspaces/FdbSubspaceExtensions.cs b/FoundationDB.Client/Subspaces/FdbSubspaceExtensions.cs deleted file mode 100644 index c6e2eb387..000000000 --- a/FoundationDB.Client/Subspaces/FdbSubspaceExtensions.cs +++ /dev/null @@ -1,193 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client -{ - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.Collections.Generic; - using System.Threading; - using System.Threading.Tasks; - - /// Extensions methods to add FdbSubspace overrides to various types - public static class FdbSubspaceExtensions - { - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic subspace - /// If non-null, uses this specific instance of the TypeSystem. 
If null, uses the default instance for this particular TypeSystem - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbDynamicSubspace Using([NotNull] this IFdbSubspace subspace, [NotNull] IFdbKeyEncoding encoding) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoding == null) throw new ArgumentNullException("encoding"); - return FdbSubspace.CopyDynamic(subspace, encoding); - } - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic subspace - /// Custom key encoder - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbDynamicSubspace UsingEncoder([NotNull] this IFdbSubspace subspace, [NotNull] IDynamicKeyEncoder encoder) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); - return FdbSubspace.CopyDynamic(subspace, encoder); - } - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic subspace - /// Custom key encoder - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbEncoderSubspace UsingEncoder([NotNull] this IFdbSubspace subspace, [NotNull] IFdbKeyEncoding encoding) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoding == null) throw new ArgumentNullException("encoding"); - return FdbSubspace.CopyEncoder(subspace, encoding); - } - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic subspace - /// Custom key encoder - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbEncoderSubspace UsingEncoder([NotNull] this IFdbSubspace subspace, [NotNull] IKeyEncoder encoder) - { - if (subspace == null) throw new 
ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); - return FdbSubspace.CopyEncoder(subspace, encoder); - } - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic subspace - /// Custom key encoder - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbEncoderSubspace UsingEncoder([NotNull] this IFdbSubspace subspace, [NotNull] IFdbKeyEncoding encoding) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoding == null) throw new ArgumentNullException("encoding"); - return FdbSubspace.CopyEncoder(subspace, encoding); - } - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic subspace - /// Custom key encoder - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbEncoderSubspace UsingEncoder([NotNull] this IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); - return FdbSubspace.CopyEncoder(subspace, encoder); - } - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic subspace - /// Custom key encoder - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbEncoderSubspace UsingEncoder([NotNull] this IFdbSubspace subspace, [NotNull] IFdbKeyEncoding encoding) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoding == null) throw new ArgumentNullException("encoding"); - return FdbSubspace.CopyEncoder(subspace, encoding); - } - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic 
subspace - /// Custom key encoder - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbEncoderSubspace UsingEncoder([NotNull] this IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); - return FdbSubspace.CopyEncoder(subspace, encoder); - } - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic subspace - /// Custom key encoder - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbEncoderSubspace UsingEncoder([NotNull] this IFdbSubspace subspace, [NotNull] IFdbKeyEncoding encoding) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoding == null) throw new ArgumentNullException("encoding"); - return FdbSubspace.CopyEncoder(subspace, encoding); - } - - /// Return a version of this subspace, which uses a different type system to produces the keys and values - /// Instance of a generic subspace - /// Custom key encoder - /// Subspace equivalent to , but augmented with a specific TypeSystem - public static IFdbEncoderSubspace UsingEncoder([NotNull] this IFdbSubspace subspace, [NotNull] ICompositeKeyEncoder encoder) - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); - return FdbSubspace.CopyEncoder(subspace, encoder); - } - - /// Clear the entire content of a subspace - public static void ClearRange(this IFdbTransaction trans, [NotNull] IFdbSubspace subspace) - { - Contract.Requires(trans != null && subspace != null); - - //BUGBUG: should we call subspace.ToRange() ? 
- trans.ClearRange(FdbKeyRange.StartsWith(subspace.ToFoundationDbKey())); - } - - /// Clear the entire content of a subspace - public static Task ClearRangeAsync(this IFdbRetryable db, [NotNull] IFdbSubspace subspace, CancellationToken cancellationToken) - { - if (db == null) throw new ArgumentNullException("db"); - if (subspace == null) throw new ArgumentNullException("subspace"); - - return db.WriteAsync((tr) => ClearRange(tr, subspace), cancellationToken); - } - - /// Returns all the keys inside of a subspace - [NotNull] - public static FdbRangeQuery> GetRangeStartsWith(this IFdbReadOnlyTransaction trans, [NotNull] IFdbSubspace subspace, FdbRangeOptions options = null) - { - //REVIEW: should we remove this method? - Contract.Requires(trans != null && subspace != null); - - return trans.GetRange(FdbKeyRange.StartsWith(subspace.ToFoundationDbKey()), options); - } - - /// Tests whether the specified starts with this Subspace's prefix, indicating that the Subspace logically contains . - /// - /// The key to be tested - /// If is null - public static bool Contains([NotNull] this IFdbSubspace subspace, [NotNull] TKey key) - where TKey : IFdbKey - { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (key == null) throw new ArgumentNullException("key"); - return subspace.Contains(key.ToFoundationDbKey()); - } - - } -} diff --git a/FoundationDB.Client/Subspaces/IFdbDynamicSubspace.cs b/FoundationDB.Client/Subspaces/IDynamicKeySubspace.cs similarity index 53% rename from FoundationDB.Client/Subspaces/IFdbDynamicSubspace.cs rename to FoundationDB.Client/Subspaces/IDynamicKeySubspace.cs index 927807c44..d0ab4cd71 100644 --- a/FoundationDB.Client/Subspaces/IFdbDynamicSubspace.cs +++ b/FoundationDB.Client/Subspaces/IDynamicKeySubspace.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -26,24 +26,37 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -using System; -using JetBrains.Annotations; - namespace FoundationDB.Client { - - public interface IFdbDynamicSubspace : IFdbSubspace + using System; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; + + /// Represents a Key Subspace which can encode and decode keys of arbitrary size. + /// This is usefull when dealing with subspaces that store keys of different types and shapes. + /// In pseudo code, we obtain a dynamic subspace that wraps a prefix, and uses the Tuple Encoder Format to encode variable-size tuples into binary: + /// + /// subspace = {...}.OpenOrCreate(..., "/some/path/to/data", TypeSystem.Tuples) + /// subspace.GetPrefix() => {prefix} + /// subspace.Keys.Pack(("Hello", "World")) => (PREFIX, 'Hello', 'World') => {prefix}.'\x02Hello\x00\x02World\x00' + /// subspace.Keys.Encode("Hello", "World") => (PREFIX, 'Hello', 'World') => {prefix}.'\x02Hello\x00\x02World\x00' + /// subspace.Keys.Decode({prefix}'\x02Hello\x00\x15\x42') => ('Hello', 0x42) + /// + /// + [PublicAPI] + public interface IDynamicKeySubspace : IKeySubspace { - /// Encoding used to convert keys of this subspace into Slice - IDynamicKeyEncoder Encoder {[NotNull] get; } - - /// Returns a view of the keys of this subspace - FdbDynamicSubspaceKeys Keys { get; } + /// View of the keys of this subspace + [NotNull] + DynamicKeys Keys { get; } /// Returns an helper object that knows how to create sub-partitions of this subspace - FdbDynamicSubspacePartition Partition { get; } + [NotNull] + DynamicPartition Partition { get; } - } + /// Encoding used to generate and parse the keys of this subspace + [NotNull] IKeyEncoding Encoding { get; } -} \ No newline at end of file + } +} diff --git a/FoundationDB.Client/Subspaces/IFdbEncoderSubspace.cs b/FoundationDB.Client/Subspaces/IFdbEncoderSubspace.cs deleted file mode 100644 index 
59ff78a9b..000000000 --- a/FoundationDB.Client/Subspaces/IFdbEncoderSubspace.cs +++ /dev/null @@ -1,97 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using JetBrains.Annotations; - -namespace FoundationDB.Client -{ - - public interface IFdbEncoderSubspace : IFdbSubspace - { - - IKeyEncoder Encoder { [NotNull] get; } - - /// Return a view of all the possible keys of this subspace - FdbEncoderSubspaceKeys Keys { get; } - - /// Returns an helper object that knows how to create sub-partitions of this subspace - FdbEncoderSubspacePartition Partition { get; } - - } - - public interface IFdbEncoderSubspace : IFdbSubspace - { - - ICompositeKeyEncoder Encoder {[NotNull] get; } - - /// Return a view of all the possible keys of this subspace - FdbEncoderSubspaceKeys Keys { get; } - - /// Returns an helper object that knows how to create sub-partitions of this subspace - FdbEncoderSubspacePartition Partition { get; } - - IFdbEncoderSubspace Partial {[NotNull] get; } - - } - - public interface IFdbEncoderSubspace : IFdbSubspace - { - - ICompositeKeyEncoder Encoder {[NotNull] get; } - - /// Return a view of all the possible keys of this subspace - FdbEncoderSubspaceKeys Keys { get; } - - /// Returns an helper object that knows how to create sub-partitions of this subspace - FdbEncoderSubspacePartition Partition { get; } - - IFdbEncoderSubspace Head { [NotNull] get; } - - IFdbEncoderSubspace Partial {[NotNull] get; } - } - - public interface IFdbEncoderSubspace : IFdbSubspace - { - - ICompositeKeyEncoder Encoder {[NotNull] get; } - - /// Return a view of all the possible keys of this subspace - FdbEncoderSubspaceKeys Keys { get; } - - /// Returns an helper object that knows how to create sub-partitions of this subspace - FdbEncoderSubspacePartition Partition { get; } - - IFdbEncoderSubspace Head {[NotNull] get; } - - IFdbEncoderSubspace Partial {[NotNull] get; } - - //TODO: how to name ? 
- } - -} \ No newline at end of file diff --git a/FoundationDB.Client/Subspaces/IFdbSubspace.cs b/FoundationDB.Client/Subspaces/IFdbSubspace.cs deleted file mode 100644 index c5d1fe066..000000000 --- a/FoundationDB.Client/Subspaces/IFdbSubspace.cs +++ /dev/null @@ -1,124 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Client -{ - using JetBrains.Annotations; - using System; - using System.Collections.Generic; - - - public interface IFdbSubspace : IFdbKey - { - // This interface helps solve some type resolution ambiguities at compile time between types that all implement IFdbKey but have different semantics for partitionning and concatenation - - /// Returns the prefix of this subspace - Slice Key { get; } - - /// Return a key range that contains all the keys in this subspace, including the prefix itself - /// Return the range: Key <= x <= Increment(Key) - FdbKeyRange ToRange(); - - /// Return a key range that contains all the keys under a suffix in this subspace - /// Binary suffix that will be appended to the current prefix, before computing the range - /// Return the range: (this.Key + suffix) <= x <= Increment(this.Key + suffix) - FdbKeyRange ToRange(Slice suffix); - - /// Return a key range that contains all the keys under a serializable key in this subspace - /// Return the range: (this.Key + key.ToFoundationDbKey()) <= x <= Increment(this.Key + key.ToFoundationDbKey()) - FdbKeyRange ToRange([NotNull] TKey key) where TKey : IFdbKey; - - /// Create a new subspace by adding a suffix to the key of the current subspace. - /// Binary suffix that will be appended to the current prefix - /// New subspace whose prefix is the concatenation of the parent prefix, and - IFdbSubspace this[Slice suffix] { [NotNull] get; } - - /// Create a new subspace by adding a suffix to the key of the current subspace. - /// Item that can serialize itself into a binary suffix, that will be appended to the current subspace's prefix - /// New subspace whose prefix is the concatenation of the parent prefix, and - IFdbSubspace this[[NotNull] IFdbKey key] { [NotNull] get; } - - /// Test if a key is inside the range of keys logically contained by this subspace - /// Key to test - /// True if the key can exist inside the current subspace. 
- /// Please note that this method does not test if the key *actually* exists in the database, only if the key is not ouside the range of keys defined by the subspace. - bool Contains(Slice key); - - /// Check that a key fits inside this subspace, and return '' or '\xFF' if it is outside the bounds - /// Key that needs to be checked - /// If true, allow keys that starts with \xFF even if this subspace is not the Empty subspace or System subspace itself. - /// The unchanged if it is contained in the namespace, Slice.Empty if it was before the subspace, or FdbKey.MaxValue if it was after. - Slice BoundCheck(Slice key, bool allowSystemKeys); - - /// Return the key that is composed of the subspace's prefix and a binary suffix - /// Binary suffix that will be appended to the current prefix - /// Full binary key - Slice ConcatKey(Slice suffix); - - /// Return the key that is composed of the subspace's prefix and a serializable key - /// Item that can serialize itself into a binary suffix, that will be appended to the current prefix - /// Full binary key - Slice ConcatKey([NotNull] TKey key) where TKey : IFdbKey; - - /// Concatenate a batch of keys under this subspace - /// List of suffixes to process - /// Array of which is equivalent to calling on each entry in - [NotNull] - Slice[] ConcatKeys([NotNull] IEnumerable suffixes); - - /// Concatenate a batch of serializable keys under this subspace - /// List of serializable keys to process - /// Array of which is equivalent to calling on each entry in - [NotNull] - Slice[] ConcatKeys([NotNull, ItemNotNull] IEnumerable keys) where TKey : IFdbKey; - - /// Remove the subspace prefix from a binary key, and only return the tail, or Slice.Nil if the key does not fit inside the namespace - /// Complete key that contains the current subspace prefix, and a binary suffix - /// If true, verify that is inside the bounds of the subspace - /// Binary suffix of the key (or Slice.Empty if the key is exactly equal to the subspace prefix). 
If the key is outside of the subspace, returns Slice.Nil - /// This is the inverse operation of - /// If is true and is outside the current subspace. - Slice ExtractKey(Slice key, bool boundCheck = false); - - /// Remove the subspace prefix from a batch of binary keys, and only return the tail, or Slice.Nil if a key does not fit inside the namespace - /// Sequence of complete keys that contains the current subspace prefix, and a binary suffix - /// If true, verify that each key in is inside the bounds of the subspace - /// Array of only the binary suffix of the keys, Slice.Empty for a key that is exactly equal to the subspace prefix, or Slice.Nil for a key that is outside of the subspace - /// If is true and at least one key in is outside the current subspace. - [NotNull] - Slice[] ExtractKeys([NotNull] IEnumerable keys, bool boundCheck = false); - - /// Return a new slice buffer, initialized with the subspace prefix, that can be used for custom key serialization - /// If non-zero, the expected buffer capacity. The size of the subspace prefix will be added to this value. - /// Instance of a SliceWriter with the prefix of this subspace already copied. - SliceWriter GetWriter(int capacity = 0); - - - } - -} diff --git a/FoundationDB.Client/Subspaces/IKeySubspace.cs b/FoundationDB.Client/Subspaces/IKeySubspace.cs new file mode 100644 index 000000000..23383acd7 --- /dev/null +++ b/FoundationDB.Client/Subspaces/IKeySubspace.cs @@ -0,0 +1,90 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using JetBrains.Annotations; + + /// Represents a sub-partition of the global key space. + /// + /// A subspace is the logical equivalent of a key prefix that is implicitly prepended to all keys generated from it. + /// A "vanilla" data subspace does not imply any encoding scheme by default, but can be wrapped into a more complex subspace which includes Key Codec. 
+ /// + /// + /// In pseudo code, and given a 'MySubspaceImpl' that implement : + /// + /// subspace = new MySubspaceImpl({ABC}) + /// subspace.ConcatKey({123}) => {ABC123} + /// subspace.ExtractKey({ABC123}) => {123} + /// subspace.ExtractKey({DEF123}) => ERROR + /// + /// + [PublicAPI] + public interface IKeySubspace + { + // This interface helps solve some type resolution ambiguities at compile time between types that all implement IFdbKey but have different semantics for partitionning and concatenation + + /// Returns the prefix of this subspace + Slice GetPrefix(); + + /// Return a key range that contains all the keys in this subspace, including the prefix itself + /// Return the range: Key <= x <= Increment(Key) + [Pure] + KeyRange ToRange(); + + /// Return the key that is composed of the subspace's prefix and a binary suffix + /// Binary suffix that will be appended to the current prefix + /// Full binary key + Slice this[Slice relativeKey] { [Pure] get; } + + /// Test if a key is inside the range of keys logically contained by this subspace + /// Key to test + /// True if the key can exist inside the current subspace. + /// Please note that this method does not test if the key *actually* exists in the database, only if the key is not ouside the range of keys defined by the subspace. + [Pure] + bool Contains(Slice absoluteKey); //REVIEW: should this be renamed to "ContainsKey" ? + + /// Check that a key fits inside this subspace, and return '' or '\xFF' if it is outside the bounds + /// Key that needs to be checked + /// If true, allow keys that starts with \xFF even if this subspace is not the Empty subspace or System subspace itself. + /// The unchanged if it is contained in the namespace, Slice.Empty if it was before the subspace, or FdbKey.MaxValue if it was after. 
+ Slice BoundCheck(Slice key, bool allowSystemKeys); + + /// Remove the subspace prefix from a binary key, and only return the tail, or Slice.Nil if the key does not fit inside the namespace + /// Complete key that contains the current subspace prefix, and a binary suffix + /// If true, verify that is inside the bounds of the subspace + /// Binary suffix of the key (or Slice.Empty if the key is exactly equal to the subspace prefix). If the key is outside of the subspace, returns Slice.Nil + /// This is the inverse operation of + /// If is true and is outside the current subspace. + [Pure] + Slice ExtractKey(Slice absoluteKey, bool boundCheck = false); + + } + +} diff --git a/FoundationDB.Client/Subspaces/KeySubspace.cs b/FoundationDB.Client/Subspaces/KeySubspace.cs new file mode 100644 index 000000000..78d904c22 --- /dev/null +++ b/FoundationDB.Client/Subspaces/KeySubspace.cs @@ -0,0 +1,346 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using System.Diagnostics; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; + + /// Adds a prefix on every keys, to group them inside a common subspace + [PublicAPI] + [DebuggerDisplay("{ToString(),nq}")] + public class KeySubspace : IKeySubspace, IEquatable, IComparable + { + + /// Prefix common to all keys in this subspace + private readonly Slice Key; + + /// Precomputed range that encompass all the keys in this subspace + private readonly KeyRange Range; + + #region Constructors... + + [NotNull] + public static KeySubspace Empty => new KeySubspace(Slice.Empty); + + #region FromKey... + + /// Initializes a new generic subspace with the given prefix. + [Pure, NotNull] + public static KeySubspace FromKey(Slice prefix) + { + return new KeySubspace(prefix.Memoize()); + } + + /// Initializes a new dynamic subspace with the given binary and key . + /// A subspace that can handle keys of any types and size. + [Pure, NotNull] + public static DynamicKeySubspace CreateDynamic(Slice prefix, [NotNull] IDynamicKeyEncoder encoder) + { + Contract.NotNull(encoder, nameof(encoder)); + return new DynamicKeySubspace(prefix, encoder); + } + + /// Initializes a new subspace with the given binary , that uses a dynamic key . + /// A subspace that can handle keys of any types and size. 
+ [Pure, NotNull] + public static DynamicKeySubspace CreateDynamic(Slice prefix, [CanBeNull] IKeyEncoding encoding = null) + { + return new DynamicKeySubspace(prefix, (encoding ?? TuPack.Encoding).GetDynamicKeyEncoder()); + } + + /// Initializes a new subspace with the given binary , that uses a typed key . + /// A subspace that can handle keys of type . + public static TypedKeySubspace CreateTyped(Slice prefix, [CanBeNull] IKeyEncoding encoding = null) + { + return new TypedKeySubspace(prefix, (encoding ?? TuPack.Encoding).GetKeyEncoder()); + } + + /// Initializes a new subspace with the given binary , that uses a typed key . + /// A subspace that can handle keys of type . + public static TypedKeySubspace CreateTyped(Slice prefix, [NotNull] IKeyEncoder encoder) + { + Contract.NotNull(encoder, nameof(encoder)); + return new TypedKeySubspace(prefix, encoder); + } + + /// Initializes a new subspace with the given binary , that uses a typed key . + /// A subspace that can handle composite keys of type (, ). + public static TypedKeySubspace CreateTyped(Slice prefix, [CanBeNull] IKeyEncoding encoding = null) + { + return new TypedKeySubspace(prefix, (encoding ?? TuPack.Encoding).GetKeyEncoder()); + } + + /// Initializes a new subspace with the given binary , that uses a typed key . + /// A subspace that can handle composite keys of type (, ). + public static TypedKeySubspace CreateTyped(Slice prefix, [NotNull] ICompositeKeyEncoder encoder) + { + Contract.NotNull(encoder, nameof(encoder)); + return new TypedKeySubspace(prefix, encoder); + } + + /// Initializes a new subspace with the given binary , that uses a typed key . + /// A subspace that can handle composite keys of type (, , ). + public static TypedKeySubspace CreateTyped(Slice prefix, [CanBeNull] IKeyEncoding encoding = null) + { + return new TypedKeySubspace(prefix, (encoding ?? TuPack.Encoding).GetKeyEncoder()); + } + + /// Initializes a new subspace with the given binary , that uses a typed key . 
+ /// A subspace that can handle composite keys of type (, , ). + public static TypedKeySubspace CreateTyped(Slice prefix, [NotNull] ICompositeKeyEncoder encoder) + { + Contract.NotNull(encoder, nameof(encoder)); + return new TypedKeySubspace(prefix, encoder); + } + + /// Initializes a new subspace with the given binary , that uses a typed key . + /// A subspace that can handle composite keys of type (, , ). + public static TypedKeySubspace CreateTyped(Slice prefix, [CanBeNull] IKeyEncoding encoding = null) + { + return new TypedKeySubspace(prefix, (encoding ?? TuPack.Encoding).GetKeyEncoder()); + } + + /// Initializes a new subspace with the given binary , that uses a typed key . + /// A subspace that can handle composite keys of type (, , ). + public static TypedKeySubspace CreateTyped(Slice prefix, [NotNull] ICompositeKeyEncoder encoder) + { + Contract.NotNull(encoder, nameof(encoder)); + return new TypedKeySubspace(prefix, encoder); + } + + #endregion + + internal KeySubspace(Slice prefix) + { + this.Key = prefix; + this.Range = KeyRange.StartsWith(prefix); + } + + internal KeySubspace(Slice prefix, KeyRange range) + { + this.Key = prefix; + this.Range = range; + } + + /// Returns the raw prefix of this subspace + /// Will throw if the prefix is not publicly visible, as is the case for Directory Partitions + public Slice GetPrefix() + { + return GetKeyPrefix(); + } + + /// Returns the key to use when creating direct keys that are inside this subspace + /// Prefix that must be added to all keys created by this subspace + /// Subspaces that disallow the creation of keys should override this method and throw an exception + [DebuggerStepThrough] + protected virtual Slice GetKeyPrefix() + { + return this.Key; + } + + /// Returns the master instance of the prefix, without any safety checks + /// This instance should NEVER be exposed to anyone else, and should ONLY be used for logging/troubleshooting + internal Slice GetPrefixUnsafe() + { + return this.Key; + } + + 
public KeyRange ToRange() + { + return GetKeyRange(); + } + + protected virtual KeyRange GetKeyRange() + { + return this.Range; + } + + public virtual KeyRange ToRange(Slice suffix) + { + return KeyRange.StartsWith(this[suffix]); + } + + /// Tests whether the specified key starts with this Subspace's prefix, indicating that the Subspace logically contains key. + /// The key to be tested + /// The key Slice.Nil is not contained by any Subspace, so subspace.Contains(Slice.Nil) will always return false + public virtual bool Contains(Slice absoluteKey) + { + return absoluteKey.StartsWith(this.Key); + } + + /// Append a key to the subspace key + /// This is the equivalent of calling 'subspace.Key + suffix' + public Slice this[Slice relativeKey] + { + get + { + //note: we don't want to leak our key! + var key = GetKeyPrefix(); + if (relativeKey.IsNullOrEmpty) return key.Memoize(); //TODO: better solution! + return key.Concat(relativeKey); + } + } + + /// Remove the subspace prefix from a binary key, and only return the tail, or Slice.Nil if the key does not fit inside the namespace + /// Complete key that contains the current subspace prefix, and a binary suffix + /// If true, verify that is inside the bounds of the subspace + /// Binary suffix of the key (or Slice.Empty is the key is exactly equal to the subspace prefix). If the key is outside of the subspace, returns Slice.Nil + /// This is the inverse operation of + /// If is true and is outside the current subspace. + public virtual Slice ExtractKey(Slice absoluteKey, bool boundCheck = false) + { + if (absoluteKey.IsNull) return Slice.Nil; + + var key = GetKeyPrefix(); + if (!absoluteKey.StartsWith(key)) + { + if (boundCheck) FailKeyOutOfBound(absoluteKey); + return Slice.Nil; + } + return absoluteKey.Substring(key.Count); + } + + public SliceWriter OpenWriter(int extra = 32) + { + var key = GetKeyPrefix(); + var sw = new SliceWriter(key.Count + extra); //TODO: BufferPool ? 
+ sw.WriteBytes(key); + return sw; + } + + #endregion + + #region IEquatable / IComparable... + + /// Compare this subspace with another subspace + public int CompareTo(IKeySubspace other) + { + if (other == null) return +1; + if (object.ReferenceEquals(this, other)) return 0; + if (other is KeySubspace sub) + return this.Key.CompareTo(sub.Key); + else + return this.Key.CompareTo(other.GetPrefix()); + } + + /// Test if both subspaces have the same prefix + public bool Equals(IKeySubspace other) + { + if (other == null) return false; + if (object.ReferenceEquals(this, other)) return true; + if (other is KeySubspace sub) + return this.Key.Equals(sub.Key); + else + return this.Key.Equals(other.GetPrefix()); + } + + /// Test if an object is a subspace with the same prefix + public override bool Equals(object obj) + { + return Equals(obj as KeySubspace); + } + + /// Compute a hashcode based on the prefix of this subspace + /// + public override int GetHashCode() + { + return this.Key.GetHashCode(); + } + + #endregion + + #region Helpers... + + /// Check that a key fits inside this subspace, and return '' or '\xFF' if it is outside the bounds + /// Key that needs to be checked + /// If true, allow keys that starts with \xFF even if this subspace is not the Empty subspace or System subspace itself. + /// The unchanged if it is contained in the namespace, Slice.Empty if it was before the subspace, or FdbKey.MaxValue if it was after. 
+ public Slice BoundCheck(Slice key, bool allowSystemKeys) + { + //note: Since this is needed to make GetRange/GetKey work properly, this should work for all subspace, include directory partitions + var prefix = this.Key; + + // don't touch to nil and keys inside the globalspace + if (key.IsNull || key.StartsWith(prefix)) return key; + + // let the system keys pass + if (allowSystemKeys && key.Count > 0 && key[0] == 255) return key; + + // The key is outside the bounds, and must be corrected + // > return empty if we are before + // > return \xFF if we are after + if (key < prefix) + return Slice.Empty; + else + return FdbKey.System; + } + + /// Throw an exception for a key that is out of the bounds of this subspace + /// + [ContractAnnotation("=> halt")] + protected void FailKeyOutOfBound(Slice key) + { +#if DEBUG + // only in debug mode, because have the key and subspace in the exception message could leak sensitive information + string msg = $"The key {FdbKey.Dump(key)} does not belong to subspace {this}"; +#else + string msg = "The specifed key does not belong to this subspace"; +#endif + throw new ArgumentException(msg, nameof(key)); + } + + /// Return a user-friendly representation of a key from this subspace + /// Key that is contained in this subspace + /// Printable version of this key, minus the subspace prefix + [NotNull] + public virtual string DumpKey(Slice key) + { + // note: we can't use ExtractAndCheck(...) 
because it may throw in derived classes + var prefix = this.Key; + if (!key.StartsWith(prefix)) FailKeyOutOfBound(key); + + return FdbKey.Dump(key.Substring(prefix.Count)); + } + + /// Printable representation of this subspace + public override string ToString() + { + return "Subspace(" + this.Key.ToString() + ")"; + } + + #endregion + + } + +} diff --git a/FoundationDB.Client/Subspaces/KeySubspaceExtensions.cs b/FoundationDB.Client/Subspaces/KeySubspaceExtensions.cs new file mode 100644 index 000000000..54e5f3fcd --- /dev/null +++ b/FoundationDB.Client/Subspaces/KeySubspaceExtensions.cs @@ -0,0 +1,286 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using System.Collections.Generic; + using System.Threading; + using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; + + /// Extensions methods and helpers to work with Key Subspaces + public static class KeySubspaceExtensions + { + + #region Encodings... + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace + /// If non-null, uses this specific instance of the TypeSystem. If null, uses the default instance for this particular TypeSystem + /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static IDynamicKeySubspace AsDynamic([NotNull] this IKeySubspace subspace, IKeyEncoding encoding = null) + { + Contract.NotNull(subspace, nameof(subspace)); + return new DynamicKeySubspace(subspace.GetPrefix(), encoding ?? TuPack.Encoding); + } + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace to extend + /// Encoding by the keys of this subspace. If not specified, the Tuple Encoding will be used to generate an encoder. 
+ /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static ITypedKeySubspace AsTyped([NotNull] this IKeySubspace subspace, [CanBeNull] IKeyEncoding encoding = null) + { + Contract.NotNull(subspace, nameof(subspace)); + return new TypedKeySubspace(subspace.GetPrefix(), (encoding ?? TuPack.Encoding).GetKeyEncoder()); + } + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace to extend + /// Encoding used by the keys of this subspace. If not specified, the Tuple Encoding will be used to generate an encoder. + /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static ITypedKeySubspace AsTyped([NotNull] this IKeySubspace subspace, [CanBeNull] IKeyEncoding encoding = null) + { + Contract.NotNull(subspace, nameof(subspace)); + return new TypedKeySubspace(subspace.GetPrefix(), (encoding ?? TuPack.Encoding).GetKeyEncoder()); + } + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace to extend + /// Encoding used by the keys of this subspace. If not specified, the Tuple Encoding will be used to generate an encoder. + /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static ITypedKeySubspace AsTyped([NotNull] this IKeySubspace subspace, [CanBeNull] IKeyEncoding encoding = null) + { + Contract.NotNull(subspace, nameof(subspace)); + return new TypedKeySubspace(subspace.GetPrefix(), (encoding ?? TuPack.Encoding).GetKeyEncoder()); + } + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace + /// Encoding used by the keys of this namespace. If not specified, the Tuple Encoding will be used to generate an encoder. 
+ /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static ITypedKeySubspace AsTyped([NotNull] this IKeySubspace subspace, [CanBeNull] IKeyEncoding encoding = null) + { + Contract.NotNull(subspace, nameof(subspace)); + return new TypedKeySubspace(subspace.GetPrefix(), (encoding ?? TuPack.Encoding).GetKeyEncoder()); + } + + + #endregion + + #region Encoders... + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace to extend + /// Custom key encoder + /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static IDynamicKeySubspace UsingEncoder([NotNull] this IKeySubspace subspace, [NotNull] IDynamicKeyEncoder encoder) + { + Contract.NotNull(subspace, nameof(subspace)); + Contract.NotNull(encoder, nameof(encoder)); + return new DynamicKeySubspace(subspace.GetPrefix(), encoder.Encoding); + } + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace to extend + /// Custom key encoder + /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static ITypedKeySubspace UsingEncoder([NotNull] this IKeySubspace subspace, [NotNull] IKeyEncoder encoder) + { + Contract.NotNull(subspace, nameof(subspace)); + Contract.NotNull(encoder, nameof(encoder)); + return new TypedKeySubspace(subspace.GetPrefix(), encoder); + } + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace to extend + /// Custom key encoder + /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static ITypedKeySubspace UsingEncoder([NotNull] this IKeySubspace subspace, [NotNull] ICompositeKeyEncoder encoder) + { + Contract.NotNull(subspace, nameof(subspace)); + 
Contract.NotNull(encoder, nameof(encoder)); + return new TypedKeySubspace(subspace.GetPrefix(), encoder); + } + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace to extend + /// Custom key encoder + /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static ITypedKeySubspace UsingEncoder([NotNull] this IKeySubspace subspace, [NotNull] ICompositeKeyEncoder encoder) + { + Contract.NotNull(subspace, nameof(subspace)); + Contract.NotNull(encoder, nameof(encoder)); + return new TypedKeySubspace(subspace.GetPrefix(), encoder); + } + + /// Return a version of this subspace, which uses a different type system to produces the keys and values + /// Instance of a generic subspace + /// Encoder used to serialize the keys of this namespace. + /// Subspace equivalent to , but augmented with a specific TypeSystem + [Pure, NotNull] + public static ITypedKeySubspace UsingEncoder([NotNull] this IKeySubspace subspace, [NotNull] ICompositeKeyEncoder encoder) + { + Contract.NotNull(subspace, nameof(subspace)); + Contract.NotNull(encoder, nameof(encoder)); + return new TypedKeySubspace(subspace.GetPrefix(), encoder); + } + + #endregion + + #region Copy... + + /// Create a new copy of a subspace's prefix + [Pure] + internal static Slice StealPrefix([NotNull] IKeySubspace subspace) + { + //note: we can workaround the 'security' in top directory partition by accessing their key prefix without triggering an exception! + return subspace is KeySubspace ks + ? 
ks.GetPrefixUnsafe().Memoize() + : subspace.GetPrefix().Memoize(); + } + + /// Create a copy of a generic subspace, sharing the same binary prefix + [Pure, NotNull] + public static KeySubspace Copy([NotNull] this IKeySubspace subspace) + { + Contract.NotNull(subspace, nameof(subspace)); + + var prefix = StealPrefix(subspace); + + if (subspace is IDynamicKeySubspace dyn) + { // reuse the encoding of the original + return new DynamicKeySubspace(prefix, dyn.Encoding); + } + + // no encoding + return new KeySubspace(prefix); + } + + /// Create a copy of a generic subspace, sharing the same binary prefix + [Pure, NotNull] + public static DynamicKeySubspace Copy([NotNull] this IKeySubspace subspace, IKeyEncoding encoding) + { + Contract.NotNull(subspace, nameof(subspace)); + Contract.NotNull(encoding, nameof(encoding)); + return new DynamicKeySubspace(StealPrefix(subspace), encoding); + } + + /// Create a copy of a generic subspace, sharing the same binary prefix + [Pure, NotNull] + public static DynamicKeySubspace Copy([NotNull] this IKeySubspace subspace, IDynamicKeyEncoder encoder) + { + Contract.NotNull(subspace, nameof(subspace)); + Contract.NotNull(encoder, nameof(encoder)); + return new DynamicKeySubspace(StealPrefix(subspace), encoder); + } + + /// Create a copy of a dynamic subspace, sharing the same binary prefix and encoder + [Pure, NotNull] + public static DynamicKeySubspace Copy([NotNull] this IDynamicKeySubspace subspace) + { + Contract.NotNull(subspace, nameof(subspace)); + return new DynamicKeySubspace(StealPrefix(subspace), subspace.Encoding); + } + + /// Create a copy of a typed subspace, sharing the same binary prefix and encoder + [Pure, NotNull] + public static TypedKeySubspace Copy([NotNull] this ITypedKeySubspace subspace) + { + Contract.NotNull(subspace, nameof(subspace)); + return new TypedKeySubspace(StealPrefix(subspace), subspace.KeyEncoder); + } + + /// Create a copy of a typed subspace, sharing the same binary prefix and encoder + [Pure, 
NotNull] + public static TypedKeySubspace Copy([NotNull] this ITypedKeySubspace subspace) + { + Contract.NotNull(subspace, nameof(subspace)); + return new TypedKeySubspace(StealPrefix(subspace), subspace.KeyEncoder); + } + + /// Create a copy of a typed subspace, sharing the same binary prefix and encoder + [Pure, NotNull] + public static TypedKeySubspace Copy([NotNull] this ITypedKeySubspace subspace) + { + Contract.NotNull(subspace, nameof(subspace)); + return new TypedKeySubspace(StealPrefix(subspace), subspace.KeyEncoder); + } + + /// Create a copy of a typed subspace, sharing the same binary prefix and encoder + [Pure, NotNull] + public static TypedKeySubspace Copy([NotNull] this ITypedKeySubspace subspace) + { + Contract.NotNull(subspace, nameof(subspace)); + return new TypedKeySubspace(StealPrefix(subspace), subspace.KeyEncoder); + } + + #endregion + + /// Clear the entire content of a subspace + public static void ClearRange(this IFdbTransaction trans, [NotNull] IKeySubspace subspace) + { + Contract.Requires(trans != null && subspace != null); + + //BUGBUG: should we call subspace.ToRange() ? + trans.ClearRange(subspace.ToRange()); + } + + /// Clear the entire content of a subspace + public static Task ClearRangeAsync(this IFdbRetryable db, [NotNull] IKeySubspace subspace, CancellationToken ct) + { + Contract.NotNull(db, nameof(db)); + Contract.NotNull(subspace, nameof(subspace)); + + return db.WriteAsync((tr) => ClearRange(tr, subspace), ct); + } + + /// Returns all the keys inside of a subspace + [Pure, NotNull] + public static FdbRangeQuery> GetRangeStartsWith(this IFdbReadOnlyTransaction trans, [NotNull] IKeySubspace subspace, FdbRangeOptions options = null) + { + //REVIEW: should we remove this method? 
+ Contract.Requires(trans != null && subspace != null); + + return trans.GetRange(subspace.ToRange(), options); + } + + } +} diff --git a/FoundationDB.Client/Subspaces/TypedKeySubspace`1.cs b/FoundationDB.Client/Subspaces/TypedKeySubspace`1.cs new file mode 100644 index 000000000..075760b9e --- /dev/null +++ b/FoundationDB.Client/Subspaces/TypedKeySubspace`1.cs @@ -0,0 +1,213 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; + + [PublicAPI] + public interface ITypedKeySubspace : IKeySubspace + { + /// Return a view of all the possible keys of this subspace + [NotNull] + TypedKeys Keys { get; } + + /// Encoding used to generate and parse the keys of this subspace + [NotNull] + IKeyEncoder KeyEncoder { get; } + + } + + /// Subspace that knows how to encode and decode its key + /// Type of the key handled by this subspace + public sealed class TypedKeySubspace : KeySubspace, ITypedKeySubspace + { + public IKeyEncoder KeyEncoder { get; } + + internal TypedKeySubspace(Slice prefix, [NotNull] IKeyEncoder encoder) + : base(prefix) + { + Contract.Requires(encoder != null); + this.KeyEncoder = encoder; + this.Keys = new TypedKeys(this, this.KeyEncoder); + } + + public TypedKeys Keys { get; } + + } + + /// Encodes and Decodes keys composed of a single element + /// Type of the key handled by this subspace + [DebuggerDisplay("{Parent.ToString(),nq)}")] + public sealed class TypedKeys + { + + [NotNull] + private readonly TypedKeySubspace Parent; + + [NotNull] + public IKeyEncoder Encoder { get; } + + internal TypedKeys( + [NotNull] TypedKeySubspace parent, + [NotNull] IKeyEncoder encoder) + { + Contract.Requires(parent != null && encoder != null); + this.Parent = parent; + this.Encoder = encoder; + } + + #region ToRange() + + /// Return the range of all legal keys in this subpsace + /// A "legal" key is one that can be decoded into the original pair of values + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange() + { + return this.Parent.ToRange(); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified value + /// Range that encompass all keys that 
start with (tuple.Item1, ..) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange(STuple tuple) + { + return ToRange(tuple.Item1); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified value + /// Range that encompass all keys that start with (tuple.Item1, ..) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange(ValueTuple tuple) + { + return ToRange(tuple.Item1); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange(T1 item1) + { + //TODO: add concept of "range" on IKeyEncoder ? + return KeyRange.PrefixedBy(Encode(item1)); + } + + #endregion + + #region Pack() + + public Slice this[ValueTuple items] + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => Encode(items.Item1); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack(STuple tuple) + { + return Encode(tuple.Item1); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack(ValueTuple tuple) + { + return Encode(tuple.Item1); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack([NotNull] TTuple tuple) + where TTuple : ITuple + { + return Encode(tuple.OfSize(1).Get(0)); + } + + #endregion + + #region Encode() + + public Slice this[T1 item1] + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => Encode(item1); + } + + [Pure] + public Slice Encode(T1 item1) + { + var bytes = this.Encoder.EncodeKey(item1); + var sw = this.Parent.OpenWriter(bytes.Count); + sw.WriteBytes(bytes); + return sw.ToSlice(); + } + + #endregion + + #region Decode() + + [Pure] + public T1 Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + public void Decode(Slice packedKey, out T1 item1) + { + item1 = this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + #endregion + + #region Dump() + + /// Return a user-friendly 
string representation of a key of this subspace + [Pure] + public string Dump(Slice packedKey) + { + if (packedKey.IsNull) return String.Empty; + //TODO: defer to the encoding itself? + var key = this.Parent.ExtractKey(packedKey); + try + { + //REVIEW: we need a TryUnpack! + return this.Encoder.DecodeKey(key).ToString(); + } + catch (Exception) + { // decoding failed, or some other non-trival + return key.PrettyPrint(); + } + } + + #endregion + + } + +} diff --git a/FoundationDB.Client/Subspaces/TypedKeySubspace`2.cs b/FoundationDB.Client/Subspaces/TypedKeySubspace`2.cs new file mode 100644 index 000000000..85ce04d1b --- /dev/null +++ b/FoundationDB.Client/Subspaces/TypedKeySubspace`2.cs @@ -0,0 +1,272 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; + + public interface ITypedKeySubspace : IKeySubspace + { + /// Helper to encode/decode keys using this subspace's default encoding + [NotNull] + TypedKeys Keys { get; } + + /// Encoding used to generate and parse the keys of this subspace + [NotNull] + ICompositeKeyEncoder KeyEncoder { get; } + + } + + public class TypedKeySubspace : KeySubspace, ITypedKeySubspace + { + public ICompositeKeyEncoder KeyEncoder { get; } + + + internal TypedKeySubspace(Slice prefix, [NotNull] ICompositeKeyEncoder encoder) + : base(prefix) + { + Contract.Requires(encoder != null); + this.KeyEncoder = encoder; + this.Keys = new TypedKeys(this, this.KeyEncoder); + } + + public TypedKeys Keys { get; } + + } + + [DebuggerDisplay("{Parent.ToString(),nq)}")] + public sealed class TypedKeys + { + + [NotNull] + private readonly TypedKeySubspace Parent; + + [NotNull] + public ICompositeKeyEncoder Encoder { get; } + + internal TypedKeys( + [NotNull] TypedKeySubspace parent, + [NotNull] ICompositeKeyEncoder encoder) + { + Contract.Requires(parent != null && encoder != null); + this.Parent = parent; + this.Encoder = encoder; + } + + #region ToRange() + + /// Return the range of all legal keys in this subpsace + /// A "legal" key is one that can 
be decoded into the original pair of values + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange() + { + return this.Parent.ToRange(); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified pair of values + /// Range that encompass all keys that start with (tuple.Item1, tuple.Item2, ..) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange(STuple tuple) + { + return ToRange(tuple.Item1, tuple.Item2); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified pair of values + /// Range that encompass all keys that start with (tuple.Item1, tuple.Item2, ..) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange((T1, T2) tuple) + { + return ToRange(tuple.Item1, tuple.Item2); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified pair of values + /// Range that encompass all keys that start with (item1, item2, ..) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange(T1 item1, T2 item2) + { + //TODO: add concept of "range" on IKeyEncoder ? + return KeyRange.PrefixedBy(Encode(item1, item2)); + } + + #endregion + + #region ToRangePartial() + + /// Return the range of all legal keys in this subpsace, that start with the specified first item + /// Range that encompass all keys that start with (tuple.Item1, ..) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRangePartial(STuple tuple) + { + return ToRangePartial(tuple.Item1); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified first item + /// Range that encompass all keys that start with (item1, ..) 
+ [Pure] + public KeyRange ToRangePartial(T1 item1) + { + return KeyRange.PrefixedBy(EncodePartial(item1)); + } + + #endregion + + #region Pack() + + /// Pack a 2-tuple into a key in this subspace + /// Pair of values + /// Encoded key in this subspace + [Pure] + public Slice Pack(STuple tuple) + { + return Pack(tuple.ToValueTuple()); + } + + /// Pack a 2-tuple into a key in this subspace + /// Pair of values + /// Encoded key in this subspace + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack((T1, T2) tuple) + { + //REVIEW: how could we better guess the capacity, depending on the values of T1/T2? + var sw = this.Parent.OpenWriter(24); + this.Encoder.WriteKeyPartsTo(ref sw, 2, ref tuple); + return sw.ToSlice(); + } + + /// Pack a 2-tuple into a key in this subspace + /// Tuple that must be of size 2 + /// Encoded key in this subspace + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack([NotNull] TTuple tuple) + where TTuple : ITuple + { + tuple.OfSize(2); + return Encode(tuple.Get(0), tuple.Get(1)); + } + + #endregion + + #region Encode() + + public Slice this[T1 item1, T2 item2] + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => Encode(item1, item2); + } + + public Slice this[(T1, T2) items] + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => Encode(items.Item1, items.Item2); + } + + /// Encode a pair of values into a key in this subspace + /// First part of the key + /// Second part of the key + /// Encoded key in this subspace + /// The key can be decoded back into its original components using or + [Pure] + public Slice Encode(T1 item1, T2 item2) + { + var sw = this.Parent.OpenWriter(24); + var tuple = (item1, item2); + this.Encoder.WriteKeyPartsTo(ref sw, 2, ref tuple); + return sw.ToSlice(); + } + + #endregion + + #region EncodePartial() + + [Pure] + public Slice EncodePartial(T1 item1) + { + var sw = this.Parent.OpenWriter(16); + var tuple = (item1, default(T2)); + 
this.Encoder.WriteKeyPartsTo(ref sw, 1, ref tuple); + return sw.ToSlice(); + } + + #endregion + + #region Decode() + + [Pure] + //REVIEW: => Unpack()? + //REVIEW: return ValueTuple<..> instead? (C#7) + public STuple Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + public void Decode(Slice packedKey, out T1 item1, out T2 item2) + { + this.Encoder + .DecodeKey(this.Parent.ExtractKey(packedKey)) + .Deconstruct(out item1, out item2); + } + + #endregion + + #region DecodePartial() + + [Pure] + public T1 DecodePartial(Slice packedKey) + { + return this.Encoder.DecodeKeyParts(1, packedKey).Item1; + } + + #endregion + + /// Return a user-friendly string representation of a key of this subspace + [Pure] + public string Dump(Slice packedKey) + { + if (packedKey.IsNull) return String.Empty; + //TODO: defer to the encoding itself? + var key = this.Parent.ExtractKey(packedKey); + try + { + //REVIEW: we need a TryUnpack! + return this.Encoder.DecodeKey(key).ToString(); + } + catch (Exception) + { // decoding failed, or some other non-trival + return key.PrettyPrint(); + } + } + + } + +} diff --git a/FoundationDB.Client/Subspaces/TypedKeySubspace`3.cs b/FoundationDB.Client/Subspaces/TypedKeySubspace`3.cs new file mode 100644 index 000000000..91593a3df --- /dev/null +++ b/FoundationDB.Client/Subspaces/TypedKeySubspace`3.cs @@ -0,0 +1,268 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; + + public interface ITypedKeySubspace : IKeySubspace + { + /// Helper to encode/decode keys using this subspace's default encoding + [NotNull] + TypedKeys Keys { get; } + + /// Encoding used to generate and parse the keys of this subspace + [NotNull] + ICompositeKeyEncoder KeyEncoder { get; } + + } + + public sealed class TypedKeySubspace : KeySubspace, ITypedKeySubspace + { + public ICompositeKeyEncoder KeyEncoder { get; } + + internal TypedKeySubspace(Slice prefix, [NotNull] ICompositeKeyEncoder encoder) + : base(prefix) + { + Contract.Requires(encoder != null); + this.KeyEncoder = encoder; + this.Keys = new TypedKeys(this, this.KeyEncoder); + } + + public TypedKeys Keys { get; } + + } + + [DebuggerDisplay("{Parent.ToString(),nq)}")] + public sealed class TypedKeys + { + + [NotNull] + 
private readonly TypedKeySubspace Parent; + + [NotNull] + public ICompositeKeyEncoder Encoder { get; } + + internal TypedKeys( + [NotNull] TypedKeySubspace parent, + [NotNull] ICompositeKeyEncoder encoder) + { + Contract.Requires(parent != null && encoder != null); + this.Parent = parent; + this.Encoder = encoder; + } + + #region ToRange() + + /// Return the range of all legal keys in this subpsace + /// A "legal" key is one that can be decoded into the original triple of values + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange() + { + return this.Parent.ToRange(); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (tuple.Item1, tuple.Item2, tuple.Item3) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange(STuple tuple) + { + return ToRange(tuple.Item1, tuple.Item2, tuple.Item3); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (tuple.Item1, tuple.Item2, tuple.Item3) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange((T1, T2, T3) tuple) + { + return ToRange(tuple.Item1, tuple.Item2, tuple.Item3); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRange(T1 item1, T2 item2, T3 item3) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? 
+ return KeyRange.PrefixedBy(Encode(item1, item2, item3)); + } + + #endregion + + #region ToRangePartial() + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRangePartial(STuple tuple) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? + return KeyRange.PrefixedBy(EncodePartial(tuple.Item1, tuple.Item2)); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRangePartial((T1, T2) tuple) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? + return KeyRange.PrefixedBy(EncodePartial(tuple.Item1, tuple.Item2)); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRangePartial(T1 item1, T2 item2) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? + return KeyRange.PrefixedBy(EncodePartial(item1, item2)); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRangePartial(T1 item1) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? 
+ return KeyRange.PrefixedBy(EncodePartial(item1)); + } + + #endregion + + #region Pack() + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack(STuple tuple) + { + return Encode(tuple.Item1, tuple.Item2, tuple.Item3); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack((T1, T2, T3) tuple) + { + return Encode(tuple.Item1, tuple.Item2, tuple.Item3); + } + + [Pure] + public Slice Pack(TTuple tuple) + where TTuple : ITuple + { + tuple.OfSize(3); + return Encode(tuple.Get(0), tuple.Get(1), tuple.Get(2)); + } + + #endregion + + #region Encode() + + public Slice this[T1 item1, T2 item2, T3 item3] + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => Encode(item1, item2, item3); + } + + public Slice this[(T1, T2, T3) items] + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => Encode(items.Item1, items.Item2, items.Item3); + } + + [Pure] + public Slice Encode(T1 item1, T2 item2, T3 item3) + { + var bytes = this.Encoder.EncodeKey(item1, item2, item3); + var sw = this.Parent.OpenWriter(bytes.Count); + sw.WriteBytes(bytes); + return sw.ToSlice(); + } + + [Pure] + public Slice EncodePartial(T1 item1, T2 item2) + { + var sw = this.Parent.OpenWriter(16); + var tuple = (item1, item2, default(T3)); + this.Encoder.WriteKeyPartsTo(ref sw, 2, ref tuple); + return sw.ToSlice(); + } + + [Pure] + public Slice EncodePartial(T1 item1) + { + var sw = this.Parent.OpenWriter(16); + var tuple = (item1, default(T2), default(T3)); + this.Encoder.WriteKeyPartsTo(ref sw, 1, ref tuple); + return sw.ToSlice(); + } + + #endregion + + #region Decode() + + [Pure] + //REVIEW: => Unpack()? + //REVIEW: return ValueTuple<..> instead? 
(C#7) + public STuple Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + public void Decode(Slice packedKey, out T1 item1, out T2 item2, out T3 item3) + { + this.Encoder + .DecodeKey(this.Parent.ExtractKey(packedKey)) + .Deconstruct(out item1, out item2, out item3); + } + + #endregion + + #region Dump() + + /// Return a user-friendly string representation of a key of this subspace + [Pure] + public string Dump(Slice packedKey) + { + if (packedKey.IsNull) return String.Empty; + //TODO: defer to the encoding itself? + var key = this.Parent.ExtractKey(packedKey); + try + { + //REVIEW: we need a TryUnpack! + return this.Encoder.DecodeKey(key).ToString(); + } + catch (Exception) + { // decoding failed, or some other non-trival + return key.PrettyPrint(); + } + } + + #endregion + + } + +} diff --git a/FoundationDB.Client/Subspaces/TypedKeySubspace`4.cs b/FoundationDB.Client/Subspaces/TypedKeySubspace`4.cs new file mode 100644 index 000000000..16b870e4a --- /dev/null +++ b/FoundationDB.Client/Subspaces/TypedKeySubspace`4.cs @@ -0,0 +1,289 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; + + public interface ITypedKeySubspace : IKeySubspace + { + /// Helper to encode/decode keys using this subspace's default encoding + [NotNull] + TypedKeys Keys { get; } + + /// Encoding used to generate and parse the keys of this subspace + [NotNull] + ICompositeKeyEncoder KeyEncoder { get; } + + } + + public sealed class TypedKeySubspace : KeySubspace, ITypedKeySubspace + { + public ICompositeKeyEncoder KeyEncoder { get; } + + internal TypedKeySubspace(Slice prefix, [NotNull] ICompositeKeyEncoder encoder) + : base(prefix) + { + this.KeyEncoder = encoder; + this.Keys = new TypedKeys(this, this.KeyEncoder); + } + + public TypedKeys Keys { get; } + + } + + [DebuggerDisplay("{Parent.ToString(),nq)}")] + public sealed class TypedKeys + { + + [NotNull] + private readonly TypedKeySubspace Parent; + + [NotNull] + public ICompositeKeyEncoder Encoder { get; } + + internal TypedKeys( + [NotNull] TypedKeySubspace parent, + [NotNull] ICompositeKeyEncoder encoder) + { + 
Contract.Requires(parent != null && encoder != null); + this.Parent = parent; + this.Encoder = encoder; + } + + #region ToRange() + + /// Return the range of all legal keys in this subpsace + /// A "legal" key is one that can be decoded into the original triple of values + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange() + { + return this.Parent.ToRange(); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (tuple.Item1, tuple.Item2, tuple.Item3) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange(STuple tuple) + { + return ToRange(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (tuple.Item1, tuple.Item2, tuple.Item3) + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public KeyRange ToRange((T1, T2, T3, T4) tuple) + { + return ToRange(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRange(T1 item1, T2 item2, T3 item3, T4 item4) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? + return KeyRange.PrefixedBy(Encode(item1, item2, item3, item4)); + } + + #endregion + + #region ToRangePartial() + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRangePartial(STuple tuple) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? 
+ return KeyRange.PrefixedBy(EncodePartial(tuple.Item1, tuple.Item2, tuple.Item3)); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRangePartial((T1, T2, T3) tuple) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? + return KeyRange.PrefixedBy(EncodePartial(tuple.Item1, tuple.Item2, tuple.Item3)); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRangePartial(T1 item1, T2 item2, T3 item3) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? + return KeyRange.PrefixedBy(EncodePartial(item1, item2, item3)); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified pair of values + /// Range that encompass all keys that start with (item1, item2) + public KeyRange ToRangePartial(T1 item1, T2 item2) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? + return KeyRange.PrefixedBy(EncodePartial(item1, item2)); + } + + /// Return the range of all legal keys in this subpsace, that start with the specified triple of values + /// Range that encompass all keys that start with (item1, item2, item3) + public KeyRange ToRangePartial(T1 item1) + { + //HACKHACK: add concept of "range" on IKeyEncoder ? 
+ return KeyRange.PrefixedBy(EncodePartial(item1)); + } + + #endregion + + #region Pack() + + public Slice this[T1 item1, T2 item2, T3 item3, T4 item4] + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => Encode(item1, item2, item3, item4); + } + + public Slice this[(T1, T2, T3, T4) items] + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => Encode(items.Item1, items.Item2, items.Item3, items.Item4); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack(STuple tuple) + { + return Encode(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice Pack((T1, T2, T3, T4) tuple) + { + return Encode(tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + + [Pure] + public Slice Pack(TTuple tuple) + where TTuple : ITuple + { + tuple.OfSize(4); + return Encode(tuple.Get(0), tuple.Get(1), tuple.Get(2), tuple.Get(3)); + } + + #endregion + + #region Encode() + + [Pure] + public Slice Encode(T1 item1, T2 item2, T3 item3, T4 item4) + { + var bytes = this.Encoder.EncodeKey(item1, item2, item3, item4); + var sw = this.Parent.OpenWriter(bytes.Count); + sw.WriteBytes(bytes); + return sw.ToSlice(); + } + + #endregion + + + #region EncodePartial() + + [Pure] + public Slice EncodePartial(T1 item1, T2 item2, T3 item3) + { + var sw = this.Parent.OpenWriter(24); + var tuple = (item1, item2, item3, default(T4)); + this.Encoder.WriteKeyPartsTo(ref sw, 3, ref tuple); + return sw.ToSlice(); + } + + [Pure] + public Slice EncodePartial(T1 item1, T2 item2) + { + var sw = this.Parent.OpenWriter(16); + var tuple = (item1, item2, default(T3), default(T4)); + this.Encoder.WriteKeyPartsTo(ref sw, 1, ref tuple); + return sw.ToSlice(); + } + + [Pure] + public Slice EncodePartial(T1 item1) + { + var sw = this.Parent.OpenWriter(16); + var tuple = (item1, default(T2), default(T3), default(T4)); + this.Encoder.WriteKeyPartsTo(ref sw, 1, ref tuple); + return 
sw.ToSlice(); + } + + #endregion + + #region Decode() + + [Pure] + //REVIEW: => Unpack()? + //REVIEW: return ValueTuple<..> instead? (C#7) + public STuple Decode(Slice packedKey) + { + return this.Encoder.DecodeKey(this.Parent.ExtractKey(packedKey)); + } + + public void Decode(Slice packedKey, out T1 item1, out T2 item2, out T3 item3, out T4 item4) + { + this.Encoder + .DecodeKey(this.Parent.ExtractKey(packedKey)) + .Deconstruct(out item1, out item2, out item3, out item4); + } + + #endregion + + #region Dump() + + /// Return a user-friendly string representation of a key of this subspace + [Pure] + public string Dump(Slice packedKey) + { + if (packedKey.IsNull) return String.Empty; + //TODO: defer to the encoding itself? + var key = this.Parent.ExtractKey(packedKey); + try + { + //REVIEW: we need a TryUnpack! + return this.Encoder.DecodeKey(key).ToString(); + } + catch (Exception) + { // decoding failed, or some other non-trival + return key.PrettyPrint(); + } + } + + #endregion + + } + +} diff --git a/FoundationDB.Client/Tuples/Encoding/ITupleSerializable.cs b/FoundationDB.Client/Tuples/Encoding/ITupleSerializable.cs new file mode 100644 index 000000000..6e6d28d1f --- /dev/null +++ b/FoundationDB.Client/Tuples/Encoding/ITupleSerializable.cs @@ -0,0 +1,65 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Collections.Tuples.Encoding +{ + using System; + using JetBrains.Annotations; + + /// Represents an object that can serialize itself using the Tuple Binary Encoding format + public interface ITupleSerializable //REVIEW: ITuplePackable? + { + /// Appends the packed bytes of this instance to the end of a buffer + /// Buffer that will received the packed bytes of this instance + void PackTo(ref TupleWriter writer); + + //note: there is not UnpackFrom, because it does not play way with constructors and readonly fields! + // => use ITupleSerializer for this! + } + + /// Represents an object that can serialize or deserialize tuples of type , using the Tuple Binary Encoding format + /// Type of tuples that can be processed by this instance + public interface ITupleSerializer //REVIEW: ITuplePacker ? 
+ where TTuple : ITuple + { + /// Appends the packed bytes of an item to the end of a buffer + /// Buffer that will received the packed bytes of this instance + /// Tuple that will be packed + void PackTo(ref TupleWriter writer, in TTuple tuple); + + /// Decode the packed bytes from a buffer, and return the corresponding item + /// Buffer that contains the bytes the decode + /// Receives the decoded tuple + /// + /// The value of will be updated to point to either the end of the buffer, or the next "element" if there are more bytes available. + /// + [Pure] + void UnpackFrom(ref TupleReader reader, out TTuple tuple); + + } +} diff --git a/FoundationDB.Client/Layers/Tuples/FdbPrefixedTuple.cs b/FoundationDB.Client/Tuples/Encoding/PrefixedTuple.cs similarity index 70% rename from FoundationDB.Client/Layers/Tuples/FdbPrefixedTuple.cs rename to FoundationDB.Client/Tuples/Encoding/PrefixedTuple.cs index 2e007c03f..cc424df50 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbPrefixedTuple.cs +++ b/FoundationDB.Client/Tuples/Encoding/PrefixedTuple.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,28 +26,27 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples.Encoding { - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; + using JetBrains.Annotations; /// Tuple that has a fixed abitrary binary prefix - [DebuggerDisplay("{ToString()}")] - public sealed class FdbPrefixedTuple : IFdbTuple + [DebuggerDisplay("{ToString(),nq}")] + public sealed class PrefixedTuple : ITuple, ITupleSerializable { // Used in scenario where we will append keys to a common base tuple // note: linked list are not very efficient, but we do not expect a very long chain, and the head will usually be a subspace or memoized tuple - private Slice m_prefix; //PERF: readonly struct - private readonly IFdbTuple m_items; + private readonly Slice m_prefix; + private readonly ITuple m_items; - internal FdbPrefixedTuple(Slice prefix, IFdbTuple items) + public PrefixedTuple(Slice prefix, ITuple items) { Contract.Requires(!prefix.IsNull && items != null); @@ -56,15 +55,16 @@ internal FdbPrefixedTuple(Slice prefix, IFdbTuple items) } /// Binary prefix to all the keys produced by this tuple - public Slice Prefix + public Slice Prefix => m_prefix; + + void ITupleSerializable.PackTo(ref TupleWriter writer) { - get { return m_prefix; } + PackTo(ref writer); } - - public void PackTo(ref TupleWriter writer) + internal void PackTo(ref TupleWriter writer) { writer.Output.WriteBytes(m_prefix); - m_items.PackTo(ref writer); + TupleEncoder.WriteTo(ref writer, m_items); } public Slice ToSlice() @@ -74,62 +74,48 @@ public Slice ToSlice() return writer.Output.ToSlice(); } - Slice IFdbKey.ToFoundationDbKey() - { - return this.ToSlice(); - } + public int Count => m_items.Count; - public int Count - { - 
get { return m_items.Count; } - } + public object this[int index] => m_items[index]; - public object this[int index] - { - get { return m_items[index]; } - } + public ITuple this[int? fromIncluded, int? toExcluded] => m_items[fromIncluded, toExcluded]; - public IFdbTuple this[int? fromIncluded, int? toExcluded] + public T Get(int index) { - get { return m_items[fromIncluded, toExcluded]; } + return m_items.Get(index); } - public R Get(int index) + public T Last() { - return m_items.Get(index); + return m_items.Last(); } - public R Last() + ITuple ITuple.Append(T value) { - return m_items.Last(); + return Append(value); } - IFdbTuple IFdbTuple.Append(R value) + ITuple ITuple.Concat(ITuple tuple) { - return this.Append(value); - } - - IFdbTuple IFdbTuple.Concat(IFdbTuple tuple) - { - return this.Concat(tuple); + return Concat(tuple); } [NotNull] - public FdbPrefixedTuple Append(R value) + public PrefixedTuple Append(T value) { - return new FdbPrefixedTuple(m_prefix, m_items.Append(value)); + return new PrefixedTuple(m_prefix, m_items.Append(value)); } - [NotNull] - public FdbPrefixedTuple Concat([NotNull] IFdbTuple tuple) + [Pure, NotNull] + public PrefixedTuple Concat([NotNull] ITuple tuple) { - if (tuple == null) throw new ArgumentNullException("tuple"); + Contract.NotNull(tuple, nameof(tuple)); if (tuple.Count == 0) return this; - return new FdbPrefixedTuple(m_prefix, m_items.Concat(tuple)); + return new PrefixedTuple(m_prefix, m_items.Concat(tuple)); } - public void CopyTo([NotNull] object[] array, int offset) + public void CopyTo(object[] array, int offset) { m_items.CopyTo(array, offset); } @@ -148,7 +134,7 @@ public override string ToString() { //TODO: should we add the prefix to the string representation ? 
// => something like "(123, 'abc', true)" - return FdbTuple.ToString(this); + return STuple.Formatter.ToString(this); } public override bool Equals(object obj) @@ -156,7 +142,7 @@ public override bool Equals(object obj) return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); } - public bool Equals(IFdbTuple other) + public bool Equals(ITuple other) { return !object.ReferenceEquals(other, null) && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); } @@ -171,7 +157,7 @@ bool System.Collections.IStructuralEquatable.Equals(object other, System.Collect if (object.ReferenceEquals(this, other)) return true; if (other == null) return false; - var linked = other as FdbPrefixedTuple; + var linked = other as PrefixedTuple; if (!object.ReferenceEquals(linked, null)) { // Should all of these tuples be considered equal ? @@ -193,12 +179,12 @@ bool System.Collections.IStructuralEquatable.Equals(object other, System.Collect return comparer.Equals(m_items, linked.m_items); } - return FdbTuple.Equals(this, other, comparer); + return TupleHelpers.Equals(this, other, comparer); } int IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer comparer) { - return FdbTuple.CombineHashCodes( + return HashCodes.Combine( m_prefix.GetHashCode(), comparer.GetHashCode(m_items) ); diff --git a/FoundationDB.Client/Layers/Tuples/FdbSlicedTuple.cs b/FoundationDB.Client/Tuples/Encoding/SlicedTuple.cs similarity index 68% rename from FoundationDB.Client/Layers/Tuples/FdbSlicedTuple.cs rename to FoundationDB.Client/Tuples/Encoding/SlicedTuple.cs index 871fc8354..eaa48554b 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbSlicedTuple.cs +++ b/FoundationDB.Client/Tuples/Encoding/SlicedTuple.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,19 +26,19 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples.Encoding { - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using FoundationDB.Client.Utils; using System; using System.Collections; using System.Collections.Generic; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; /// Lazily-evaluated tuple that was unpacked from a key - internal sealed class FdbSlicedTuple : IFdbTuple + public sealed class SlicedTuple : ITuple, ITupleSerializable { - // FdbTuple.Unpack() splits a key into an array of slices (one for each item). We hold onto these slices, and only deserialize them if needed. + // STuple.Unpack() splits a key into an array of slices (one for each item). We hold onto these slices, and only deserialize them if needed. // This is helpful because in most cases, the app code will only want to get the last few items (e.g: tuple[-1]) or skip the first few items (some subspace). // We also support offset/count so that Splicing is efficient (used a lot to remove the suffixes from keys) @@ -51,7 +51,7 @@ internal sealed class FdbSlicedTuple : IFdbTuple private int? m_hashCode; - public FdbSlicedTuple(Slice[] slices, int offset, int count) + public SlicedTuple(Slice[] slices, int offset, int count) { Contract.Requires(slices != null && offset >= 0 && count >= 0); Contract.Requires(offset + count <= slices.Length); @@ -61,75 +61,61 @@ public FdbSlicedTuple(Slice[] slices, int offset, int count) m_count = count; } - public void PackTo(ref TupleWriter writer) + void ITupleSerializable.PackTo(ref TupleWriter writer) { - var slices = m_slices; - for (int n = m_count, p = m_offset; n > 0; n--) - { - writer.Output.WriteBytes(slices[p++]); - } - } - - public Slice ToSlice() - { - // merge all the slices making up this segment - //TODO: should we get the sum of all slices to pre-allocated the buffer ? 
- var writer = new TupleWriter(); PackTo(ref writer); - return writer.Output.ToSlice(); } - - Slice IFdbKey.ToFoundationDbKey() + internal void PackTo(ref TupleWriter writer) { - return this.ToSlice(); + var slices = m_slices; + int offset = m_offset; + int count = m_count; + for (int i = 0; i < count; i++) + { + writer.Output.WriteBytes(slices[i + offset]); + } } - public int Count - { - get { return m_count; } - } + public int Count => m_count; - public object this[int index] - { - get { return FdbTuplePackers.DeserializeBoxed(GetSlice(index)); } - } + public object this[int index] => TuplePackers.DeserializeBoxed(GetSlice(index)); - public IFdbTuple this[int? fromIncluded, int? toExcluded] + public ITuple this[int? fromIncluded, int? toExcluded] { get { - int begin = fromIncluded.HasValue ? FdbTuple.MapIndexBounded(fromIncluded.Value, m_count) : 0; - int end = toExcluded.HasValue ? FdbTuple.MapIndexBounded(toExcluded.Value, m_count) : m_count; + int begin = fromIncluded.HasValue ? TupleHelpers.MapIndexBounded(fromIncluded.Value, m_count) : 0; + int end = toExcluded.HasValue ? 
TupleHelpers.MapIndexBounded(toExcluded.Value, m_count) : m_count; int len = end - begin; - if (len <= 0) return FdbTuple.Empty; + if (len <= 0) return STuple.Empty; if (begin == 0 && len == m_count) return this; - return new FdbSlicedTuple(m_slices, m_offset + begin, len); + return new SlicedTuple(m_slices, m_offset + begin, len); } } - public R Get(int index) + public T Get(int index) { - return FdbTuplePacker.Deserialize(GetSlice(index)); + return TuplePacker.Deserialize(GetSlice(index)); } - public R Last() + public T Last() { if (m_count == 0) throw new InvalidOperationException("Tuple is empty"); - return FdbTuplePacker.Deserialize(m_slices[m_offset + m_count - 1]); + return TuplePacker.Deserialize(m_slices[m_offset + m_count - 1]); } public Slice GetSlice(int index) { - return m_slices[m_offset + FdbTuple.MapIndex(index, m_count)]; + return m_slices[m_offset + TupleHelpers.MapIndex(index, m_count)]; } - IFdbTuple IFdbTuple.Append(T value) + ITuple ITuple.Append(T value) { throw new NotSupportedException(); } - IFdbTuple IFdbTuple.Concat(IFdbTuple tuple) + ITuple ITuple.Concat(ITuple tuple) { throw new NotSupportedException(); } @@ -138,7 +124,7 @@ public void CopyTo(object[] array, int offset) { for (int i = 0; i < m_count;i++) { - array[i + offset] = FdbTuplePackers.DeserializeBoxed(m_slices[i + m_offset]); + array[i + offset] = TuplePackers.DeserializeBoxed(m_slices[i + m_offset]); } } @@ -146,7 +132,7 @@ public IEnumerator GetEnumerator() { for (int i = 0; i < m_count; i++) { - yield return FdbTuplePackers.DeserializeBoxed(m_slices[i + m_offset]); + yield return TuplePackers.DeserializeBoxed(m_slices[i + m_offset]); } } @@ -159,7 +145,7 @@ public override string ToString() { //OPTIMIZE: this could be optimized, because it may be called a lot when logging is enabled on keys parsed from range reads // => each slice has a type prefix that could be used to format it to a StringBuilder faster, maybe? 
- return FdbTuple.ToString(this); + return STuple.Formatter.ToString(this); } public override bool Equals(object obj) @@ -167,7 +153,7 @@ public override bool Equals(object obj) return obj != null && ((IStructuralEquatable)this).Equals(obj, SimilarValueComparer.Default); } - public bool Equals(IFdbTuple other) + public bool Equals(ITuple other) { return !object.ReferenceEquals(other, null) && ((IStructuralEquatable)this).Equals(other, SimilarValueComparer.Default); } @@ -182,7 +168,7 @@ bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) if (object.ReferenceEquals(this, other)) return true; if (other == null) return false; - var sliced = other as FdbSlicedTuple; + var sliced = other as SlicedTuple; if (!object.ReferenceEquals(sliced, null)) { if (sliced.m_count != m_count) return false; @@ -195,7 +181,7 @@ bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer) return false; } - return FdbTuple.Equals(this, other, comparer); + return TupleHelpers.Equals(this, other, comparer); } int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) @@ -210,7 +196,7 @@ int IStructuralEquatable.GetHashCode(IEqualityComparer comparer) int h = 0; for (int i = 0; i < m_count; i++) { - h = FdbTuple.CombineHashCodes(h, comparer.GetHashCode(m_slices[i + m_offset])); + h = HashCodes.Combine(h, comparer.GetHashCode(m_slices[i + m_offset])); } if (canUseCache) m_hashCode = h; return h; diff --git a/FoundationDB.Client/Layers/Tuples/FdbTupleCodec`1.cs b/FoundationDB.Client/Tuples/Encoding/TupleCodec`1.cs similarity index 81% rename from FoundationDB.Client/Layers/Tuples/FdbTupleCodec`1.cs rename to FoundationDB.Client/Tuples/Encoding/TupleCodec`1.cs index f9bf577b1..af34a14ec 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbTupleCodec`1.cs +++ b/FoundationDB.Client/Tuples/Encoding/TupleCodec`1.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -26,56 +26,56 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples.Encoding { - using FoundationDB.Client; - using JetBrains.Annotations; using System; + using Doxense.Memory; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; /// Type codec that uses the Tuple Encoding format /// Type of the values encoded by this codec - public sealed class FdbTupleCodec : FdbTypeCodec, IValueEncoder + public sealed class TupleCodec : TypeCodec, IValueEncoder { - private static volatile FdbTupleCodec s_defaultSerializer; + private static volatile TupleCodec s_defaultSerializer; - public static FdbTupleCodec Default + public static TupleCodec Default { [NotNull] - get { return s_defaultSerializer ?? (s_defaultSerializer = new FdbTupleCodec(default(T))); } + get { return s_defaultSerializer ?? (s_defaultSerializer = new TupleCodec(default(T))); } } private readonly T m_missingValue; - public FdbTupleCodec(T missingValue) + public TupleCodec(T missingValue) { m_missingValue = missingValue; } public override Slice EncodeOrdered(T value) { - return FdbTuple.EncodeKey(value); + return TupleEncoder.EncodeKey(default(Slice), value); } public override void EncodeOrderedSelfTerm(ref SliceWriter output, T value) { //HACKHACK: we lose the current depth! var writer = new TupleWriter(output); - FdbTuplePacker.Encoder(ref writer, value); + TuplePackers.SerializeTo(ref writer, value); output = writer.Output; } public override T DecodeOrdered(Slice input) { - return FdbTuple.DecodeKey(input); + return TuPack.DecodeKey(input); } public override T DecodeOrderedSelfTerm(ref SliceReader input) { //HACKHACK: we lose the current depth! 
var reader = new TupleReader(input); - T value; - bool res = FdbTuple.DecodeNext(ref reader, out value); + bool res = TuPack.DecodeNext(ref reader, out T value); input = reader.Input; return res ? value : m_missingValue; } diff --git a/FoundationDB.Client/Tuples/Encoding/TupleEncoder.cs b/FoundationDB.Client/Tuples/Encoding/TupleEncoder.cs new file mode 100644 index 000000000..ce40f4bfa --- /dev/null +++ b/FoundationDB.Client/Tuples/Encoding/TupleEncoder.cs @@ -0,0 +1,1199 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace Doxense.Collections.Tuples.Encoding +{ + using System; + using System.Collections.Generic; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using Doxense.Serialization.Encoders; + using JetBrains.Annotations; + + /// Helper class to encode and decode tuples to and from binary buffers + /// This class is intended for implementors of tuples, and should not be called directly by application code! + public static class TupleEncoder + { + + /// Internal helper that serializes the content of a Tuple into a TupleWriter, meant to be called by implementers of types. + /// Warning: This method will call into if inmplements + + internal static void WriteTo(ref TupleWriter writer, [NotNull] TTuple tuple) + where TTuple : ITuple + { + // ReSharper disable once SuspiciousTypeConversion.Global + if (tuple is ITupleSerializable ts) + { // optimized version + ts.PackTo(ref writer); + return; + } + + int n = tuple.Count; + // small tuples probably are faster with indexers + //REVIEW: when should we use indexers, and when should we use foreach? + if (n <= 4) + { + for (int i = 0; i < n; i++) + { + TuplePackers.SerializeObjectTo(ref writer, tuple[i]); + } + } + else + { + foreach (object item in tuple) + { + TuplePackers.SerializeObjectTo(ref writer, item); + } + } + } + + #region Packing... 
+ + // Without prefix + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure] + public static Slice Pack([CanBeNull] TTuple tuple) + where TTuple : ITuple + { + if (tuple == null) return Slice.Nil; + var writer = new TupleWriter(); + WriteTo(ref writer, tuple); + return writer.ToSlice(); + } + + /// Pack an array of N-tuples, all sharing the same buffer + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] + [NotNull] + public static Slice[] Pack([NotNull] params TTuple[] tuples) //REVIEW: change name to PackRange or PackBatch? + where TTuple : ITuple + { + var empty = default(Slice); + return Pack(empty, tuples); + } + + public static void PackTo(ref SliceWriter writer, [CanBeNull] TTuple tuple) + where TTuple : ITuple + { + if (tuple != null) + { + var tw = new TupleWriter(writer); + WriteTo(ref tw, tuple); + writer = tw.Output; + } + } + + public static void Pack(ref TupleWriter writer, [CanBeNull] TTuple tuple) + where TTuple : ITuple + { + if (tuple != null) + { + WriteTo(ref writer, tuple); + } + } + + // With prefix + + /// Efficiently concatenate a prefix with the packed representation of a tuple + public static Slice Pack(Slice prefix, [CanBeNull] TTuple tuple) + where TTuple : ITuple + { + if (tuple == null || tuple.Count == 0) return prefix; + + var writer = new TupleWriter(32 + prefix.Count); + writer.Output.WriteBytes(prefix); + WriteTo(ref writer, tuple); + return writer.ToSlice(); + } + + /// Pack an array of N-tuples, all sharing the same buffer + /// Common prefix added to all the tuples + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack("abc", [ ("Foo", 1), ("Foo", 2) ]) => [ "abc\x02Foo\x00\x15\x01", "abc\x02Foo\x00\x15\x02" ] + [NotNull] + public static Slice[] Pack(Slice prefix, [NotNull] params 
TTuple[] tuples) + where TTuple : ITuple + { + Contract.NotNull(tuples, nameof(tuples)); + + // pre-allocate by supposing that each tuple will take at least 16 bytes + var writer = new TupleWriter(tuples.Length * (16 + prefix.Count)); + var next = new List(tuples.Length); + + //TODO: use multiple buffers if item count is huge ? + + foreach (var tuple in tuples) + { + writer.Output.WriteBytes(prefix); + WriteTo(ref writer, tuple); + next.Add(writer.Output.Position); + } + + return Slice.SplitIntoSegments(writer.Output.Buffer, 0, next); + } + + /// Pack a sequence of N-tuples, all sharing the same buffer + /// Common prefix added to all the tuples + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack("abc", [ ("Foo", 1), ("Foo", 2) ]) => [ "abc\x02Foo\x00\x15\x01", "abc\x02Foo\x00\x15\x02" ] + [NotNull] + public static Slice[] Pack(Slice prefix, [NotNull] IEnumerable tuples) + where TTuple : ITuple + { + Contract.NotNull(tuples, nameof(tuples)); + + // use optimized version for arrays + if (tuples is TTuple[] array) return Pack(prefix, array); + + var next = new List((tuples as ICollection)?.Count ?? 0); + var writer = new TupleWriter(next.Capacity * (16 + prefix.Count)); + + //TODO: use multiple buffers if item count is huge ? + + foreach (var tuple in tuples) + { + writer.Output.WriteBytes(prefix); + WriteTo(ref writer, tuple); + next.Add(writer.Output.Position); + } + + return Slice.SplitIntoSegments(writer.Output.Buffer, 0, next); + } + + [NotNull] + public static Slice[] Pack(Slice prefix, [NotNull] TElement[] elements, Func transform) + where TTuple : ITuple + { + Contract.NotNull(elements, nameof(elements)); + Contract.NotNull(transform, nameof(transform)); + + var next = new List(elements.Length); + var writer = new TupleWriter(next.Capacity * (16 + prefix.Count)); + + //TODO: use multiple buffers if item count is huge ? 
+ + foreach (var element in elements) + { + var tuple = transform(element); + if (tuple == null) + { + next.Add(writer.Output.Position); + } + else + { + writer.Output.WriteBytes(prefix); + WriteTo(ref writer, tuple); + next.Add(writer.Output.Position); + } + } + + return Slice.SplitIntoSegments(writer.Output.Buffer, 0, next); + } + + [NotNull] + public static Slice[] Pack(Slice prefix, [NotNull] IEnumerable elements, Func transform) + where TTuple : ITuple + { + Contract.NotNull(elements, nameof(elements)); + Contract.NotNull(transform, nameof(transform)); + + // use optimized version for arrays + if (elements is TElement[] array) return Pack(prefix, array, transform); + + var next = new List((elements as ICollection)?.Count ?? 0); + var writer = new TupleWriter(next.Capacity * (16 + prefix.Count)); + + //TODO: use multiple buffers if item count is huge ? + + foreach (var element in elements) + { + var tuple = transform(element); + if (tuple == null) + { + next.Add(writer.Output.Position); + } + else + { + writer.Output.WriteBytes(prefix); + WriteTo(ref writer, tuple); + next.Add(writer.Output.Position); + } + } + + return Slice.SplitIntoSegments(writer.Output.Buffer, 0, next); + } + + // With prefix... 
+ + /// Efficiently concatenate a prefix with the packed representation of a 1-tuple + [Pure] + public static Slice EncodeKey(Slice prefix, T1 value) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, value); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 2-tuple + [Pure] + public static Slice EncodeKey(Slice prefix, T1 value1, T2 value2) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, value1); + TuplePackers.SerializeTo(ref writer, value2); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 2-tuple + [Pure] + public static Slice Pack(Slice prefix, ref (T1, T2) items) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, items.Item1); + TuplePackers.SerializeTo(ref writer, items.Item2); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 3-tuple + public static Slice EncodeKey(Slice prefix, T1 value1, T2 value2, T3 value3) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, value1); + TuplePackers.SerializeTo(ref writer, value2); + TuplePackers.SerializeTo(ref writer, value3); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 3-tuple + public static Slice Pack(Slice prefix, ref (T1, T2, T3) items) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, items.Item1); + TuplePackers.SerializeTo(ref writer, items.Item2); + TuplePackers.SerializeTo(ref writer, items.Item3); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 4-tuple + public static Slice EncodeKey(Slice prefix, T1 value1, 
T2 value2, T3 value3, T4 value4) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, value1); + TuplePackers.SerializeTo(ref writer, value2); + TuplePackers.SerializeTo(ref writer, value3); + TuplePackers.SerializeTo(ref writer, value4); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 4-tuple + public static Slice Pack(Slice prefix, ref (T1, T2, T3, T4) items) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, items.Item1); + TuplePackers.SerializeTo(ref writer, items.Item2); + TuplePackers.SerializeTo(ref writer, items.Item3); + TuplePackers.SerializeTo(ref writer, items.Item4); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 5-tuple + public static Slice EncodeKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, value1); + TuplePackers.SerializeTo(ref writer, value2); + TuplePackers.SerializeTo(ref writer, value3); + TuplePackers.SerializeTo(ref writer, value4); + TuplePackers.SerializeTo(ref writer, value5); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 5-tuple + public static Slice Pack(Slice prefix, ref (T1, T2, T3, T4, T5) items) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, items.Item1); + TuplePackers.SerializeTo(ref writer, items.Item2); + TuplePackers.SerializeTo(ref writer, items.Item3); + TuplePackers.SerializeTo(ref writer, items.Item4); + TuplePackers.SerializeTo(ref writer, items.Item5); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 6-tuple + public static Slice EncodeKey(Slice prefix, T1 
value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, value1); + TuplePackers.SerializeTo(ref writer, value2); + TuplePackers.SerializeTo(ref writer, value3); + TuplePackers.SerializeTo(ref writer, value4); + TuplePackers.SerializeTo(ref writer, value5); + TuplePackers.SerializeTo(ref writer, value6); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 6-tuple + public static Slice Pack(Slice prefix, ref (T1, T2, T3, T4, T5, T6) items) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, items.Item1); + TuplePackers.SerializeTo(ref writer, items.Item2); + TuplePackers.SerializeTo(ref writer, items.Item3); + TuplePackers.SerializeTo(ref writer, items.Item4); + TuplePackers.SerializeTo(ref writer, items.Item5); + TuplePackers.SerializeTo(ref writer, items.Item6); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 7-tuple + public static Slice EncodeKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, value1); + TuplePackers.SerializeTo(ref writer, value2); + TuplePackers.SerializeTo(ref writer, value3); + TuplePackers.SerializeTo(ref writer, value4); + TuplePackers.SerializeTo(ref writer, value5); + TuplePackers.SerializeTo(ref writer, value6); + TuplePackers.SerializeTo(ref writer, value7); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 7-tuple + public static Slice Pack(Slice prefix, ref (T1, T2, T3, T4, T5, T6, T7) items) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, items.Item1); + 
TuplePackers.SerializeTo(ref writer, items.Item2); + TuplePackers.SerializeTo(ref writer, items.Item3); + TuplePackers.SerializeTo(ref writer, items.Item4); + TuplePackers.SerializeTo(ref writer, items.Item5); + TuplePackers.SerializeTo(ref writer, items.Item6); + TuplePackers.SerializeTo(ref writer, items.Item7); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 8-tuple + public static Slice EncodeKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7, T8 value8) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, value1); + TuplePackers.SerializeTo(ref writer, value2); + TuplePackers.SerializeTo(ref writer, value3); + TuplePackers.SerializeTo(ref writer, value4); + TuplePackers.SerializeTo(ref writer, value5); + TuplePackers.SerializeTo(ref writer, value6); + TuplePackers.SerializeTo(ref writer, value7); + TuplePackers.SerializeTo(ref writer, value8); + return writer.ToSlice(); + } + + /// Efficiently concatenate a prefix with the packed representation of a 8-tuple + public static Slice Pack(Slice prefix, ref (T1, T2, T3, T4, T5, T6, T7, T8) items) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TuplePackers.SerializeTo(ref writer, items.Item1); + TuplePackers.SerializeTo(ref writer, items.Item2); + TuplePackers.SerializeTo(ref writer, items.Item3); + TuplePackers.SerializeTo(ref writer, items.Item4); + TuplePackers.SerializeTo(ref writer, items.Item5); + TuplePackers.SerializeTo(ref writer, items.Item6); + TuplePackers.SerializeTo(ref writer, items.Item7); + TuplePackers.SerializeTo(ref writer, items.Item8); + return writer.ToSlice(); + } + + // EncodeKey... + + //REVIEW: do we really ned "Key" in the name? + // => we want to make it obvious that this is to pack ordered keys, but this could be used for anything else... + // => EncodeValues? 
(may be confused with unordered encoding) + // => EncodeItems? + // => Encode? + + /// Pack a 1-tuple directly into a slice + public static Slice Pack(Slice prefix, ref STuple tuple) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TupleSerializer.Default.PackTo(ref writer, tuple); + return writer.ToSlice(); + } + + /// Pack a 2-tuple directly into a slice + public static Slice Pack(Slice prefix, ref STuple tuple) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TupleSerializer.Default.PackTo(ref writer, tuple); + return writer.ToSlice(); + } + + /// Pack a 3-tuple directly into a slice + public static Slice Pack(Slice prefix, ref STuple tuple) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TupleSerializer.Default.PackTo(ref writer, tuple); + return writer.ToSlice(); + } + + /// Pack a 4-tuple directly into a slice + public static Slice Pack(Slice prefix, ref STuple tuple) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TupleSerializer.Default.PackTo(ref writer, tuple); + return writer.ToSlice(); + } + + /// Pack a 5-tuple directly into a slice + public static Slice Pack(Slice prefix, ref STuple tuple) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TupleSerializer.Default.PackTo(ref writer, tuple); + return writer.ToSlice(); + } + + /// Pack a 6-tuple directly into a slice + public static Slice Pack(Slice prefix, ref STuple tuple) + { + var writer = new TupleWriter(); + writer.Output.WriteBytes(prefix); + TupleSerializer.Default.PackTo(ref writer, tuple); + return writer.Output.ToSlice(); + } + + /// Pack a 1-tuple directly into a slice + public static void WriteKeysTo(ref SliceWriter writer, T1 item1) + { + var tw = new TupleWriter(writer); + TuplePackers.SerializeTo(ref tw, item1); + writer = tw.Output; + } + + /// Pack a 2-tuple directly into a slice + public static void WriteKeysTo(ref SliceWriter writer, T1 item1, T2 
item2) + { + var tw = new TupleWriter(writer); + TuplePackers.SerializeTo(ref tw, item1); + TuplePackers.SerializeTo(ref tw, item2); + writer = tw.Output; + } + + /// Pack a 3-tuple directly into a slice + public static void WriteKeysTo(ref SliceWriter writer, T1 item1, T2 item2, T3 item3) + { + var tw = new TupleWriter(writer); + TuplePackers.SerializeTo(ref tw, item1); + TuplePackers.SerializeTo(ref tw, item2); + TuplePackers.SerializeTo(ref tw, item3); + writer = tw.Output; + } + + /// Pack a 4-tuple directly into a slice + public static void WriteKeysTo(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4) + { + var tw = new TupleWriter(writer); + TuplePackers.SerializeTo(ref tw, item1); + TuplePackers.SerializeTo(ref tw, item2); + TuplePackers.SerializeTo(ref tw, item3); + TuplePackers.SerializeTo(ref tw, item4); + writer = tw.Output; + } + + /// Pack a 5-tuple directly into a slice + public static void WriteKeysTo(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + var tw = new TupleWriter(writer); + TuplePackers.SerializeTo(ref tw, item1); + TuplePackers.SerializeTo(ref tw, item2); + TuplePackers.SerializeTo(ref tw, item3); + TuplePackers.SerializeTo(ref tw, item4); + TuplePackers.SerializeTo(ref tw, item5); + writer = tw.Output; + } + + /// Pack a 6-tuple directly into a slice + public static void WriteKeysTo(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + var tw = new TupleWriter(writer); + TuplePackers.SerializeTo(ref tw, item1); + TuplePackers.SerializeTo(ref tw, item2); + TuplePackers.SerializeTo(ref tw, item3); + TuplePackers.SerializeTo(ref tw, item4); + TuplePackers.SerializeTo(ref tw, item5); + TuplePackers.SerializeTo(ref tw, item6); + writer = tw.Output; + } + + /// Pack a 6-tuple directly into a slice + public static void WriteKeysTo(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) + { + var tw = new TupleWriter(writer); + 
TuplePackers.SerializeTo(ref tw, item1); + TuplePackers.SerializeTo(ref tw, item2); + TuplePackers.SerializeTo(ref tw, item3); + TuplePackers.SerializeTo(ref tw, item4); + TuplePackers.SerializeTo(ref tw, item5); + TuplePackers.SerializeTo(ref tw, item6); + TuplePackers.SerializeTo(ref tw, item7); + writer = tw.Output; + } + + /// Pack a 6-tuple directly into a slice + public static void WriteKeysTo(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) + { + var tw = new TupleWriter(writer); + TuplePackers.SerializeTo(ref tw, item1); + TuplePackers.SerializeTo(ref tw, item2); + TuplePackers.SerializeTo(ref tw, item3); + TuplePackers.SerializeTo(ref tw, item4); + TuplePackers.SerializeTo(ref tw, item5); + TuplePackers.SerializeTo(ref tw, item6); + TuplePackers.SerializeTo(ref tw, item7); + TuplePackers.SerializeTo(ref tw, item8); + writer = tw.Output; + } + + /// Merge a sequence of keys with a same prefix, all sharing the same buffer + /// Type of the keys + /// Prefix shared by all keys + /// Sequence of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [NotNull] + public static Slice[] EncodeKeys(Slice prefix, [NotNull] IEnumerable keys) + { + Contract.NotNull(keys, nameof(keys)); + + // use optimized version for arrays + if (keys is T[] array) return EncodeKeys(prefix, array); + + var next = new List((keys as ICollection)?.Count ?? 0); + var writer = new TupleWriter(); + var packer = TuplePacker.Encoder; + + //TODO: use multiple buffers if item count is huge ? 
+ + bool hasPrefix = prefix.IsPresent; + + foreach (var key in keys) + { + if (hasPrefix) writer.Output.WriteBytes(prefix); + packer(ref writer, key); + next.Add(writer.Output.Position); + } + + return Slice.SplitIntoSegments(writer.Output.Buffer, 0, next); + } + + [NotNull] + public static Slice[] EncodeKeys([NotNull] params T[] keys) + { + var empty = default(Slice); + return EncodeKeys(empty, keys); + } + + /// Merge an array of keys with a same prefix, all sharing the same buffer + /// Type of the keys + /// Prefix shared by all keys + /// Sequence of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [NotNull] + public static Slice[] EncodeKeys(Slice prefix, [NotNull] params T[] keys) + { + Contract.NotNull(keys, nameof(keys)); + + // pre-allocate by guessing that each key will take at least 8 bytes. Even if 8 is too small, we should have at most one or two buffer resize + var writer = new TupleWriter(keys.Length * (prefix.Count + 8)); + var next = new List(keys.Length); + var packer = TuplePacker.Encoder; + + //TODO: use multiple buffers if item count is huge ? 
+ + foreach (var key in keys) + { + if (prefix.Count > 0) writer.Output.WriteBytes(prefix); + packer(ref writer, key); + next.Add(writer.Output.Position); + } + + return Slice.SplitIntoSegments(writer.Output.Buffer, 0, next); + } + + /// Merge an array of elements, all sharing the same buffer + /// Type of the elements + /// Type of the keys extracted from the elements + /// Sequence of elements to pack + /// Lambda that extract the key from each element + /// Array of slices (for all keys) that share the same underlying buffer + [NotNull] + public static Slice[] EncodeKeys([NotNull] TElement[] elements, [NotNull] Func selector) + { + var empty = default(Slice); + return EncodeKeys(empty, elements, selector); + } + + /// Merge an array of elements with a same prefix, all sharing the same buffer + /// Type of the elements + /// Type of the keys extracted from the elements + /// Prefix shared by all keys (can be empty) + /// Sequence of elements to pack + /// Lambda that extract the key from each element + /// Array of slices (for all keys) that share the same underlying buffer + [NotNull] + public static Slice[] EncodeKeys(Slice prefix, [NotNull] TElement[] elements, [NotNull] Func selector) + { + Contract.NotNull(elements, nameof(elements)); + Contract.NotNull(selector, nameof(selector)); + + // pre-allocate by guessing that each key will take at least 8 bytes. Even if 8 is too small, we should have at most one or two buffer resize + var writer = new TupleWriter(elements.Length * (prefix.Count + 8)); + var next = new List(elements.Length); + var packer = TuplePacker.Encoder; + + //TODO: use multiple buffers if item count is huge ? 
+ + foreach (var value in elements) + { + if (prefix.Count > 0) writer.Output.WriteBytes(prefix); + packer(ref writer, selector(value)); + next.Add(writer.Output.Position); + } + + return Slice.SplitIntoSegments(writer.Output.Buffer, 0, next); + } + + /// Pack a sequence of keys with a same prefix, all sharing the same buffer + /// Type of the prefix tuple + /// Type of the keys + /// Prefix shared by all keys + /// Sequence of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [NotNull] + public static Slice[] EncodeKeys([NotNull] TTuple prefix, [NotNull] IEnumerable keys) + where TTuple : ITuple + { + Contract.NotNullAllowStructs(prefix, nameof(prefix)); + var head = Pack(prefix); + return EncodeKeys(head, keys); + } + + /// Pack a sequence of keys with a same prefix, all sharing the same buffer + /// Type of the prefix tuple + /// Type of the keys + /// Prefix shared by all keys + /// Sequence of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [NotNull] + public static Slice[] EncodeKeys([NotNull] TTuple prefix, [NotNull] params T1[] keys) + where TTuple : ITuple + { + Contract.NotNullAllowStructs(prefix, nameof(prefix)); + + var head = Pack(prefix); + return EncodeKeys(head, keys); + } + + #endregion + + #region Unpacking... 
+ + /// Unpack a tuple from a serialied key blob + /// Binary key containing a previously packed tuple + /// Unpacked tuple, or the empty tuple if the key is + /// If is equal to + [NotNull] + public static ITuple Unpack(Slice packedKey) + { + if (packedKey.IsNull) throw new ArgumentNullException(nameof(packedKey)); + if (packedKey.Count == 0) return STuple.Empty; + + return TuplePackers.Unpack(packedKey, false); + } + + /// Unpack a tuple from a binary representation + /// Binary key containing a previously packed tuple, or Slice.Nil + /// Unpacked tuple, the empty tuple if is equal to , or null if the key is + [CanBeNull] + public static ITuple UnpackOrDefault(Slice packedKey) + { + if (packedKey.IsNull) return null; + if (packedKey.Count == 0) return STuple.Empty; + return TuplePackers.Unpack(packedKey, false); + } + + /// Unpack a tuple and only return its first element + /// Type of the first value in the decoded tuple + /// Slice that should be entirely parsable as a tuple + /// Decoded value of the first item in the tuple + public static T DecodeFirst(Slice packedKey) + { + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack the first element of an empty tuple"); + + var slice = TuplePackers.UnpackFirst(packedKey); + if (slice.IsNull) throw new InvalidOperationException("Failed to unpack tuple"); + + return TuplePacker.Deserialize(slice); + } + + /// Unpack a tuple and only return its last element + /// Type of the last value in the decoded tuple + /// Slice that should be entirely parsable as a tuple + /// Decoded value of the last item in the tuple + public static T DecodeLast(Slice packedKey) + { + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack the last element of an empty tuple"); + + var slice = TuplePackers.UnpackLast(packedKey); + if (slice.IsNull) throw new InvalidOperationException("Failed to unpack tuple"); + + return TuplePacker.Deserialize(slice); + } + + /// Unpack the value of a 
singleton tuple + /// Type of the single value in the decoded tuple + /// Slice that should contain the packed representation of a tuple with a single element + /// Receives the decoded tuple + /// Throws an exception if the tuple is empty of has more than one element. + public static void DecodeKey(Slice packedKey, out STuple tuple) //REVIEW: or T1 instead of STuple ? + { + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack a single value out of an empty tuple"); + + var slice = TuplePackers.UnpackSingle(packedKey); + if (slice.IsNull) throw new InvalidOperationException("Failed to unpack singleton tuple"); + + tuple = new STuple(TuplePacker.Deserialize(slice)); + } + + public static void DecodeKey(ref TupleReader reader, out STuple tuple) //REVIEW: or T1 instead of STuple ? + { + if (!DecodeNext(ref reader, out T1 item1)) throw new FormatException("Failed to decode first item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than two items"); + + tuple = new STuple(item1); + } + + /// Unpack a key containing two elements + /// Slice that should contain the packed representation of a tuple with two elements + /// Receives the decoded tuple + /// Throws an exception if the tuple is empty of has more than two elements. 
+ public static void DecodeKey(Slice packedKey, out STuple tuple) + { + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack an empty tuple"); + + var reader = new TupleReader(packedKey); + DecodeKey(ref reader, out tuple); + } + + public static void DecodeKey(ref TupleReader reader, out STuple tuple) + { + if (!DecodeNext(ref reader, out T1 item1)) throw new FormatException("Failed to decode first item"); + if (!DecodeNext(ref reader, out T2 item2)) throw new FormatException("Failed to decode second item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than two items"); + tuple = new STuple(item1, item2); + } + + + public static void DecodeKey(ref TupleReader reader, out T1 item1, out T2 item2) + { + if (!DecodeNext(ref reader, out item1)) throw new FormatException("Failed to decode first item"); + if (!DecodeNext(ref reader, out item2)) throw new FormatException("Failed to decode second item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than two items"); + } + + /// Unpack a key containing three elements + /// Slice that should contain the packed representation of a tuple with three elements + /// Receives the decoded tuple + /// Throws an exception if the tuple is empty of has more than three elements. 
+ public static void DecodeKey(Slice packedKey, out STuple tuple) + { + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack an empty tuple"); + + var reader = new TupleReader(packedKey); + DecodeKey(ref reader, out tuple); + } + + public static void DecodeKey(ref TupleReader reader, out STuple tuple) + { + if (!DecodeNext(ref reader, out T1 item1)) throw new FormatException("Failed to decode first item"); + if (!DecodeNext(ref reader, out T2 item2)) throw new FormatException("Failed to decode second item"); + if (!DecodeNext(ref reader, out T3 item3)) throw new FormatException("Failed to decode third item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than three items"); + tuple = new STuple(item1, item2, item3); + } + + public static void DecodeKey(ref TupleReader reader, out T1 item1, out T2 item2, out T3 item3) + { + if (!DecodeNext(ref reader, out item1)) throw new FormatException("Failed to decode first item"); + if (!DecodeNext(ref reader, out item2)) throw new FormatException("Failed to decode second item"); + if (!DecodeNext(ref reader, out item3)) throw new FormatException("Failed to decode third item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than three items"); + } + + /// Unpack a key containing four elements + /// Slice that should contain the packed representation of a tuple with four elements + /// Receives the decoded tuple + /// Throws an exception if the tuple is empty of has more than four elements. 
+ public static void DecodeKey(Slice packedKey, out STuple tuple) + { + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack an empty tuple"); + + var reader = new TupleReader(packedKey); + DecodeKey(ref reader, out tuple); + } + + public static void DecodeKey(ref TupleReader reader, out STuple tuple) + { + if (!DecodeNext(ref reader, out T1 item1)) throw new FormatException("Failed to decode first item"); + if (!DecodeNext(ref reader, out T2 item2)) throw new FormatException("Failed to decode second item"); + if (!DecodeNext(ref reader, out T3 item3)) throw new FormatException("Failed to decode third item"); + if (!DecodeNext(ref reader, out T4 item4)) throw new FormatException("Failed to decode fourth item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than four items"); + tuple = new STuple(item1, item2, item3, item4); + } + + public static void DecodeKey(ref TupleReader reader, out T1 item1, out T2 item2, out T3 item3, out T4 item4) + { + if (!DecodeNext(ref reader, out item1)) throw new FormatException("Failed to decode first item"); + if (!DecodeNext(ref reader, out item2)) throw new FormatException("Failed to decode second item"); + if (!DecodeNext(ref reader, out item3)) throw new FormatException("Failed to decode third item"); + if (!DecodeNext(ref reader, out item4)) throw new FormatException("Failed to decode fourth item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than four items"); + } + + /// Unpack a key containing five elements + /// Slice that should contain the packed representation of a tuple with five elements + /// Receives the decoded tuple + /// Throws an exception if the tuple is empty of has more than five elements. 
+ public static void DecodeKey(Slice packedKey, out STuple tuple) + { + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack an empty tuple"); + + var reader = new TupleReader(packedKey); + DecodeKey(ref reader, out tuple); + } + + public static void DecodeKey(ref TupleReader reader, out STuple tuple) + { + if (!DecodeNext(ref reader, out T1 item1)) throw new FormatException("Failed to decode first item"); + if (!DecodeNext(ref reader, out T2 item2)) throw new FormatException("Failed to decode second item"); + if (!DecodeNext(ref reader, out T3 item3)) throw new FormatException("Failed to decode third item"); + if (!DecodeNext(ref reader, out T4 item4)) throw new FormatException("Failed to decode fourth item"); + if (!DecodeNext(ref reader, out T5 item5)) throw new FormatException("Failed to decode fifth item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than four items"); + tuple = new STuple(item1, item2, item3, item4, item5); + } + + public static void DecodeKey(ref TupleReader reader, out T1 item1, out T2 item2, out T3 item3, out T4 item4, out T5 item5) + { + if (!DecodeNext(ref reader, out item1)) throw new FormatException("Failed to decode first item"); + if (!DecodeNext(ref reader, out item2)) throw new FormatException("Failed to decode second item"); + if (!DecodeNext(ref reader, out item3)) throw new FormatException("Failed to decode third item"); + if (!DecodeNext(ref reader, out item4)) throw new FormatException("Failed to decode fourth item"); + if (!DecodeNext(ref reader, out item5)) throw new FormatException("Failed to decode fifth item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than four items"); + } + + /// Unpack a key containing six elements + /// Slice that should contain the packed representation of a tuple with six elements + /// Receives the decoded tuple + /// Throws an exception if the tuple is empty of has more than six elements. 
+ public static void DecodeKey(Slice packedKey, out STuple tuple) + { + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack an empty tuple"); + + var reader = new TupleReader(packedKey); + DecodeKey(ref reader, out tuple); + } + + public static void DecodeKey(ref TupleReader reader, out STuple tuple) + { + if (!DecodeNext(ref reader, out T1 item1)) throw new FormatException("Failed to decode first item"); + if (!DecodeNext(ref reader, out T2 item2)) throw new FormatException("Failed to decode second item"); + if (!DecodeNext(ref reader, out T3 item3)) throw new FormatException("Failed to decode third item"); + if (!DecodeNext(ref reader, out T4 item4)) throw new FormatException("Failed to decode fourth item"); + if (!DecodeNext(ref reader, out T5 item5)) throw new FormatException("Failed to decode fifth item"); + if (!DecodeNext(ref reader, out T6 item6)) throw new FormatException("Failed to decode sixth item"); + if (reader.Input.HasMore) throw new FormatException("The key contains more than six items"); + tuple = new STuple(item1, item2, item3, item4, item5, item6); + } + + /// Unpack the next item in the tuple, and advance the cursor + /// Type of the next value in the tuple + /// Reader positionned at the start of the next item to read + /// If decoding succeedsd, receives the decoded value. + /// True if the decoded succeeded (and receives the decoded value). False if the tuple has reached the end. + public static bool DecodeNext(ref TupleReader input, out T value) + { + if (!input.Input.HasMore) + { + value = default(T); + return false; + } + + var slice = TupleParser.ParseNext(ref input); + value = TuplePacker.Deserialize(slice); + return true; + } + + #endregion + + #region Encoders... 
+ + internal class Encoder : IKeyEncoder, IValueEncoder + { + public static readonly Encoder Default = new Encoder(); + + private Encoder() { } + + public IKeyEncoding Encoding => TuPack.Encoding; + + public void WriteKeyTo(ref SliceWriter writer, T key) + { + TupleEncoder.WriteKeysTo(ref writer, key); + } + + public void ReadKeyFrom(ref SliceReader reader, out T key) + { + key = !reader.HasMore + ? default //BUGBUG + : TuPack.DecodeKey(reader.ReadToEnd()); + } + + public Slice EncodeValue(T key) + { + return TupleEncoder.EncodeKey(default(Slice), key); + } + + public T DecodeValue(Slice encoded) + { + if (encoded.IsNullOrEmpty) return default; //BUGBUG + return TuPack.DecodeKey(encoded); + } + + } + + internal class CompositeEncoder : CompositeKeyEncoder + { + + public static readonly CompositeEncoder Default = new CompositeEncoder(); + + private CompositeEncoder() { } + + public override IKeyEncoding Encoding => TuPack.Encoding; + + public override void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2) key) + { + switch (count) + { + case 2: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2); break; + case 1: TupleEncoder.WriteKeysTo(ref writer, key.Item1); break; + default: throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be either 1 or 2"); + } + } + + public override void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2) key) + { + if (count != 1 & count != 2) throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be either 1 or 2"); + + var t = TuPack.Unpack(reader.ReadToEnd()).OfSize(count); + Contract.Assert(t != null); + key.Item1 = t.Get(0); + key.Item2 = count == 2 ? 
t.Get(1) : default; + } + } + + internal class CompositeEncoder : CompositeKeyEncoder + { + + public static readonly CompositeEncoder Default = new CompositeEncoder(); + + private CompositeEncoder() { } + + public override IKeyEncoding Encoding => TuPack.Encoding; + + public override void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3) key) + { + switch (count) + { + case 3: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3); break; + case 2: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2); break; + case 1: TupleEncoder.WriteKeysTo(ref writer, key.Item1); break; + default: throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be between 1 and 3"); + } + } + + public override void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3) key) + { + if (count < 1 | count > 3) throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be between 1 and 3"); + + var t = TuPack.Unpack(reader.ReadToEnd()).OfSize(count); + Contract.Assert(t != null); + key.Item1 = t.Get(0); + key.Item2 = count >= 2 ? t.Get(1) : default; + key.Item3 = count >= 3 ? 
t.Get(2) : default; + } + } + + internal class CompositeEncoder : CompositeKeyEncoder + { + + public static readonly CompositeEncoder Default = new CompositeEncoder(); + + private CompositeEncoder() { } + + public override IKeyEncoding Encoding => TuPack.Encoding; + + public override void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4) key) + { + switch (count) + { + case 4: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3, key.Item4); break; + case 3: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3); break; + case 2: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2); break; + case 1: TupleEncoder.WriteKeysTo(ref writer, key.Item1); break; + default: throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be between 1 and 4"); + } + } + + public override void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4) key) + { + if (count < 1 || count > 4) throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be between 1 and 4"); + + var t = TuPack.Unpack(reader.ReadToEnd()).OfSize(count); + Contract.Assert(t != null); + key.Item1 = t.Get(0); + key.Item2 = count >= 2 ? t.Get(1) : default; + key.Item3 = count >= 3 ? t.Get(2) : default; + key.Item4 = count >= 4 ? 
t.Get(3) : default; + } + } + + internal class CompositeEncoder : CompositeKeyEncoder + { + + public static readonly CompositeEncoder Default = new CompositeEncoder(); + + private CompositeEncoder() { } + + public override IKeyEncoding Encoding => TuPack.Encoding; + + public override void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4, T5) key) + { + switch (count) + { + case 5: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3, key.Item4, key.Item5); break; + case 4: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3, key.Item4); break; + case 3: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3); break; + case 2: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2); break; + case 1: TupleEncoder.WriteKeysTo(ref writer, key.Item1); break; + default: throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be between 1 and 5"); + } + } + + public override void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4, T5) key) + { + if (count < 1 || count > 5) throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be between 1 and 5"); + + var t = TuPack.Unpack(reader.ReadToEnd()).OfSize(count); + Contract.Assert(t != null); + key.Item1 = t.Get(0); + key.Item2 = count >= 2 ? t.Get(1) : default; + key.Item3 = count >= 3 ? t.Get(2) : default; + key.Item4 = count >= 4 ? t.Get(3) : default; + key.Item5 = count >= 5 ? 
t.Get(4) : default; + } + } + + internal class CompositeEncoder : CompositeKeyEncoder + { + + public static readonly CompositeEncoder Default = new CompositeEncoder(); + + private CompositeEncoder() { } + + public override IKeyEncoding Encoding => TuPack.Encoding; + + public override void WriteKeyPartsTo(ref SliceWriter writer, int count, ref (T1, T2, T3, T4, T5, T6) key) + { + switch (count) + { + case 6: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3, key.Item4, key.Item5, key.Item6); break; + case 5: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3, key.Item4, key.Item5); break; + case 4: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3, key.Item4); break; + case 3: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2, key.Item3); break; + case 2: TupleEncoder.WriteKeysTo(ref writer, key.Item1, key.Item2); break; + case 1: TupleEncoder.WriteKeysTo(ref writer, key.Item1); break; + default: throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be between 1 and 6"); + } + } + + public override void ReadKeyPartsFrom(ref SliceReader reader, int count, out (T1, T2, T3, T4, T5, T6) key) + { + if (count < 1 || count > 6) throw new ArgumentOutOfRangeException(nameof(count), count, "Item count must be between 1 and 6"); + + var t = TuPack.Unpack(reader.ReadToEnd()).OfSize(count); + Contract.Assert(t != null); + key.Item1 = t.Get(0); + key.Item2 = count >= 2 ? t.Get(1) : default; + key.Item3 = count >= 3 ? t.Get(2) : default; + key.Item4 = count >= 4 ? t.Get(3) : default; + key.Item5 = count >= 5 ? t.Get(4) : default; + key.Item6 = count >= 6 ? 
t.Get(5) : default; + } + } + + #endregion + + } + +} diff --git a/FoundationDB.Client/Tuples/Encoding/TupleKeyEncoder.cs b/FoundationDB.Client/Tuples/Encoding/TupleKeyEncoder.cs new file mode 100644 index 000000000..c4404dc85 --- /dev/null +++ b/FoundationDB.Client/Tuples/Encoding/TupleKeyEncoder.cs @@ -0,0 +1,237 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace Doxense.Collections.Tuples.Encoding +{ + using System; + using Doxense.Collections.Tuples; + using Doxense.Memory; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; + + /// Encoder for variable-length elements, that uses the Tuple Binary Encoding format + public sealed class TupleKeyEncoder : IDynamicKeyEncoder + { + + internal static TupleKeyEncoder Instance = new TupleKeyEncoder(); + + private TupleKeyEncoder() + { } + + public IKeyEncoding Encoding => TuPack.Encoding; + + public void PackKey(ref SliceWriter writer, TTuple items) + where TTuple : ITuple + { + var tw = new TupleWriter(writer); + TupleEncoder.WriteTo(ref tw, items); + writer = tw.Output; + } + + public void EncodeKey(ref SliceWriter writer, T1 item1) + { + var tw = new TupleWriter(writer); + TuplePacker.SerializeTo(ref tw, item1); + writer = tw.Output; + } + + public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2) + { + var tw = new TupleWriter(writer); + TuplePacker.SerializeTo(ref tw, item1); + TuplePacker.SerializeTo(ref tw, item2); + writer = tw.Output; + } + + public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3) + { + var tw = new TupleWriter(writer); + TuplePacker.SerializeTo(ref tw, item1); + TuplePacker.SerializeTo(ref tw, item2); + TuplePacker.SerializeTo(ref tw, item3); + writer = tw.Output; + } + + public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4) + { + var tw = new TupleWriter(writer); + TuplePacker.SerializeTo(ref tw, item1); + TuplePacker.SerializeTo(ref tw, item2); + TuplePacker.SerializeTo(ref tw, item3); + TuplePacker.SerializeTo(ref tw, item4); + writer = tw.Output; + } + + public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + var tw = new TupleWriter(writer); + TuplePacker.SerializeTo(ref tw, item1); + TuplePacker.SerializeTo(ref tw, item2); + TuplePacker.SerializeTo(ref tw, item3); + TuplePacker.SerializeTo(ref tw, 
item4); + TuplePacker.SerializeTo(ref tw, item5); + writer = tw.Output; + } + + public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + var tw = new TupleWriter(writer); + TuplePacker.SerializeTo(ref tw, item1); + TuplePacker.SerializeTo(ref tw, item2); + TuplePacker.SerializeTo(ref tw, item3); + TuplePacker.SerializeTo(ref tw, item4); + TuplePacker.SerializeTo(ref tw, item5); + TuplePacker.SerializeTo(ref tw, item6); + writer = tw.Output; + } + + public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) + { + var tw = new TupleWriter(writer); + TuplePacker.SerializeTo(ref tw, item1); + TuplePacker.SerializeTo(ref tw, item2); + TuplePacker.SerializeTo(ref tw, item3); + TuplePacker.SerializeTo(ref tw, item4); + TuplePacker.SerializeTo(ref tw, item5); + TuplePacker.SerializeTo(ref tw, item6); + TuplePacker.SerializeTo(ref tw, item7); + writer = tw.Output; + } + + public void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) + { + var tw = new TupleWriter(writer); + TuplePacker.SerializeTo(ref tw, item1); + TuplePacker.SerializeTo(ref tw, item2); + TuplePacker.SerializeTo(ref tw, item3); + TuplePacker.SerializeTo(ref tw, item4); + TuplePacker.SerializeTo(ref tw, item5); + TuplePacker.SerializeTo(ref tw, item6); + TuplePacker.SerializeTo(ref tw, item7); + TuplePacker.SerializeTo(ref tw, item8); + writer = tw.Output; + } + + public ITuple UnpackKey(Slice packed) + { + return TuPack.Unpack(packed); + } + + public T DecodeKey(Slice packed) + { + return TuPack.DecodeKey(packed); + } + + public T DecodeKeyFirst(Slice packed) + { + return TuPack.DecodeFirst(packed); + } + + public T DecodeKeyLast(Slice packed) + { + return TuPack.DecodeLast(packed); + } + + public STuple DecodeKey(Slice packed) + { + return TuPack.DecodeKey(packed); + } + + public STuple DecodeKey(Slice packed) + { + return 
TuPack.DecodeKey(packed); + } + + public STuple DecodeKey(Slice packed) + { + return TuPack.DecodeKey(packed); + } + + public STuple DecodeKey(Slice packed) + { + return TuPack.DecodeKey(packed); + } + + public STuple DecodeKey(Slice packed) + { + return TuPack.DecodeKey(packed); + } + + public (Slice Begin, Slice End) ToRange(Slice prefix) + { + return TuPack.ToRange(prefix); + } + + public (Slice Begin, Slice End) ToRange(Slice prefix, ITuple items) + { + return TuPack.ToPrefixedKeyRange(prefix, items); + } + + public (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1) + { + return TuPack.ToPrefixedKeyRange(prefix, item1); + } + + public (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2) + { + return TuPack.ToPrefixedKeyRange(prefix, item1, item2); + } + + public (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3) + { + return TuPack.ToPrefixedKeyRange(prefix, item1, item2, item3); + } + + public (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4) + { + return TuPack.ToPrefixedKeyRange(prefix, item1, item2, item3, item4); + } + + public (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + return TuPack.ToPrefixedKeyRange(prefix, item1, item2, item3, item4, item5); + } + + public (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + return TuPack.ToPrefixedKeyRange(prefix, item1, item2, item3, item4, item5, item6); + } + + public (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) + { + return TuPack.ToPrefixedKeyRange(prefix, item1, item2, item3, item4, item5, item6, item7); + } + + public (Slice Begin, Slice End) ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) + { + return TuPack.ToPrefixedKeyRange(prefix, item1, item2, item3, item4, 
item5, item6, item7, item8); + } + + } + +} diff --git a/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`1.cs b/FoundationDB.Client/Tuples/Encoding/TupleKeyEncoding.cs similarity index 52% rename from FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`1.cs rename to FoundationDB.Client/Tuples/Encoding/TupleKeyEncoding.cs index 0e969fb6f..f7df06283 100644 --- a/FoundationDB.Client/Subspaces/FdbEncoderSubspacePartition`1.cs +++ b/FoundationDB.Client/Tuples/Encoding/TupleKeyEncoding.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,57 +26,62 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -using System; -using JetBrains.Annotations; - -namespace FoundationDB.Client +namespace Doxense.Collections.Tuples.Encoding { - public struct FdbEncoderSubspacePartition + using System; + using Doxense.Serialization.Encoders; + + /// Encoding that uses the Tuple Binary Encoding format + public sealed class TupleKeyEncoding : ITypeSystem { - public readonly IFdbSubspace Subspace; - public readonly IKeyEncoder Encoder; - public FdbEncoderSubspacePartition([NotNull] IFdbSubspace subspace, [NotNull] IKeyEncoder encoder) + public static readonly TupleKeyEncoding Instance = new TupleKeyEncoding(); + + public string Name => "TuPack"; + + #region Keys... 
+ + public IDynamicKeyEncoder GetDynamicKeyEncoder() { - this.Subspace = subspace; - this.Encoder = encoder; + return TupleKeyEncoder.Instance; } - public IFdbSubspace this[T value] + public IKeyEncoder GetKeyEncoder() { - [NotNull] - get { return ByKey(value); } + return TupleEncoder.Encoder.Default; } - [NotNull] - public IFdbSubspace ByKey(T value) + public ICompositeKeyEncoder GetKeyEncoder() { - return this.Subspace[this.Encoder.EncodeKey(value)]; + return TupleEncoder.CompositeEncoder.Default; } - [NotNull] - public IFdbDynamicSubspace ByKey(T value, [NotNull] IFdbKeyEncoding encoding) + public ICompositeKeyEncoder GetKeyEncoder() { - return FdbSubspace.CreateDynamic(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value)), encoding); + return TupleEncoder.CompositeEncoder.Default; } - [NotNull] - public IFdbDynamicSubspace ByKey(T value, [NotNull] IDynamicKeyEncoder encoder) + public ICompositeKeyEncoder GetKeyEncoder() { - return FdbSubspace.CreateDynamic(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value)), encoder); + return TupleEncoder.CompositeEncoder.Default; } - [NotNull] - public IFdbEncoderSubspace ByKey(T value, [NotNull] IFdbKeyEncoding encoding) + public ICompositeKeyEncoder GetEncoder() { - return FdbSubspace.CreateEncoder(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value)), encoding); + return TupleEncoder.CompositeEncoder.Default; } - [NotNull] - public IFdbEncoderSubspace ByKey(T value, [NotNull] IKeyEncoder encoder) + public ICompositeKeyEncoder GetEncoder() + { + return TupleEncoder.CompositeEncoder.Default; + } + + #endregion + + public IValueEncoder GetValueEncoder() { - return FdbSubspace.CreateEncoder(this.Subspace.ConcatKey(this.Encoder.EncodeKey(value)), encoder); + return TupleEncoder.Encoder.Default; } } -} \ No newline at end of file +} diff --git a/FoundationDB.Client/Layers/Tuples/FdbTuplePacker.cs b/FoundationDB.Client/Tuples/Encoding/TuplePacker.cs similarity index 64% rename from 
FoundationDB.Client/Layers/Tuples/FdbTuplePacker.cs rename to FoundationDB.Client/Tuples/Encoding/TuplePacker.cs index 01962aaad..abf20d6e7 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbTuplePacker.cs +++ b/FoundationDB.Client/Tuples/Encoding/TuplePacker.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,33 +26,58 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -using FoundationDB.Client; -using System; - -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples.Encoding { + using System; + using System.Runtime.CompilerServices; + using Doxense.Memory; + using JetBrains.Annotations; /// Helper class that can serialize values of type to the tuple binary format /// Type of values to be serialized - public static class FdbTuplePacker + public static class TuplePacker { - internal static readonly FdbTuplePackers.Encoder Encoder = FdbTuplePackers.GetSerializer(required: true); + [NotNull] + internal static readonly TuplePackers.Encoder Encoder = TuplePackers.GetSerializer(required: true); - internal static readonly Func Decoder = FdbTuplePackers.GetDeserializer(required: true); + [NotNull] + internal static readonly Func Decoder = TuplePackers.GetDeserializer(required: true); - /// Serialize a into a binary buffer + /// Serialize a using a Tuple Writer /// Target buffer /// Value that will be serialized - /// The buffer does not need to be preallocated. -#if !NET_4_0 - [System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)] -#endif + /// + /// The buffer does not need to be preallocated. + /// This method supports embedded tuples. 
+ /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void SerializeTo(ref TupleWriter writer, T value) { Encoder(ref writer, value); } + public static void SerializeBoxedTo(ref TupleWriter writer, object value) + { + Encoder(ref writer, (T) value); + } + + /// Serialize a into a binary buffer + /// Target buffer + /// Value that will be serialized + /// + /// The buffer does not need to be preallocated. + /// This method DOES NOT support embedded tupels, and assumes that we are serializing a top-level Tuple! + /// If you need support for embedded tuples, use instead! + /// + public static void SerializeTo(ref SliceWriter writer, T value) + { + var tw = new TupleWriter(writer); + Encoder(ref tw, value); + writer = tw.Output; + //REVIEW: we loose the depth information here! :( + } + /// Serialize a value of type into a tuple segment /// Value that will be serialized /// Slice that contains the binary representation of @@ -66,14 +91,11 @@ public static Slice Serialize(T value) /// Deserialize a tuple segment into a value of type /// Slice that contains the binary representation of a tuple item /// Decoded value, or an exception if the item type is not compatible -#if !NET_4_0 - [System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)] -#endif + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static T Deserialize(Slice slice) { return Decoder(slice); } } - } diff --git a/FoundationDB.Client/Tuples/Encoding/TuplePackers.cs b/FoundationDB.Client/Tuples/Encoding/TuplePackers.cs new file mode 100644 index 000000000..0664d1edc --- /dev/null +++ b/FoundationDB.Client/Tuples/Encoding/TuplePackers.cs @@ -0,0 +1,1826 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace Doxense.Collections.Tuples.Encoding +{ + using System; + using System.Collections.Concurrent; + using System.Collections.Generic; + using System.Globalization; + using System.Linq; + using System.Linq.Expressions; + using System.Reflection; + using System.Runtime.CompilerServices; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; + using FoundationDB.Client; + using JetBrains.Annotations; + + /// Helper methods used during serialization of values to the tuple binary format + public static class TuplePackers + { + + #region Serializers... + + public delegate void Encoder(ref TupleWriter writer, T value); + + /// Returns a lambda that will be able to serialize values of type + /// Type of values to serialize + /// Reusable action that knows how to serialize values of type into binary buffers, or that throws an exception if the type is not supported + [CanBeNull, ContractAnnotation("true => notnull")] + internal static Encoder GetSerializer(bool required) + { + //note: this method is only called once per initializing of TuplePackers to create the cached delegate. + + var encoder = (Encoder) GetSerializerFor(typeof(T)); + if (encoder == null && required) + { + encoder = delegate { throw new InvalidOperationException($"Does not know how to serialize values of type '{typeof(T).Name}' into keys"); }; + } + return encoder; + } + + [CanBeNull] + private static Delegate GetSerializerFor([NotNull] Type type) + { + Contract.NotNull(type, nameof(type)); + + if (type == typeof(object)) + { // return a generic serializer that will inspect the runtime type of the object + return new Encoder(SerializeObjectTo); + } + + // look for well-known types that have their own (non-generic) TuplePackers.SerializeTo(...) 
method + var method = typeof(TuplePackers).GetMethod(nameof(SerializeTo), BindingFlags.Static | BindingFlags.Public, binder: null, types: new[] { typeof(TupleWriter).MakeByRefType(), type }, modifiers: null); + if (method != null) + { // we have a direct serializer + return method.CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); + } + + // maybe it is a nullable type ? + var nullableType = Nullable.GetUnderlyingType(type); + if (nullableType != null) + { // nullable types can reuse the underlying type serializer + method = typeof(TuplePackers).GetMethod(nameof(SerializeNullableTo), BindingFlags.Static | BindingFlags.Public); + if (method != null) + { + return method.MakeGenericMethod(nullableType).CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); + } + } + + // maybe it is a tuple ? + if (typeof(ITuple).IsAssignableFrom(type)) + { + if (type == typeof(STuple) || (type.Name.StartsWith(nameof(STuple) + "`", StringComparison.Ordinal) && type.Namespace == typeof(STuple).Namespace)) + { // well-known STuple struct + var typeArgs = type.GetGenericArguments(); + method = FindSTupleSerializerMethod(typeArgs); + if (method != null) + { + return method.MakeGenericMethod(typeArgs).CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); + } + } + + // will use the default ITuple implementation + method = typeof(TuplePackers).GetMethod(nameof(SerializeTupleTo), BindingFlags.Static | BindingFlags.Public); + if (method != null) + { + return method.MakeGenericMethod(type).CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); + } + } + + // Can it transform itself into a tuple? + if (typeof(ITupleFormattable).IsAssignableFrom(type)) + { + // If so, try to use the corresponding TuplePackers.SerializeFormattableTo(...) 
method + method = typeof(TuplePackers).GetMethod(nameof(SerializeFormattableTo), BindingFlags.Static | BindingFlags.Public); + if (method != null) + { + return method.CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); + } + } + + // ValueTuple + if (type == typeof(ValueTuple) || (type.Name.StartsWith(nameof(System.ValueTuple) + "`", StringComparison.Ordinal) && type.Namespace == "System")) + { + var typeArgs = type.GetGenericArguments(); + method = FindValueTupleSerializerMethod(typeArgs); + if (method != null) + { + return method.MakeGenericMethod(typeArgs).CreateDelegate(typeof(Encoder<>).MakeGenericType(type)); + } + } + + // TODO: look for a static SerializeTo(ref TupleWriter, T) method on the type itself ? + + // no luck.. + return null; + } + + private static MethodInfo FindSTupleSerializerMethod(Type[] args) + { + //note: we want to find the correct SerializeSTuple<...>(ref TupleWriter, (...,), but this cannot be done with Type.GetMethod(...) directly + // => we have to scan for all methods with the correct name, and the same number of Type Arguments than the ValueTuple. + return typeof(TuplePackers) + .GetMethods(BindingFlags.Static | BindingFlags.Public) + .SingleOrDefault(m => m.Name == nameof(SerializeSTupleTo) && m.GetGenericArguments().Length == args.Length); + } + + private static MethodInfo FindValueTupleSerializerMethod(Type[] args) + { + //note: we want to find the correct SerializeValueTuple<...>(ref TupleWriter, (...,), but this cannot be done with Type.GetMethod(...) directly + // => we have to scan for all methods with the correct name, and the same number of Type Arguments than the ValueTuple. 
+ return typeof(TuplePackers) + .GetMethods(BindingFlags.Static | BindingFlags.Public) + .SingleOrDefault(m => m.Name == nameof(SerializeValueTupleTo) && m.GetGenericArguments().Length == args.Length); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static void SerializeTo(ref TupleWriter writer, T value) + { + // + // - In Release builds, this will be cleaned up and inlined by the JIT as a direct invokatino of the correct WriteXYZ method + // - In Debug builds, we have to disabled this, because it would be too slow + //IMPORTANT: only ValueTypes and they must have a corresponding Write$TYPE$(ref TupleWriter, $TYPE) in TupleParser! +#if !DEBUG + if (typeof(T) == typeof(bool)) { TupleParser.WriteBool(ref writer, (bool) (object) value); return; } + if (typeof(T) == typeof(int)) { TupleParser.WriteInt32(ref writer, (int) (object) value); return; } + if (typeof(T) == typeof(long)) { TupleParser.WriteInt64(ref writer, (long) (object) value); return; } + if (typeof(T) == typeof(uint)) { TupleParser.WriteUInt32(ref writer, (uint) (object) value); return; } + if (typeof(T) == typeof(ulong)) { TupleParser.WriteUInt64(ref writer, (ulong) (object) value); return; } + if (typeof(T) == typeof(short)) { TupleParser.WriteInt32(ref writer, (short) (object) value); return; } + if (typeof(T) == typeof(ushort)) { TupleParser.WriteUInt32(ref writer, (ushort) (object) value); return; } + if (typeof(T) == typeof(sbyte)) { TupleParser.WriteInt32(ref writer, (sbyte) (object) value); return; } + if (typeof(T) == typeof(byte)) { TupleParser.WriteUInt32(ref writer, (byte) (object) value); return; } + if (typeof(T) == typeof(float)) { TupleParser.WriteSingle(ref writer, (float) (object) value); return; } + if (typeof(T) == typeof(double)) { TupleParser.WriteDouble(ref writer, (double) (object) value); return; } + if (typeof(T) == typeof(char)) { TupleParser.WriteChar(ref writer, (char) (object) value); return; } + if (typeof(T) == typeof(Guid)) { 
TupleParser.WriteGuid(ref writer, (Guid) (object) value); return; } + if (typeof(T) == typeof(Uuid128)) { TupleParser.WriteUuid128(ref writer, (Uuid128) (object) value); return; } + if (typeof(T) == typeof(Uuid64)) { TupleParser.WriteUuid64(ref writer, (Uuid64) (object) value); return; } + if (typeof(T) == typeof(decimal)) { TupleParser.WriteDecimal(ref writer, (decimal) (object) value); return; } + if (typeof(T) == typeof(Slice)) { TupleParser.WriteBytes(ref writer, (Slice) (object) value); return; } + + if (typeof(T) == typeof(bool?)) { TupleParser.WriteBool(ref writer, (bool?) (object) value); return; } + if (typeof(T) == typeof(int?)) { TupleParser.WriteInt32(ref writer, (int?) (object) value); return; } + if (typeof(T) == typeof(long?)) { TupleParser.WriteInt64(ref writer, (long?) (object) value); return; } + if (typeof(T) == typeof(uint?)) { TupleParser.WriteUInt32(ref writer, (uint?) (object) value); return; } + if (typeof(T) == typeof(ulong?)) { TupleParser.WriteUInt64(ref writer, (ulong?) (object) value); return; } + if (typeof(T) == typeof(short?)) { TupleParser.WriteInt32(ref writer, (short?) (object) value); return; } + if (typeof(T) == typeof(ushort?)) { TupleParser.WriteUInt32(ref writer, (ushort?) (object) value); return; } + if (typeof(T) == typeof(sbyte?)) { TupleParser.WriteInt32(ref writer, (sbyte?) (object) value); return; } + if (typeof(T) == typeof(byte?)) { TupleParser.WriteUInt32(ref writer, (byte?) (object) value); return; } + if (typeof(T) == typeof(float?)) { TupleParser.WriteSingle(ref writer, (float?) (object) value); return; } + if (typeof(T) == typeof(double?)) { TupleParser.WriteDouble(ref writer, (double?) (object) value); return; } + if (typeof(T) == typeof(char?)) { TupleParser.WriteChar(ref writer, (char?) (object) value); return; } + if (typeof(T) == typeof(Guid?)) { TupleParser.WriteGuid(ref writer, (Guid?) (object) value); return; } + if (typeof(T) == typeof(Uuid128?)) { TupleParser.WriteUuid128(ref writer, (Uuid128?) 
(object) value); return; } + if (typeof(T) == typeof(Uuid64?)) { TupleParser.WriteUuid64(ref writer, (Uuid64?) (object) value); return; } + if (typeof(T) == typeof(decimal?)) { TupleParser.WriteDecimal(ref writer, (decimal?) (object) value); return; } +#endif + // + + // invoke the encoder directly + TuplePacker.Encoder(ref writer, value); + } + + /// Serialize a nullable value, by checking for null at runtime + /// Underling type of the nullable type + /// Target buffer + /// Nullable value to serialize + /// Uses the underlying type's serializer if the value is not null + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeNullableTo(ref TupleWriter writer, T? value) + where T : struct + { + if (value == null) + TupleParser.WriteNil(ref writer); + else + SerializeTo(ref writer, value.Value); + } + + /// Serialize an untyped object, by checking its type at runtime [VERY SLOW] + /// Target buffer + /// Untyped value whose type will be inspected at runtime + /// + /// May throw at runtime if the type is not supported. + /// This method will be very slow! Please consider using typed tuples instead! 
+ /// + public static void SerializeObjectTo(ref TupleWriter writer, object value) + { + if (value == null) + { // null value + // includes all null references to ref types, as nullables where HasValue == false + TupleParser.WriteNil(ref writer); + return; + } + GetBoxedEncoder(value.GetType())(ref writer, value); + } + + private static Encoder GetBoxedEncoder(Type type) + { + if (!BoxedEncoders.TryGetValue(type, out var encoder)) + { + encoder = CreateBoxedEncoder(type); + BoxedEncoders.TryAdd(type, encoder); + } + return encoder; + } + + private static ConcurrentDictionary> BoxedEncoders { get; } = GetDefaultBoxedEncoders(); + + private static ConcurrentDictionary> GetDefaultBoxedEncoders() + { + var encoders = new ConcurrentDictionary> + { + [typeof(bool)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (bool) value), + [typeof(char)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (char) value), + [typeof(string)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (string) value), + [typeof(sbyte)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (sbyte) value), + [typeof(short)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (short) value), + [typeof(int)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (int) value), + [typeof(long)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (long) value), + [typeof(byte)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (byte) value), + [typeof(ushort)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (ushort) value), + [typeof(uint)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (uint) value), + [typeof(ulong)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (ulong) value), + [typeof(float)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (float) value), + [typeof(double)] = (ref 
TupleWriter writer, object value) => SerializeTo(ref writer, (double) value), + [typeof(decimal)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (decimal) value), + [typeof(Slice)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (Slice) value), + [typeof(byte[])] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (byte[]) value), + [typeof(Guid)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (Guid) value), + [typeof(Uuid128)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (Uuid128) value), + [typeof(Uuid64)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (Uuid64) value), + [typeof(TimeSpan)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (TimeSpan) value), + [typeof(DateTime)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (DateTime) value), + [typeof(DateTimeOffset)] = (ref TupleWriter writer, object value) => SerializeTo(ref writer, (DateTimeOffset) value), + [typeof(ITuple)] = (ref TupleWriter writer, object value) => SerializeTupleTo(ref writer, (ITuple) value), + [typeof(ITupleFormattable)] = (ref TupleWriter writer, object value) => SerializeTupleTo(ref writer, (ITuple) value), + [typeof(DBNull)] = (ref TupleWriter writer, object value) => TupleParser.WriteNil(ref writer) + }; + + return encoders; + } + + private static Encoder CreateBoxedEncoder(Type type) + { + var m = typeof(TuplePacker<>).MakeGenericType(type).GetMethod(nameof(TuplePacker.SerializeBoxedTo)); + Contract.Assert(m != null); + + var writer = Expression.Parameter(typeof(TupleWriter).MakeByRefType(), "writer"); + var value = Expression.Parameter(typeof(object), "value"); + + var body = Expression.Call(m, writer, value); + return Expression.Lambda>(body, writer, value).Compile(); + } + + /// Writes a slice as a byte[] array + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, 
Slice value) + { + TupleParser.WriteBytes(ref writer, value); + } + + /// Writes a byte[] array + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, byte[] value) + { + TupleParser.WriteBytes(ref writer, value); + } + + /// Writes an array segment as a byte[] array + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, ArraySegment value) + { + TupleParser.WriteBytes(ref writer, value); + } + + /// Writes a char as Unicode string + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, char value) + { + TupleParser.WriteChar(ref writer, value); + } + + /// Writes a boolean as an integer + /// Uses 0 for false, and -1 for true + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, bool value) + { + TupleParser.WriteBool(ref writer, value); + } + + /// Writes a boolean as an integer or null + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, bool? value) + { + //REVIEW: only method for a nullable type? add others? or remove this one? 
+ TupleParser.WriteBool(ref writer, value); + } + + /// Writes a signed byte as an integer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, sbyte value) + { + TupleParser.WriteInt32(ref writer, value); + } + + /// Writes an unsigned byte as an integer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, byte value) + { + TupleParser.WriteByte(ref writer, value); + } + + /// Writes a signed word as an integer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, short value) + { + TupleParser.WriteInt32(ref writer, value); + } + + /// Writes an unsigned word as an integer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, ushort value) + { + TupleParser.WriteUInt32(ref writer, value); + } + + /// Writes a signed int as an integer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, int value) + { + TupleParser.WriteInt32(ref writer, value); + } + + /// Writes an unsigned int as an integer + public static void SerializeTo(ref TupleWriter writer, uint value) + { + TupleParser.WriteUInt32(ref writer, value); + } + + /// Writes a signed long as an integer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, long value) + { + TupleParser.WriteInt64(ref writer, value); + } + + /// Writes an unsigned long as an integer + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, ulong value) + { + TupleParser.WriteUInt64(ref writer, value); + } + + /// Writes a 32-bit IEEE floating point number + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, float value) + { + TupleParser.WriteSingle(ref writer, value); + } + + /// Writes a 
64-bit IEEE floating point number + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, double value) + { + TupleParser.WriteDouble(ref writer, value); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, decimal value) + { + TupleParser.WriteDecimal(ref writer, value); + } + + /// Writes a string as an Unicode string + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, string value) + { + TupleParser.WriteString(ref writer, value); + } + + /// Writes a DateTime converted to the number of days since the Unix Epoch and stored as a 64-bit decimal + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, DateTime value) + { + // The problem of serializing DateTime: TimeZone? Precision? + // - Since we are going to lose the TimeZone infos anyway, we can just store everything in UTC and let the caller deal with it + // - DateTime in .NET uses Ticks which produce numbers too large to fit in the 56 bits available in JavaScript + // - Most other *nix uses the number of milliseconds since 1970-Jan-01 UTC, but if we store as an integer we will lose some precision (rounded to nearest millisecond) + // - We could store the number of milliseconds as a floating point value, which would require support of Floating Points in the Tuple Encoding (currently a Draft) + // - Other database engines store dates as a number of DAYS since Epoch, using a floating point number. This allows for quickly extracting the date by truncating the value, and the time by using the decimal part + + // Right now, we will store the date as the number of DAYS since Epoch, using a 64-bit float. 
+ // => storing a number of ticks would be MS-only anyway (56-bit limit in JS) + // => JS binding MAY support decoding of 64-bit floats in the future, in which case the value would be preserved exactly. + + const long UNIX_EPOCH_EPOCH = 621355968000000000L; + TupleParser.WriteDouble(ref writer, (value.ToUniversalTime().Ticks - UNIX_EPOCH_EPOCH) / (double)TimeSpan.TicksPerDay); + } + + /// Writes a TimeSpan converted to to a number seconds encoded as a 64-bit decimal + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, TimeSpan value) + { + // We have the same precision problem with storing DateTimes: + // - Storing the number of ticks keeps the exact value, but is Windows-centric + // - Storing the number of milliseconds as an integer will round the precision to 1 millisecond, which is not acceptable + // - We could store the the number of milliseconds as a floating point value, which would require support of Floating Points in the Tuple Encoding (currently a Draft) + // - It is frequent for JSON APIs and other database engines to represent durations as a number of SECONDS, using a floating point number. + + // Right now, we will store the duration as the number of seconds, using a 64-bit float + + TupleParser.WriteDouble(ref writer, value.TotalSeconds); + } + + /// Writes a Guid as a 128-bit UUID + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, Guid value) + { + //REVIEW: should we consider serializing Guid.Empty as <14> (integer 0) ? or maybe <01><00> (empty bytestring) ? 
+ // => could spare ~16 bytes per key in indexes on GUID properties that are frequently missing or empty (== default(Guid)) + TupleParser.WriteGuid(ref writer, value); + } + + /// Writes a Uuid as a 128-bit UUID + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, Uuid128 value) + { + TupleParser.WriteUuid128(ref writer, value); + } + + /// Writes a Uuid as a 64-bit UUID + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void SerializeTo(ref TupleWriter writer, Uuid64 value) + { + TupleParser.WriteUuid64(ref writer, value); + } + + public static void SerializeTo(ref TupleWriter writer, VersionStamp value) + { + TupleParser.WriteVersionStamp(ref writer, value); + } + + /// Writes an IPaddress as a 32-bit (IPv4) or 128-bit (IPv6) byte array + public static void SerializeTo(ref TupleWriter writer, System.Net.IPAddress value) + { + TupleParser.WriteBytes(ref writer, value?.GetAddressBytes()); + } + + /// Serialize an embedded tuples + public static void SerializeTupleTo(ref TupleWriter writer, TTuple tuple) + where TTuple : ITuple + { + Contract.Requires(tuple != null); + + TupleParser.BeginTuple(ref writer); + TupleEncoder.WriteTo(ref writer, tuple); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeSTupleTo(ref TupleWriter writer, STuple tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeSTupleTo(ref TupleWriter writer, STuple tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeSTupleTo(ref TupleWriter writer, STuple tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + SerializeTo(ref writer, tuple.Item3); + TupleParser.EndTuple(ref writer); 
+ } + + public static void SerializeSTupleTo(ref TupleWriter writer, STuple tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + SerializeTo(ref writer, tuple.Item3); + SerializeTo(ref writer, tuple.Item4); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeSTupleTo(ref TupleWriter writer, STuple tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + SerializeTo(ref writer, tuple.Item3); + SerializeTo(ref writer, tuple.Item4); + SerializeTo(ref writer, tuple.Item5); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeSTupleTo(ref TupleWriter writer, STuple tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + SerializeTo(ref writer, tuple.Item3); + SerializeTo(ref writer, tuple.Item4); + SerializeTo(ref writer, tuple.Item5); + SerializeTo(ref writer, tuple.Item6); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeTupleFormattableTo(ref TupleWriter writer, TFormattable formattable) + where TFormattable : ITupleFormattable + { + var tuple = formattable.ToTuple(); + if (tuple == null) throw new InvalidOperationException($"An instance of type '{formattable.GetType().Name}' returned a null Tuple while serialiazing"); + + TupleParser.BeginTuple(ref writer); + TupleEncoder.WriteTo(ref writer, tuple); + TupleParser.EndTuple(ref writer); + } + + /// Serialize an embedded tuple formattable + public static void SerializeFormattableTo(ref TupleWriter writer, ITupleFormattable formattable) + { + if (formattable == null) + { + TupleParser.WriteNil(ref writer); + return; + } + + var tuple = formattable.ToTuple(); + if (tuple == null) throw new InvalidOperationException($"Custom formatter {formattable.GetType().Name}.ToTuple() cannot return null"); + + TupleParser.BeginTuple(ref writer); + 
TupleEncoder.WriteTo(ref writer, tuple); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeValueTupleTo(ref TupleWriter writer, ValueTuple tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeValueTupleTo(ref TupleWriter writer, (T1, T2) tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeValueTupleTo(ref TupleWriter writer, (T1, T2, T3) tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + SerializeTo(ref writer, tuple.Item3); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeValueTupleTo(ref TupleWriter writer, (T1, T2, T3, T4) tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + SerializeTo(ref writer, tuple.Item3); + SerializeTo(ref writer, tuple.Item4); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeValueTupleTo(ref TupleWriter writer, (T1, T2, T3, T4, T5) tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + SerializeTo(ref writer, tuple.Item3); + SerializeTo(ref writer, tuple.Item4); + SerializeTo(ref writer, tuple.Item5); + TupleParser.EndTuple(ref writer); + } + + public static void SerializeValueTupleTo(ref TupleWriter writer, (T1, T2, T3, T4, T5, T6) tuple) + { + TupleParser.BeginTuple(ref writer); + SerializeTo(ref writer, tuple.Item1); + SerializeTo(ref writer, tuple.Item2); + SerializeTo(ref writer, tuple.Item3); + SerializeTo(ref writer, tuple.Item4); + SerializeTo(ref writer, tuple.Item5); + SerializeTo(ref writer, tuple.Item6); + TupleParser.EndTuple(ref writer); + } + + #endregion + + #region 
Deserializers... + + private static readonly Dictionary WellKnownUnpackers = InitializeDefaultUnpackers(); + + [NotNull] + private static Dictionary InitializeDefaultUnpackers() + { + var map = new Dictionary + { + [typeof(Slice)] = new Func(TuplePackers.DeserializeSlice), + [typeof(byte[])] = new Func(TuplePackers.DeserializeBytes), + [typeof(bool)] = new Func(TuplePackers.DeserializeBoolean), + [typeof(string)] = new Func(TuplePackers.DeserializeString), + [typeof(char)] = new Func(TuplePackers.DeserializeChar), + [typeof(sbyte)] = new Func(TuplePackers.DeserializeSByte), + [typeof(short)] = new Func(TuplePackers.DeserializeInt16), + [typeof(int)] = new Func(TuplePackers.DeserializeInt32), + [typeof(long)] = new Func(TuplePackers.DeserializeInt64), + [typeof(byte)] = new Func(TuplePackers.DeserializeByte), + [typeof(ushort)] = new Func(TuplePackers.DeserializeUInt16), + [typeof(uint)] = new Func(TuplePackers.DeserializeUInt32), + [typeof(ulong)] = new Func(TuplePackers.DeserializeUInt64), + [typeof(float)] = new Func(TuplePackers.DeserializeSingle), + [typeof(double)] = new Func(TuplePackers.DeserializeDouble), + [typeof(Guid)] = new Func(TuplePackers.DeserializeGuid), + [typeof(Uuid128)] = new Func(TuplePackers.DeserializeUuid128), + [typeof(Uuid64)] = new Func(TuplePackers.DeserializeUuid64), + [typeof(TimeSpan)] = new Func(TuplePackers.DeserializeTimeSpan), + [typeof(DateTime)] = new Func(TuplePackers.DeserializeDateTime), + [typeof(System.Net.IPAddress)] = new Func(TuplePackers.DeserializeIPAddress), + [typeof(VersionStamp)] = new Func(TuplePackers.DeserializeVersionStamp), + [typeof(ITuple)] = new Func(TuplePackers.DeserializeTuple), + }; + + // add Nullable versions for all these types + return map; + } + + /// Returns a lambda that will be able to serialize values of type + /// Type of values to serialize + /// Reusable action that knows how to serialize values of type into binary buffers, or an exception if the type is not supported + [NotNull] + internal 
static Func GetDeserializer(bool required) + { + Type type = typeof(T); + + if (WellKnownUnpackers.TryGetValue(type, out var decoder)) + { // We already know how to decode this type + return (Func) decoder; + } + + // Nullable + var underlyingType = Nullable.GetUnderlyingType(typeof(T)); + if (underlyingType != null && WellKnownUnpackers.TryGetValue(underlyingType, out decoder)) + { + return (Func) MakeNullableDeserializer(type, underlyingType, decoder); + } + + // STuple<...> + if (typeof(ITuple).IsAssignableFrom(type)) + { + if (type.IsValueType && type.IsGenericType && type.Name.StartsWith(nameof(STuple) + "`", StringComparison.Ordinal)) + return (Func) MakeSTupleDeserializer(type); + } + + if ((type.Name == nameof(ValueTuple) || type.Name.StartsWith(nameof(ValueTuple) + "`", StringComparison.Ordinal)) && type.Namespace == "System") + { + return (Func) MakeValueTupleDeserializer(type); + } + + if (required) + { // will throw at runtime + return MakeNotSupportedDeserializer(); + } + // when all else fails... 
+ return MakeConvertBoxedDeserializer(); + } + + [Pure, NotNull] + private static Func MakeNotSupportedDeserializer() + { + return (_) => throw new InvalidOperationException($"Does not know how to deserialize keys into values of type {typeof(T).Name}"); + } + + [Pure, NotNull] + private static Func MakeConvertBoxedDeserializer() + { + return (value) => TypeConverters.ConvertBoxed(DeserializeBoxed(value)); + } + + /// Check if a tuple segment is the equivalent of 'Nil' + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static bool IsNilSegment(Slice slice) + { + return slice.IsNullOrEmpty || slice[0] == TupleTypes.Nil; + } + + [Pure, NotNull] + private static Delegate MakeNullableDeserializer([NotNull] Type nullableType, [NotNull] Type type, [NotNull] Delegate decoder) + { + Contract.Requires(nullableType != null && type != null && decoder != null); + // We have a Decoder of T, but we have to transform it into a Decoder for Nullable, which returns null if the slice is "nil", or falls back to the underlying decoder if the slice contains something + + var prmSlice = Expression.Parameter(typeof(Slice), "slice"); + var body = Expression.Condition( + // IsNilSegment(slice) ? 
+ Expression.Call(typeof(TuplePackers).GetMethod(nameof(IsNilSegment), BindingFlags.Static | BindingFlags.NonPublic), prmSlice), + // True => default(Nullable) + Expression.Default(nullableType), + // False => decoder(slice) + Expression.Convert(Expression.Invoke(Expression.Constant(decoder), prmSlice), nullableType) + ); + + return Expression.Lambda(body, prmSlice).Compile(); + } + + [Pure, NotNull] + private static Delegate MakeSTupleDeserializer(Type type) + { + Contract.Requires(type != null); + + // (slice) => TuPack.DeserializeTuple(slice) + + var targs = type.GetGenericArguments(); + var method = typeof(TuplePackers) + .GetMethods() + .Single(m => + { // find the matching "DeserializeTuple(Slice)" method that we want to call + if (m.Name != nameof(DeserializeTuple)) return false; + if (!m.IsGenericMethod || m.GetGenericArguments().Length != targs.Length) return false; + var args = m.GetParameters(); + if (args.Length != 1 && args[0].ParameterType != typeof(Slice)) return false; + return true; + }) + .MakeGenericMethod(targs); + + var prmSlice = Expression.Parameter(typeof(Slice), "slice"); + var body = Expression.Call(method, prmSlice); + + return Expression.Lambda(body, prmSlice).Compile(); + } + + [Pure, NotNull] + private static Delegate MakeValueTupleDeserializer(Type type) + { + Contract.Requires(type != null); + + // (slice) => TuPack.DeserializeValueTuple(slice) + + var targs = type.GetGenericArguments(); + var method = typeof(TuplePackers) + .GetMethods() + .Single(m => + { // find the matching "DeserializeValueTuple(Slice)" method that we want to call + if (m.Name != nameof(DeserializeValueTuple)) return false; + if (!m.IsGenericMethod || m.GetGenericArguments().Length != targs.Length) return false; + var args = m.GetParameters(); + if (args.Length != 1 && args[0].ParameterType != typeof(Slice)) return false; + return true; + }) + .MakeGenericMethod(targs); + + var prmSlice = Expression.Parameter(typeof(Slice), "slice"); + var body = 
Expression.Call(method, prmSlice); + + return Expression.Lambda(body, prmSlice).Compile(); + } + + /// Deserialize a packed element into an object by choosing the most appropriate type at runtime + /// Slice that contains a single packed element + /// Decoded element, in the type that is the best fit. + /// You should avoid working with untyped values as much as possible! Blindly casting the returned object may be problematic because this method may need to return very large intergers as Int64 or even UInt64. + [CanBeNull] + public static object DeserializeBoxed(Slice slice) + { + if (slice.IsNullOrEmpty) return null; + + int type = slice[0]; + if (type <= TupleTypes.IntPos8) + { + if (type >= TupleTypes.IntNeg8) return TupleParser.ParseInt64(type, slice); + + switch (type) + { + case TupleTypes.Nil: return null; + case TupleTypes.Bytes: return TupleParser.ParseBytes(slice); + case TupleTypes.Utf8: return TupleParser.ParseUnicode(slice); + case TupleTypes.TupleStart: return TupleParser.ParseTuple(slice); + } + } + else + { + switch (type) + { + case TupleTypes.Single: return TupleParser.ParseSingle(slice); + case TupleTypes.Double: return TupleParser.ParseDouble(slice); + //TODO: Triple + case TupleTypes.Decimal: return TupleParser.ParseDecimal(slice); + case TupleTypes.Uuid128: return TupleParser.ParseGuid(slice); + case TupleTypes.Uuid64: return TupleParser.ParseUuid64(slice); + case TupleTypes.VersionStamp80: return TupleParser.ParseVersionStamp(slice); + case TupleTypes.VersionStamp96: return TupleParser.ParseVersionStamp(slice); + } + } + + throw new FormatException($"Cannot convert tuple segment with unknown type code 0x{type:X}"); + } + + /// Deserialize a slice into a type that implements ITupleFormattable + /// Type of a class that must implement ITupleFormattable and have a default constructor + /// Slice that contains a single packed element + /// Decoded value of type + /// The type must have a default parameter-less constructor in order to be created. 
+ public static T DeserializeFormattable(Slice slice) + where T : ITupleFormattable, new() + { + if (TuplePackers.IsNilSegment(slice)) + { + return default(T); + } + + var tuple = TupleParser.ParseTuple(slice); + var value = new T(); + value.FromTuple(tuple); + return value; + } + + /// Deserialize a slice into a type that implements ITupleFormattable, using a custom factory method + /// Type of a class that must implement ITupleFormattable + /// Slice that contains a single packed element + /// Lambda that will be called to construct a new instance of values of type + /// Decoded value of type + public static T DeserializeFormattable(Slice slice, [NotNull] Func factory) + where T : ITupleFormattable + { + var tuple = TupleParser.ParseTuple(slice); + var value = factory(); + value.FromTuple(tuple); + return value; + } + + /// Deserialize a tuple segment into a Slice + public static Slice DeserializeSlice(Slice slice) + { + // Convert the tuple value into a sensible Slice representation. + // The behavior should be equivalent to calling the corresponding Slice.From{TYPE}(TYPE value) + + if (slice.IsNullOrEmpty) return Slice.Nil; //TODO: fail ? 
+ + byte type = slice[0]; + switch(type) + { + case TupleTypes.Nil: return Slice.Nil; + case TupleTypes.Bytes: return TupleParser.ParseBytes(slice); + case TupleTypes.Utf8: return Slice.FromString(TupleParser.ParseUnicode(slice)); + + case TupleTypes.Single: return Slice.FromSingle(TupleParser.ParseSingle(slice)); + case TupleTypes.Double: return Slice.FromDouble(TupleParser.ParseDouble(slice)); + //TODO: triple + case TupleTypes.Decimal: return Slice.FromDecimal(TupleParser.ParseDecimal(slice)); + + case TupleTypes.Uuid128: return Slice.FromGuid(TupleParser.ParseGuid(slice)); + case TupleTypes.Uuid64: return Slice.FromUuid64(TupleParser.ParseUuid64(slice)); + } + + if (type <= TupleTypes.IntPos8 && type >= TupleTypes.IntNeg8) + { + if (type >= TupleTypes.IntBase) return Slice.FromInt64(DeserializeInt64(slice)); + return Slice.FromUInt64(DeserializeUInt64(slice)); + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a Slice"); + } + + /// Deserialize a tuple segment into a byte array + [CanBeNull, MethodImpl(MethodImplOptions.AggressiveInlining)] //REVIEW: because of Slice.GetBytes() + public static byte[] DeserializeBytes(Slice slice) + { + return DeserializeSlice(slice).GetBytes(); + } + + /// Deserialize a tuple segment into a tuple + [CanBeNull] + public static ITuple DeserializeTuple(Slice slice) + { + if (slice.IsNullOrEmpty) return null; + + byte type = slice[0]; + switch(type) + { + case TupleTypes.Nil: + { + return null; + } + case TupleTypes.Bytes: + { + return TupleEncoder.Unpack(TupleParser.ParseBytes(slice)); + } + case TupleTypes.TupleStart: + { + return TupleParser.ParseTuple(slice); + } + default: + { + throw new FormatException("Cannot convert tuple segment into a Tuple"); + } + } + } + + [Pure] + public static STuple DeserializeTuple(Slice slice) + { + var res = default(STuple); + if (slice.IsPresent) + { + byte type = slice[0]; + switch (type) + { + case TupleTypes.Nil: + { + break; + } + case TupleTypes.Bytes: 
+ { + TupleEncoder.DecodeKey(TupleParser.ParseBytes(slice), out res); + break; + } + case TupleTypes.TupleStart: + { + var reader = TupleReader.Embedded(slice); + TupleEncoder.DecodeKey(ref reader, out res); + break; + } + default: + { + throw new FormatException($"Cannot convert tuple segment into a {res.GetType().Name}"); + } + } + } + return res; + } + + [Pure] + public static STuple DeserializeTuple(Slice slice) + { + var res = default(STuple); + if (slice.IsPresent) + { + byte type = slice[0]; + switch (type) + { + case TupleTypes.Nil: + { + break; + } + case TupleTypes.Bytes: + { + TupleEncoder.DecodeKey(TupleParser.ParseBytes(slice), out res); + break; + } + case TupleTypes.TupleStart: + { + var reader = TupleReader.Embedded(slice); + TupleEncoder.DecodeKey(ref reader, out res); + break; + } + default: + { + throw new FormatException($"Cannot convert tuple segment into a {res.GetType().Name}"); + } + } + } + return res; + } + + [Pure] + public static STuple DeserializeTuple(Slice slice) + { + var res = default(STuple); + if (slice.IsPresent) + { + byte type = slice[0]; + switch (type) + { + case TupleTypes.Nil: + { + break; + } + case TupleTypes.Bytes: + { + TupleEncoder.DecodeKey(TupleParser.ParseBytes(slice), out res); + break; + } + case TupleTypes.TupleStart: + { + var reader = TupleReader.Embedded(slice); + TupleEncoder.DecodeKey(ref reader, out res); + break; + } + default: + { + throw new FormatException($"Cannot convert tuple segment into a {res.GetType().Name}"); + } + } + } + return res; + + } + + [Pure] + public static STuple DeserializeTuple(Slice slice) + { + var res = default(STuple); + if (slice.IsPresent) + { + byte type = slice[0]; + switch (type) + { + case TupleTypes.Nil: + { + break; + } + case TupleTypes.Bytes: + { + TupleEncoder.DecodeKey(TupleParser.ParseBytes(slice), out res); + break; + } + case TupleTypes.TupleStart: + { + var reader = TupleReader.Embedded(slice); + TupleEncoder.DecodeKey(ref reader, out res); + break; + } + 
default: + { + throw new FormatException($"Cannot convert tuple segment into a {res.GetType().Name}"); + } + } + } + return res; + + } + + [Pure] + public static STuple DeserializeTuple(Slice slice) + { + var res = default(STuple); + if (slice.IsPresent) + { + byte type = slice[0]; + switch (type) + { + case TupleTypes.Nil: + { + break; + } + case TupleTypes.Bytes: + { + TupleEncoder.DecodeKey(TupleParser.ParseBytes(slice), out res); + break; + } + case TupleTypes.TupleStart: + { + var reader = TupleReader.Embedded(slice); + TupleEncoder.DecodeKey(ref reader, out res); + break; + } + default: + { + throw new FormatException($"Cannot convert tuple segment into a {res.GetType().Name}"); + } + } + } + return res; + } + + [Pure] + public static STuple DeserializeTuple(Slice slice) + { + var res = default(STuple); + if (slice.IsPresent) + { + byte type = slice[0]; + switch (type) + { + case TupleTypes.Nil: + { + break; + } + case TupleTypes.Bytes: + { + TupleEncoder.DecodeKey(TupleParser.ParseBytes(slice), out res); + break; + } + case TupleTypes.TupleStart: + { + var reader = TupleReader.Embedded(slice); + TupleEncoder.DecodeKey(ref reader, out res); + break; + } + default: + { + throw new FormatException($"Cannot convert tuple segment into a {res.GetType().Name}"); + } + } + } + return res; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static ValueTuple DeserializeValueTuple(Slice slice) + { + return DeserializeTuple(slice).ToValueTuple(); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static (T1, T2) DeserializeValueTuple(Slice slice) + { + return DeserializeTuple(slice).ToValueTuple(); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static (T1, T2, T3) DeserializeValueTuple(Slice slice) + { + return DeserializeTuple(slice).ToValueTuple(); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static (T1, T2, T3, T4) DeserializeValueTuple(Slice slice) + { + return 
DeserializeTuple(slice).ToValueTuple(); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static (T1, T2, T3, T4, T5) DeserializeValueTuple(Slice slice) + { + return DeserializeTuple(slice).ToValueTuple(); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static (T1, T2, T3, T4, T5, T6) DeserializeValueTuple(Slice slice) + { + return DeserializeTuple(slice).ToValueTuple(); + } + + /// Deserialize a tuple segment into a Boolean + /// Slice that contains a single packed element + public static bool DeserializeBoolean(Slice slice) + { + if (slice.IsNullOrEmpty) return false; //TODO: fail ? + + byte type = slice[0]; + + // Booleans are usually encoded as integers, with 0 for False (<14>) and 1 for True (<15><01>) + if (type <= TupleTypes.IntPos8 && type >= TupleTypes.IntNeg8) + { + //note: DeserializeInt64 handles most cases + return 0 != DeserializeInt64(slice); + } + + switch (type) + { + case TupleTypes.Bytes: + { // empty is false, all other is true + return slice.Count != 2; // <01><00> + } + case TupleTypes.Utf8: + {// empty is false, all other is true + return slice.Count != 2; // <02><00> + } + case TupleTypes.Single: + { + //TODO: should NaN considered to be false ? + //=> it is the "null" of the floats, so if we do, 'null' should also be considered false + // ReSharper disable once CompareOfFloatsByEqualityOperator + return 0f != TupleParser.ParseSingle(slice); + } + case TupleTypes.Double: + { + //TODO: should NaN considered to be false ? + //=> it is the "null" of the floats, so if we do, 'null' should also be considered false + // ReSharper disable once CompareOfFloatsByEqualityOperator + return 0d != TupleParser.ParseDouble(slice); + } + //TODO: triple + case TupleTypes.Decimal: + { + return 0m != TupleParser.ParseDecimal(slice); + } + } + + //TODO: should we handle weird cases like strings "True" and "False"? 
+ + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a boolean"); + } + + /// Deserialize a tuple segment into an Int16 + /// Slice that contains a single packed element + public static sbyte DeserializeSByte(Slice slice) + { + return checked((sbyte)DeserializeInt64(slice)); + } + + /// Deserialize a tuple segment into an Int16 + /// Slice that contains a single packed element + public static short DeserializeInt16(Slice slice) + { + return checked((short)DeserializeInt64(slice)); + } + + /// Deserialize a tuple segment into an Int32 + /// Slice that contains a single packed element + public static int DeserializeInt32(Slice slice) + { + return checked((int)DeserializeInt64(slice)); + } + + /// Deserialize a tuple segment into an Int64 + /// Slice that contains a single packed element + public static long DeserializeInt64(Slice slice) + { + if (slice.IsNullOrEmpty) return 0L; //TODO: fail ? + + int type = slice[0]; + if (type <= TupleTypes.IntPos8) + { + if (type >= TupleTypes.IntNeg8) return TupleParser.ParseInt64(type, slice); + + switch (type) + { + case TupleTypes.Nil: return 0; + case TupleTypes.Bytes: return long.Parse(TupleParser.ParseAscii(slice), CultureInfo.InvariantCulture); + case TupleTypes.Utf8: return long.Parse(TupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); + } + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a signed integer"); + } + + /// Deserialize a tuple segment into an UInt32 + /// Slice that contains a single packed element + public static byte DeserializeByte(Slice slice) + { + return checked((byte)DeserializeUInt64(slice)); + } + + /// Deserialize a tuple segment into an UInt32 + /// Slice that contains a single packed element + public static ushort DeserializeUInt16(Slice slice) + { + return checked((ushort)DeserializeUInt64(slice)); + } + + /// Deserialize a slice into an UInt32 + /// Slice that contains a single packed element + public static uint 
DeserializeUInt32(Slice slice) + { + return checked((uint)DeserializeUInt64(slice)); + } + + /// Deserialize a tuple segment into an UInt64 + /// Slice that contains a single packed element + public static ulong DeserializeUInt64(Slice slice) + { + if (slice.IsNullOrEmpty) return 0UL; //TODO: fail ? + + int type = slice[0]; + if (type <= TupleTypes.IntPos8) + { + if (type >= TupleTypes.IntZero) return (ulong)TupleParser.ParseInt64(type, slice); + if (type < TupleTypes.IntZero) throw new OverflowException(); // negative values + + switch (type) + { + case TupleTypes.Nil: return 0; + case TupleTypes.Bytes: return ulong.Parse(TupleParser.ParseAscii(slice), CultureInfo.InvariantCulture); + case TupleTypes.Utf8: return ulong.Parse(TupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); + } + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into an unsigned integer"); + } + + public static float DeserializeSingle(Slice slice) + { + if (slice.IsNullOrEmpty) return 0; + + byte type = slice[0]; + switch (type) + { + case TupleTypes.Nil: + { + //REVIEW: or should we retourne NaN? + return 0; + } + case TupleTypes.Utf8: + { + return float.Parse(TupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); + } + case TupleTypes.Single: + { + return TupleParser.ParseSingle(slice); + } + case TupleTypes.Double: + { + return (float) TupleParser.ParseDouble(slice); + } + case TupleTypes.Decimal: + { + return (float) TupleParser.ParseDecimal(slice); + } + } + + if (type <= TupleTypes.IntPos8 && type >= TupleTypes.IntNeg8) + { + return DeserializeInt64(slice); + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a Single"); + } + + public static double DeserializeDouble(Slice slice) + { + if (slice.IsNullOrEmpty) return 0; + + byte type = slice[0]; + switch(type) + { + case TupleTypes.Nil: + { + //REVIEW: or should we retourne NaN? 
+ return 0; + } + case TupleTypes.Utf8: + { + return double.Parse(TupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); + } + case TupleTypes.Single: + { + return TupleParser.ParseSingle(slice); + } + case TupleTypes.Double: + { + return TupleParser.ParseDouble(slice); + } + case TupleTypes.Decimal: + { + return (double) TupleParser.ParseDecimal(slice); + } + } + + if (type <= TupleTypes.IntPos8 && type >= TupleTypes.IntNeg8) + { + return DeserializeInt64(slice); + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a Double"); + } + + public static decimal DeserializeDecimal(Slice slice) + { + throw new NotImplementedException(); + } + + /// Deserialize a tuple segment into a DateTime (UTC) + /// Slice that contains a single packed element + /// DateTime in UTC + /// The returned DateTime will be in UTC, because the original TimeZone details are lost. + public static DateTime DeserializeDateTime(Slice slice) + { + if (slice.IsNullOrEmpty) return DateTime.MinValue; //TODO: fail ? + + byte type = slice[0]; + + switch(type) + { + case TupleTypes.Nil: + { + return DateTime.MinValue; + } + + case TupleTypes.Utf8: + { // we only support ISO 8601 dates. For ex: YYYY-MM-DDTHH:MM:SS.fffff" + string str = TupleParser.ParseUnicode(slice); + return DateTime.Parse(str, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind); + } + + case TupleTypes.Double: + { // Number of days since Epoch + const long UNIX_EPOCH_TICKS = 621355968000000000L; + //note: we can't user TimeSpan.FromDays(...) because it rounds to the nearest millisecond! + long ticks = UNIX_EPOCH_TICKS + (long)(TupleParser.ParseDouble(slice) * TimeSpan.TicksPerDay); + return new DateTime(ticks, DateTimeKind.Utc); + } + + case TupleTypes.Decimal: + { + const long UNIX_EPOCH_TICKS = 621355968000000000L; + //note: we can't user TimeSpan.FromDays(...) because it rounds to the nearest millisecond! 
+ long ticks = UNIX_EPOCH_TICKS + (long)(TupleParser.ParseDecimal(slice) * TimeSpan.TicksPerDay); + return new DateTime(ticks, DateTimeKind.Utc); + } + } + + // If we have an integer, we consider it to be a number of Ticks (Windows Only) + if (type <= TupleTypes.IntPos8 && type >= TupleTypes.IntNeg8) + { + return new DateTime(DeserializeInt64(slice), DateTimeKind.Utc); + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a DateTime"); + } + + /// Deserialize a tuple segment into a TimeSpan + /// Slice that contains a single packed element + public static TimeSpan DeserializeTimeSpan(Slice slice) + { + if (slice.IsNullOrEmpty) return TimeSpan.Zero; //TODO: fail ? + + byte type = slice[0]; + + // We serialize TimeSpans as number of seconds in a 64-bit float. + + switch(type) + { + case TupleTypes.Nil: + { + return TimeSpan.Zero; + } + case TupleTypes.Utf8: + { // "HH:MM:SS.fffff" + return TimeSpan.Parse(TupleParser.ParseUnicode(slice), CultureInfo.InvariantCulture); + } + case TupleTypes.Single: + { // Number of seconds + //note: We can't use TimeSpan.FromSeconds(...) because it rounds to the nearest millisecond! + return new TimeSpan((long) (TupleParser.ParseSingle(slice) * TimeSpan.TicksPerSecond)); + } + case TupleTypes.Double: + { // Number of seconds + //note: We can't use TimeSpan.FromSeconds(...) because it rounds to the nearest millisecond! + return new TimeSpan((long) (TupleParser.ParseDouble(slice) * TimeSpan.TicksPerSecond)); + } + case TupleTypes.Decimal: + { // Number of seconds + //note: We can't use TimeSpan.FromSeconds(...) because it rounds to the nearest millisecond! 
+ return new TimeSpan((long) (TupleParser.ParseDecimal(slice) * TimeSpan.TicksPerSecond)); + } + } + + // If we have an integer, we consider it to be a number of Ticks (Windows Only) + if (type <= TupleTypes.IntPos8 && type >= TupleTypes.IntNeg8) + { + return new TimeSpan(DeserializeInt64(slice)); + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a TimeSpan"); + } + + /// Deserialize a tuple segment into a Unicode character + /// Slice that contains a single packed element + public static char DeserializeChar(Slice slice) + { + if (slice.IsNullOrEmpty) return '\0'; + + byte type = slice[0]; + switch (type) + { + case TupleTypes.Nil: + { + return '\0'; + } + case TupleTypes.Bytes: + { + var s = TupleParser.ParseBytes(slice); + if (s.Count == 0) return '\0'; + if (s.Count == 1) return (char) s[0]; + throw new FormatException($"Cannot convert buffer of size {s.Count} into a Char"); + } + case TupleTypes.Utf8: + { + var s = TupleParser.ParseUnicode(slice); + if (s.Length == 0) return '\0'; + if (s.Length == 1) return s[0]; + throw new FormatException($"Cannot convert string of size {s.Length} into a Char"); + } + } + + if (type <= TupleTypes.IntPos8 && type >= TupleTypes.IntNeg8) + { + return (char) TupleParser.ParseInt64(type, slice); + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a Char"); + } + + /// Deserialize a tuple segment into a Unicode string + /// Slice that contains a single packed element + [CanBeNull] + public static string DeserializeString(Slice slice) + { + if (slice.IsNullOrEmpty) return null; + + byte type = slice[0]; + switch (type) + { + case TupleTypes.Nil: + { + return null; + } + case TupleTypes.Bytes: + { + return TupleParser.ParseAscii(slice); + } + case TupleTypes.Utf8: + { + return TupleParser.ParseUnicode(slice); + } + case TupleTypes.Single: + { + return TupleParser.ParseSingle(slice).ToString(CultureInfo.InvariantCulture); + } + case TupleTypes.Double: + { + 
return TupleParser.ParseDouble(slice).ToString(CultureInfo.InvariantCulture); + } + case TupleTypes.Decimal: + { + return TupleParser.ParseDecimal(slice).ToString(CultureInfo.InvariantCulture); + } + case TupleTypes.Uuid128: + { + return TupleParser.ParseGuid(slice).ToString(); + } + case TupleTypes.Uuid64: + { + return TupleParser.ParseUuid64(slice).ToString(); + } + } + + if (type <= TupleTypes.IntPos8 && type >= TupleTypes.IntNeg8) + { + return TupleParser.ParseInt64(type, slice).ToString(CultureInfo.InvariantCulture); + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a String"); + } + + /// Deserialize a tuple segment into Guid + /// Slice that contains a single packed element + public static Guid DeserializeGuid(Slice slice) + { + if (slice.IsNullOrEmpty) return Guid.Empty; + + int type = slice[0]; + + switch (type) + { + case TupleTypes.Bytes: + { + return Guid.Parse(TupleParser.ParseAscii(slice)); + } + case TupleTypes.Utf8: + { + return Guid.Parse(TupleParser.ParseUnicode(slice)); + } + case TupleTypes.Uuid128: + { + return TupleParser.ParseGuid(slice); + } + //REVIEW: should we allow converting a Uuid64 into a Guid? This looks more like a bug than an expected behavior... 
+ } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a System.Guid"); + } + + /// Deserialize a tuple segment into 128-bit UUID + /// Slice that contains a single packed element + public static Uuid128 DeserializeUuid128(Slice slice) + { + if (slice.IsNullOrEmpty) return Uuid128.Empty; + + int type = slice[0]; + + switch (type) + { + case TupleTypes.Bytes: + { // expect binary representation as a 16-byte array + return new Uuid128(TupleParser.ParseBytes(slice)); + } + case TupleTypes.Utf8: + { // expect text representation + return new Uuid128(TupleParser.ParseUnicode(slice)); + } + case TupleTypes.Uuid128: + { + return TupleParser.ParseUuid128(slice); + } + //REVIEW: should we allow converting a Uuid64 into a Uuid128? This looks more like a bug than an expected behavior... + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into an Uuid128"); + } + + /// Deserialize a tuple segment into 64-bit UUID + /// Slice that contains a single packed element + public static Uuid64 DeserializeUuid64(Slice slice) + { + if (slice.IsNullOrEmpty) return Uuid64.Empty; + + int type = slice[0]; + + switch (type) + { + case TupleTypes.Bytes: + { // expect binary representation as a 16-byte array + return Uuid64.Read(TupleParser.ParseBytes(slice)); + } + case TupleTypes.Utf8: + { // expect text representation + return Uuid64.Parse(TupleParser.ParseUnicode(slice)); + } + case TupleTypes.Uuid64: + { + return TupleParser.ParseUuid64(slice); + } + } + + if (type >= TupleTypes.IntZero && type <= TupleTypes.IntPos8) + { // expect 64-bit number + return new Uuid64(TupleParser.ParseInt64(type, slice)); + } + // we don't support negative numbers! 
+ + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into an Uuid64"); + } + + public static VersionStamp DeserializeVersionStamp(Slice slice) + { + if (slice.IsNullOrEmpty) return default(VersionStamp); + + int type = slice[0]; + + if (type == TupleTypes.VersionStamp80 || type == TupleTypes.VersionStamp96) + { + if (VersionStamp.TryParse(slice.Substring(1), out var stamp)) + { + return stamp; + } + throw new FormatException("Cannot convert malformed tuple segment into a VersionStamp"); + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into a VersionStamp"); + } + + /// Deserialize a tuple segment into Guid + /// Slice that contains a single packed element + [CanBeNull] + public static System.Net.IPAddress DeserializeIPAddress(Slice slice) + { + if (slice.IsNullOrEmpty) return null; + + int type = slice[0]; + + switch (type) + { + case TupleTypes.Bytes: + { + return new System.Net.IPAddress(TupleParser.ParseBytes(slice).GetBytesOrEmpty()); + } + case TupleTypes.Utf8: + { + return System.Net.IPAddress.Parse(TupleParser.ParseUnicode(slice)); + } + case TupleTypes.Uuid128: + { // could be an IPv6 encoded as a 128-bits UUID + return new System.Net.IPAddress(slice.GetBytesOrEmpty()); + } + } + + if (type >= TupleTypes.IntPos1 && type <= TupleTypes.IntPos4) + { // could be an IPv4 encoded as a 32-bit unsigned integer + var value = TupleParser.ParseInt64(type, slice); + Contract.Assert(value >= 0 && value <= uint.MaxValue); + return new System.Net.IPAddress(value); + } + + throw new FormatException($"Cannot convert tuple segment of type 0x{type:X} into System.Net.IPAddress"); + } + + /// Unpack a tuple from a buffer + /// Slice that contains the packed representation of a tuple with zero or more elements + /// + /// Decoded tuple + [NotNull] + internal static SlicedTuple Unpack(Slice buffer, bool embedded) + { + var reader = new TupleReader(buffer); + if (embedded) reader.Depth = 1; + return Unpack(ref reader); 
+ } + + /// Unpack a tuple from a buffer + /// Reader positionned on the start of the packed representation of a tuple with zero or more elements + /// Decoded tuple + internal static SlicedTuple Unpack(ref TupleReader reader) + { + // most tuples will probably fit within (prefix, sub-prefix, id, key) so pre-allocating with 4 should be ok... + var items = new Slice[4]; + + Slice item; + int p = 0; + while ((item = TupleParser.ParseNext(ref reader)).HasValue) + { + if (p >= items.Length) + { + // note: do not grow exponentially, because tuples will never but very large... + Array.Resize(ref items, p + 4); + } + items[p++] = item; + } + + if (reader.Input.HasMore) throw new FormatException("Parsing of tuple failed failed before reaching the end of the key"); + return new SlicedTuple(p == 0 ? Array.Empty() : items, 0, p); + } + + + /// Ensure that a slice is a packed tuple that contains a single and valid element + /// Slice that should contain the packed representation of a singleton tuple + /// Decoded slice of the single element in the singleton tuple + public static Slice UnpackSingle(Slice buffer) + { + var slicer = new TupleReader(buffer); + + var current = TupleParser.ParseNext(ref slicer); + if (slicer.Input.HasMore) throw new FormatException("Parsing of singleton tuple failed before reaching the end of the key"); + + return current; + } + + /// Only returns the first item of a packed tuple + /// Slice that contains the packed representation of a tuple with one or more elements + /// Raw slice corresponding to the first element of the tuple + public static Slice UnpackFirst(Slice buffer) + { + var slicer = new TupleReader(buffer); + + return TupleParser.ParseNext(ref slicer); + } + + /// Only returns the last item of a packed tuple + /// Slice that contains the packed representation of a tuple with one or more elements + /// Raw slice corresponding to the last element of the tuple + public static Slice UnpackLast(Slice buffer) + { + var slicer = new 
TupleReader(buffer); + + Slice item = Slice.Nil; + + Slice current; + while ((current = TupleParser.ParseNext(ref slicer)).HasValue) + { + item = current; + } + + if (slicer.Input.HasMore) throw new FormatException("Parsing of tuple failed failed before reaching the end of the key"); + return item; + } + + #endregion + + } + +} diff --git a/FoundationDB.Client/Layers/Tuples/FdbTupleParser.cs b/FoundationDB.Client/Tuples/Encoding/TupleParser.cs similarity index 72% rename from FoundationDB.Client/Layers/Tuples/FdbTupleParser.cs rename to FoundationDB.Client/Tuples/Encoding/TupleParser.cs index 15baae74c..4b969eb4f 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbTupleParser.cs +++ b/FoundationDB.Client/Tuples/Encoding/TupleParser.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,16 +26,19 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples.Encoding { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; + using System.Runtime.CompilerServices; using System.Text; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using FoundationDB.Client; + using JetBrains.Annotations; /// Helper class that contains low-level encoders for the tuple binary format - public static class FdbTupleParser + public static class TupleParser { #region Serialization... 
@@ -44,11 +47,11 @@ public static void WriteNil(ref TupleWriter writer) { if (writer.Depth == 0) { // at the top level, NILs are escaped as <00> - writer.Output.WriteByte(FdbTupleTypes.Nil); + writer.Output.WriteByte(TupleTypes.Nil); } else { // inside a tuple, NILs are escaped as <00> - writer.Output.WriteByte2(FdbTupleTypes.Nil, 0xFF); + writer.Output.WriteBytes(TupleTypes.Nil, 0xFF); } } @@ -57,11 +60,28 @@ public static void WriteBool(ref TupleWriter writer, bool value) // To be compatible with other bindings, we will encode False as the number 0, and True as the number 1 if (value) { // true => 15 01 - writer.Output.WriteByte2(FdbTupleTypes.IntPos1, 1); + writer.Output.WriteBytes(TupleTypes.IntPos1, 1); + } + else + { // false => 14 + writer.Output.WriteByte(TupleTypes.IntZero); + } + } + + public static void WriteBool(ref TupleWriter writer, bool? value) + { + // To be compatible with other bindings, we will encode False as the number 0, and True as the number 1 + if (value == null) + { // null => 00 + writer.Output.WriteByte(TupleTypes.Nil); + } + else if (value.Value) + { // true => 15 01 + writer.Output.WriteBytes(TupleTypes.IntPos1, 1); } else { // false => 14 - writer.Output.WriteByte(FdbTupleTypes.IntZero); + writer.Output.WriteByte(TupleTypes.IntZero); } } @@ -72,11 +92,11 @@ public static void WriteByte(ref TupleWriter writer, byte value) { if (value == 0) { // zero - writer.Output.WriteByte(FdbTupleTypes.IntZero); + writer.Output.WriteByte(TupleTypes.IntZero); } else { // 1..255: frequent for array index - writer.Output.WriteByte2(FdbTupleTypes.IntPos1, value); + writer.Output.WriteBytes(TupleTypes.IntPos1, value); } } @@ -89,19 +109,19 @@ public static void WriteInt32(ref TupleWriter writer, int value) { if (value == 0) { // zero - writer.Output.WriteByte(FdbTupleTypes.IntZero); + writer.Output.WriteByte(TupleTypes.IntZero); return; } if (value > 0) { // 1..255: frequent for array index - writer.Output.WriteByte2(FdbTupleTypes.IntPos1, (byte)value); 
+ writer.Output.WriteBytes(TupleTypes.IntPos1, (byte)value); return; } if (value > -256) { // -255..-1 - writer.Output.WriteByte2(FdbTupleTypes.IntNeg1, (byte)(255 + value)); + writer.Output.WriteBytes(TupleTypes.IntNeg1, (byte)(255 + value)); return; } } @@ -109,6 +129,12 @@ public static void WriteInt32(ref TupleWriter writer, int value) WriteInt64Slow(ref writer, value); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteInt32(ref TupleWriter writer, int? value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteInt32(ref writer, value.Value); + } + /// Writes an Int64 at the end, and advance the cursor /// Target buffer /// Signed QWORD, 64 bits, High Endian @@ -118,19 +144,19 @@ public static void WriteInt64(ref TupleWriter writer, long value) { if (value == 0) { // zero - writer.Output.WriteByte(FdbTupleTypes.IntZero); + writer.Output.WriteByte(TupleTypes.IntZero); return; } if (value > 0) { // 1..255: frequent for array index - writer.Output.WriteByte2(FdbTupleTypes.IntPos1, (byte)value); + writer.Output.WriteBytes(TupleTypes.IntPos1, (byte)value); return; } if (value > -256) { // -255..-1 - writer.Output.WriteByte2(FdbTupleTypes.IntNeg1, (byte)(255 + value)); + writer.Output.WriteBytes(TupleTypes.IntNeg1, (byte)(255 + value)); return; } } @@ -138,6 +164,13 @@ public static void WriteInt64(ref TupleWriter writer, long value) WriteInt64Slow(ref writer, value); } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteInt64(ref TupleWriter writer, long? 
value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteInt64(ref writer, value.Value); + } + private static void WriteInt64Slow(ref TupleWriter writer, long value) { // we are only called for values <= -256 or >= 256 @@ -153,7 +186,7 @@ private static void WriteInt64Slow(ref TupleWriter writer, long value) ulong v; if (value > 0) { // simple case - buffer[p++] = (byte)(FdbTupleTypes.IntBase + bytes); + buffer[p++] = (byte)(TupleTypes.IntBase + bytes); v = (ulong)value; } else @@ -161,7 +194,7 @@ private static void WriteInt64Slow(ref TupleWriter writer, long value) // -1 => 0xFE // -256 => 0xFFFE // -65536 => 0xFFFFFE - buffer[p++] = (byte)(FdbTupleTypes.IntBase - bytes); + buffer[p++] = (byte)(TupleTypes.IntBase - bytes); v = (ulong)(~(-value)); } @@ -191,11 +224,11 @@ public static void WriteUInt32(ref TupleWriter writer, uint value) { if (value == 0) { // 0 - writer.Output.WriteByte(FdbTupleTypes.IntZero); + writer.Output.WriteByte(TupleTypes.IntZero); } else { // 1..255 - writer.Output.WriteByte2(FdbTupleTypes.IntPos1, (byte)value); + writer.Output.WriteBytes(TupleTypes.IntPos1, (byte)value); } } else @@ -204,6 +237,12 @@ public static void WriteUInt32(ref TupleWriter writer, uint value) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteUInt32(ref TupleWriter writer, uint? 
value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteUInt32(ref writer, value.Value); + } + /// Writes an UInt64 at the end, and advance the cursor /// Target buffer /// Signed QWORD, 64 bits, High Endian @@ -213,11 +252,11 @@ public static void WriteUInt64(ref TupleWriter writer, ulong value) { if (value == 0) { // 0 - writer.Output.WriteByte(FdbTupleTypes.IntZero); + writer.Output.WriteByte(TupleTypes.IntZero); } else { // 1..255 - writer.Output.WriteByte2(FdbTupleTypes.IntPos1, (byte)value); + writer.Output.WriteBytes(TupleTypes.IntPos1, (byte)value); } } else @@ -226,6 +265,12 @@ public static void WriteUInt64(ref TupleWriter writer, ulong value) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteUInt64(ref TupleWriter writer, ulong? value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteUInt64(ref writer, value.Value); + } + private static void WriteUInt64Slow(ref TupleWriter writer, ulong value) { // We are only called for values >= 256 @@ -239,7 +284,7 @@ private static void WriteUInt64Slow(ref TupleWriter writer, ulong value) int p = writer.Output.Position; // simple case (ulong can only be positive) - buffer[p++] = (byte)(FdbTupleTypes.IntBase + bytes); + buffer[p++] = (byte)(TupleTypes.IntBase + bytes); if (bytes > 0) { @@ -287,7 +332,7 @@ public static void WriteSingle(ref TupleWriter writer, float value) writer.Output.EnsureBytes(5); var buffer = writer.Output.Buffer; int p = writer.Output.Position; - buffer[p + 0] = FdbTupleTypes.Single; + buffer[p + 0] = TupleTypes.Single; buffer[p + 1] = (byte)(bits >> 24); buffer[p + 2] = (byte)(bits >> 16); buffer[p + 3] = (byte)(bits >> 8); @@ -295,6 +340,13 @@ public static void WriteSingle(ref TupleWriter writer, float value) writer.Output.Position = p + 5; } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteSingle(ref TupleWriter writer, float? 
value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteSingle(ref writer, value.Value); + } + /// Writes an Double at the end, and advance the cursor /// Target buffer /// IEEE Floating point, 64 bits, High Endian @@ -323,7 +375,7 @@ public static void WriteDouble(ref TupleWriter writer, double value) writer.Output.EnsureBytes(9); var buffer = writer.Output.Buffer; int p = writer.Output.Position; - buffer[p] = FdbTupleTypes.Double; + buffer[p] = TupleTypes.Double; buffer[p + 1] = (byte)(bits >> 56); buffer[p + 2] = (byte)(bits >> 48); buffer[p + 3] = (byte)(bits >> 40); @@ -335,17 +387,21 @@ public static void WriteDouble(ref TupleWriter writer, double value) writer.Output.Position = p + 9; } - /// Writes a binary string - public static void WriteBytes(ref TupleWriter writer, byte[] value) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteDouble(ref TupleWriter writer, double? value) { - if (value == null) - { - WriteNil(ref writer); - } - else - { - WriteNulEscapedBytes(ref writer, FdbTupleTypes.Bytes, value); - } + if (!value.HasValue) WriteNil(ref writer); else WriteDouble(ref writer, value.Value); + } + + public static void WriteDecimal(ref TupleWriter writer, decimal value) + { + throw new NotImplementedException(); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteDecimal(ref TupleWriter writer, decimal? 
value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteDecimal(ref writer, value.Value); } /// Writes a string encoded in UTF-8 @@ -357,7 +413,7 @@ public static unsafe void WriteString(ref TupleWriter writer, string value) } else if (value.Length == 0) { // "02 00" - writer.Output.WriteByte2(FdbTupleTypes.Utf8, 0x00); + writer.Output.WriteBytes(TupleTypes.Utf8, 0x00); } else { @@ -365,7 +421,7 @@ public static unsafe void WriteString(ref TupleWriter writer, string value) { if (!TryWriteUnescapedUtf8String(ref writer, chars, value.Length)) { // the string contains \0 chars, we need to do it the hard way - WriteNulEscapedBytes(ref writer, FdbTupleTypes.Utf8, Encoding.UTF8.GetBytes(value)); + WriteNulEscapedBytes(ref writer, TupleTypes.Utf8, Encoding.UTF8.GetBytes(value)); } } } @@ -384,7 +440,7 @@ internal static unsafe void WriteChars(ref TupleWriter writer, char[] value, int } else { // "02 00" - writer.Output.WriteByte2(FdbTupleTypes.Utf8, 0x00); + writer.Output.WriteBytes(TupleTypes.Utf8, 0x00); } } else @@ -393,7 +449,7 @@ internal static unsafe void WriteChars(ref TupleWriter writer, char[] value, int { if (!TryWriteUnescapedUtf8String(ref writer, chars + offset, count)) { // the string contains \0 chars, we need to do it the hard way - WriteNulEscapedBytes(ref writer, FdbTupleTypes.Utf8, Encoding.UTF8.GetBytes(value, 0, count)); + WriteNulEscapedBytes(ref writer, TupleTypes.Utf8, Encoding.UTF8.GetBytes(value, 0, count)); } } } @@ -410,7 +466,7 @@ private static unsafe void WriteUnescapedAsciiChars(ref TupleWriter writer, char char* end = chars + count; fixed (byte* buffer = writer.Output.Buffer) { - buffer[pos++] = FdbTupleTypes.Utf8; + buffer[pos++] = TupleTypes.Utf8; //OPTIMIZE: copy 2 or 4 chars at once, unroll loop? while(chars < end) { @@ -478,17 +534,16 @@ private static unsafe bool TryWriteUnescapedUtf8String(ref TupleWriter writer, c // * Western languages have a few chars that usually need 2 bytes. 
If we pre-allocate 50% more bytes, it should fit most of the time, without too much waste // * Eastern langauges will have all chars encoded to 3 bytes. If we also pre-allocated 50% more, we should only need one resize of the buffer (150% x 2 = 300%), which is acceptable writer.Output.EnsureBytes(checked(2 + count + (count >> 1))); // preallocate 150% of the string + 2 bytes - writer.Output.UnsafeWriteByte(FdbTupleTypes.Utf8); + writer.Output.UnsafeWriteByte(TupleTypes.Utf8); var encoder = Encoding.UTF8.GetEncoder(); // note: encoder.Convert() tries to fill up the buffer as much as possible with complete chars, and will set 'done' to true when all chars have been converted. do { - int charsUsed, bytesUsed; - encoder.Convert(ptr, remaining, buf, bufLen, true, out charsUsed, out bytesUsed, out done); + encoder.Convert(ptr, remaining, buf, bufLen, true, out int charsUsed, out int bytesUsed, out done); if (bytesUsed > 0) { - writer.Output.WriteBytes(buf, bytesUsed); + writer.Output.WriteBytes(buf, (uint) bytesUsed); } remaining -= charsUsed; ptr += charsUsed; @@ -510,15 +565,15 @@ public static void WriteChar(ref TupleWriter writer, char value) if (value == 0) { // NUL => "00 0F" // note: \0 is the only unicode character that will produce a zero byte when converted in UTF-8 - writer.Output.WriteByte4(FdbTupleTypes.Utf8, 0x00, 0xFF, 0x00); + writer.Output.WriteBytes(TupleTypes.Utf8, 0x00, 0xFF, 0x00); } else if (value < 0x80) { // 0x00..0x7F => 0xxxxxxx - writer.Output.WriteByte3(FdbTupleTypes.Utf8, (byte)value, 0x00); + writer.Output.WriteBytes(TupleTypes.Utf8, (byte)value, 0x00); } else if (value < 0x800) { // 0x80..0x7FF => 110xxxxx 10xxxxxx => two bytes - writer.Output.WriteByte4(FdbTupleTypes.Utf8, (byte)(0xC0 | (value >> 6)), (byte)(0x80 | (value & 0x3F)), 0x00); + writer.Output.WriteBytes(TupleTypes.Utf8, (byte)(0xC0 | (value >> 6)), (byte)(0x80 | (value & 0x3F)), 0x00); } else { // 0x800..0xFFFF => 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx @@ -526,22 +581,65 @@ public 
static void WriteChar(ref TupleWriter writer, char value) // => This means that a System.Char will never take more than 3 bytes in UTF-8 ! var tmp = Encoding.UTF8.GetBytes(new string(value, 1)); writer.Output.EnsureBytes(tmp.Length + 2); - writer.Output.UnsafeWriteByte(FdbTupleTypes.Utf8); + writer.Output.UnsafeWriteByte(TupleTypes.Utf8); writer.Output.UnsafeWriteBytes(tmp, 0, tmp.Length); writer.Output.UnsafeWriteByte(0x00); } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteChar(ref TupleWriter writer, char? value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteChar(ref writer, value.Value); + } + + /// Writes a binary string + public static void WriteBytes(ref TupleWriter writer, byte[] value) + { + if (value == null) + { + WriteNil(ref writer); + } + else + { + WriteNulEscapedBytes(ref writer, TupleTypes.Bytes, value); + } + } + /// Writes a binary string public static void WriteBytes(ref TupleWriter writer, [NotNull] byte[] value, int offset, int count) { - WriteNulEscapedBytes(ref writer, FdbTupleTypes.Bytes, value, offset, count); + WriteNulEscapedBytes(ref writer, TupleTypes.Bytes, value, offset, count); } /// Writes a binary string public static void WriteBytes(ref TupleWriter writer, ArraySegment value) { - WriteNulEscapedBytes(ref writer, FdbTupleTypes.Bytes, value.Array, value.Offset, value.Count); + if (value.Count == 0 && value.Array == null) + { // default(ArraySegment) ~= null + WriteNil(ref writer); + } + else + { + WriteNulEscapedBytes(ref writer, TupleTypes.Bytes, value.Array, value.Offset, value.Count); + } + } + + /// Writes a binary string + public static void WriteBytes(ref TupleWriter writer, Slice value) + { + if (value.IsNull) + { + WriteNil(ref writer); + } + else if (value.Offset == 0 && value.Count == value.Array.Length) + { + WriteNulEscapedBytes(ref writer, TupleTypes.Bytes, value.Array); + } + else + { + WriteNulEscapedBytes(ref writer, TupleTypes.Bytes, value.Array, value.Offset, 
value.Count); + } } /// Writes a buffer with all instances of 0 escaped as '00 FF' @@ -554,6 +652,7 @@ internal static void WriteNulEscapedBytes(ref TupleWriter writer, byte type, [No for (int i = offset, end = offset + count; i < end; ++i) { if (value[i] == 0) ++n; + //TODO: optimize this! } writer.Output.EnsureBytes(n + 2); @@ -564,13 +663,14 @@ internal static void WriteNulEscapedBytes(ref TupleWriter writer, byte type, [No { if (n == count) { // no NULs in the string, can copy all at once - SliceHelpers.CopyBytesUnsafe(buffer, p, value, offset, n); + UnsafeHelpers.CopyUnsafe(buffer, p, value, offset, n); p += n; } else { // we need to escape all NULs for(int i = offset, end = offset + count; i < end; ++i) { + //TODO: optimize this! byte b = value[i]; buffer[p++] = b; if (b == 0) buffer[p++] = 0xFF; @@ -600,7 +700,7 @@ private static void WriteNulEscapedBytes(ref TupleWriter writer, byte type, [Not { if (n == value.Length) { // no NULs in the string, can copy all at once - SliceHelpers.CopyBytesUnsafe(buffer, p, value, 0, n); + UnsafeHelpers.CopyUnsafe(buffer, p, value, 0, n); p += n; } else @@ -620,48 +720,72 @@ private static void WriteNulEscapedBytes(ref TupleWriter writer, byte type, [Not public static void WriteGuid(ref TupleWriter writer, Guid value) { writer.Output.EnsureBytes(17); - writer.Output.UnsafeWriteByte(FdbTupleTypes.Uuid128); - unsafe - { - // UUIDs are stored using the RFC 4122 standard, so we need to swap some parts of the System.Guid + writer.Output.UnsafeWriteByte(TupleTypes.Uuid128); + // Guids should be stored using the RFC 4122 standard, so we need to swap some parts of the System.Guid (handled by Uuid128) + writer.Output.UnsafeWriteUuid128(new Uuid128(value)); + } - byte* ptr = stackalloc byte[16]; - Uuid128.Write(value, ptr); - writer.Output.UnsafeWriteBytes(ptr, 16); - } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteGuid(ref TupleWriter writer, Guid? 
value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteGuid(ref writer, value.Value); } /// Writes a RFC 4122 encoded 128-bit UUID public static void WriteUuid128(ref TupleWriter writer, Uuid128 value) { writer.Output.EnsureBytes(17); - writer.Output.UnsafeWriteByte(FdbTupleTypes.Uuid128); - unsafe - { - byte* ptr = stackalloc byte[16]; - value.WriteTo(ptr); - writer.Output.UnsafeWriteBytes(ptr, 16); - } + writer.Output.UnsafeWriteByte(TupleTypes.Uuid128); + writer.Output.UnsafeWriteUuid128(value); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteUuid128(ref TupleWriter writer, Uuid128? value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteUuid128(ref writer, value.Value); } /// Writes a 64-bit UUID public static void WriteUuid64(ref TupleWriter writer, Uuid64 value) { writer.Output.EnsureBytes(9); - writer.Output.UnsafeWriteByte(FdbTupleTypes.Uuid64); - unsafe - { - byte* ptr = stackalloc byte[8]; - value.WriteTo(ptr); - writer.Output.UnsafeWriteBytes(ptr, 8); + writer.Output.UnsafeWriteByte(TupleTypes.Uuid64); + writer.Output.UnsafeWriteUuid64(value); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteUuid64(ref TupleWriter writer, Uuid64? value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteUuid64(ref writer, value.Value); + } + + public static void WriteVersionStamp(ref TupleWriter writer, VersionStamp value) + { + if (value.HasUserVersion) + { // 96-bits Versionstamp + writer.Output.EnsureBytes(13); + writer.Output.UnsafeWriteByte(TupleTypes.VersionStamp96); + value.WriteTo(writer.Output.Allocate(12)); } + else + { // 80-bits Versionstamp + writer.Output.EnsureBytes(11); + writer.Output.UnsafeWriteByte(TupleTypes.VersionStamp80); + value.WriteTo(writer.Output.Allocate(10)); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteVersionStamp(ref TupleWriter writer, VersionStamp? 
value) + { + if (!value.HasValue) WriteNil(ref writer); else WriteVersionStamp(ref writer, value.Value); } /// Mark the start of a new embedded tuple public static void BeginTuple(ref TupleWriter writer) { writer.Depth++; - writer.Output.WriteByte(FdbTupleTypes.TupleStart); + writer.Output.WriteByte(TupleTypes.TupleStart); } /// Mark the end of an embedded tuple @@ -679,7 +803,7 @@ public static void EndTuple(ref TupleWriter writer) /// This method should only be used by custom decoders. public static long ParseInt64(int type, Slice slice) { - int bytes = type - FdbTupleTypes.IntBase; + int bytes = type - TupleTypes.IntBase; if (bytes == 0) return 0L; bool neg = false; @@ -702,7 +826,7 @@ public static long ParseInt64(int type, Slice slice) return value; } - internal static ArraySegment UnescapeByteString([NotNull] byte[] buffer, int offset, int count) + internal static Slice UnescapeByteString([NotNull] byte[] buffer, int offset, int count) { Contract.Requires(buffer != null && offset >= 0 && count >= 0); @@ -719,10 +843,10 @@ internal static ArraySegment UnescapeByteString([NotNull] byte[] buffer, i ++p; } // buffer is clean, we can return it as-is - return new ArraySegment(buffer, offset, count); + return buffer.AsSlice(offset, count); } - internal static ArraySegment UnescapeByteStringSlow([NotNull] byte[] buffer, int offset, int count, int offsetOfFirstZero = 0) + internal static Slice UnescapeByteStringSlow([NotNull] byte[] buffer, int offset, int count, int offsetOfFirstZero = 0) { Contract.Requires(buffer != null && offset >= 0 && count >= 0); @@ -733,7 +857,7 @@ internal static ArraySegment UnescapeByteStringSlow([NotNull] byte[] buffe int i = 0; if (offsetOfFirstZero > 0) { - SliceHelpers.CopyBytesUnsafe(tmp, 0, buffer, offset, offsetOfFirstZero); + UnsafeHelpers.CopyUnsafe(tmp, 0, buffer, offset, offsetOfFirstZero); p += offsetOfFirstZero; i = offsetOfFirstZero; } @@ -749,56 +873,58 @@ internal static ArraySegment UnescapeByteStringSlow([NotNull] byte[] 
buffe tmp[i++] = b; } - return new ArraySegment(tmp, 0, i); + return tmp.AsSlice(0, i); } /// Parse a tuple segment containing a byte array + [Pure] public static Slice ParseBytes(Slice slice) { - Contract.Requires(slice.HasValue && slice[0] == FdbTupleTypes.Bytes && slice[-1] == 0); + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.Bytes && slice[-1] == 0); if (slice.Count <= 2) return Slice.Empty; - var decoded = UnescapeByteString(slice.Array, slice.Offset + 1, slice.Count - 2); - - return new Slice(decoded.Array, decoded.Offset, decoded.Count); + return UnescapeByteString(slice.Array, slice.Offset + 1, slice.Count - 2); } /// Parse a tuple segment containing an ASCII string stored as a byte array + [Pure] public static string ParseAscii(Slice slice) { - Contract.Requires(slice.HasValue && slice[0] == FdbTupleTypes.Bytes && slice[-1] == 0); + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.Bytes && slice[-1] == 0); if (slice.Count <= 2) return String.Empty; var decoded = UnescapeByteString(slice.Array, slice.Offset + 1, slice.Count - 2); - - return Slice.DefaultEncoding.GetString(decoded.Array, decoded.Offset, decoded.Count); + return decoded.ToStringAscii(); } /// Parse a tuple segment containing a unicode string + [Pure] public static string ParseUnicode(Slice slice) { - Contract.Requires(slice.HasValue && slice[0] == FdbTupleTypes.Utf8 && slice[-1] == 0); + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.Utf8 && slice[-1] == 0); if (slice.Count <= 2) return String.Empty; //TODO: check args var decoded = UnescapeByteString(slice.Array, slice.Offset + 1, slice.Count - 2); - return Encoding.UTF8.GetString(decoded.Array, decoded.Offset, decoded.Count); + return decoded.ToUnicode(); } /// Parse a tuple segment containing an embedded tuple - public static IFdbTuple ParseTuple(Slice slice) + [Pure] + public static ITuple ParseTuple(Slice slice) { - Contract.Requires(slice.HasValue && slice[0] == FdbTupleTypes.TupleStart && slice[-1] 
== 0); - if (slice.Count <= 2) return FdbTuple.Empty; + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.TupleStart && slice[-1] == 0); + if (slice.Count <= 2) return STuple.Empty; - return FdbTuplePackers.Unpack(slice.Substring(1, slice.Count - 2), true); + return TuplePackers.Unpack(slice.Substring(1, slice.Count - 2), true); } /// Parse a tuple segment containing a single precision number (float32) + [Pure] public static float ParseSingle(Slice slice) { - Contract.Requires(slice.HasValue && slice[0] == FdbTupleTypes.Single); + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.Single); if (slice.Count != 5) { @@ -826,9 +952,10 @@ public static float ParseSingle(Slice slice) } /// Parse a tuple segment containing a double precision number (float64) + [Pure] public static double ParseDouble(Slice slice) { - Contract.Requires(slice.HasValue && slice[0] == FdbTupleTypes.Double); + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.Double); if (slice.Count != 9) { @@ -856,10 +983,25 @@ public static double ParseDouble(Slice slice) return value; } + /// Parse a tuple segment containing a quadruple precision number (float128) + [Pure] + public static decimal ParseDecimal(Slice slice) + { + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.Decimal); + + if (slice.Count != 17) + { + throw new FormatException("Slice has invalid size for a Decimal"); + } + + throw new NotImplementedException(); + } + /// Parse a tuple segment containing a 128-bit GUID + [Pure] public static Guid ParseGuid(Slice slice) { - Contract.Requires(slice.HasValue && slice[0] == FdbTupleTypes.Uuid128); + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.Uuid128); if (slice.Count != 17) { @@ -867,33 +1009,49 @@ public static Guid ParseGuid(Slice slice) } // We store them in RFC 4122 under the hood, so we need to reverse them to the MS format - return Uuid128.Convert(new Slice(slice.Array, slice.Offset + 1, 16)); + return Uuid128.Convert(slice.Substring(1, 
16)); } /// Parse a tuple segment containing a 128-bit UUID + [Pure] public static Uuid128 ParseUuid128(Slice slice) { - Contract.Requires(slice.HasValue && slice[0] == FdbTupleTypes.Uuid128); + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.Uuid128); if (slice.Count != 17) { throw new FormatException("Slice has invalid size for a 128-bit UUID"); } - return new Uuid128(new Slice(slice.Array, slice.Offset + 1, 16)); + return new Uuid128(slice.Substring(1, 16)); } /// Parse a tuple segment containing a 64-bit UUID + [Pure] public static Uuid64 ParseUuid64(Slice slice) { - Contract.Requires(slice.HasValue && slice[0] == FdbTupleTypes.Uuid64); + Contract.Requires(slice.HasValue && slice[0] == TupleTypes.Uuid64); if (slice.Count != 9) { throw new FormatException("Slice has invalid size for a 64-bit UUID"); } - return new Uuid64(new Slice(slice.Array, slice.Offset + 1, 8)); + return Uuid64.Read(slice.Substring(1, 8)); + } + + /// Parse a tuple segment containing an 80-bit or 96-bit VersionStamp + [Pure] + public static VersionStamp ParseVersionStamp(Slice slice) + { + Contract.Requires(slice.HasValue && (slice[0] == TupleTypes.VersionStamp80 || slice[0] == TupleTypes.VersionStamp96)); + + if (slice.Count != 11 && slice.Count != 13) + { + throw new FormatException("Slice has invalid size for a VersionStamp"); + } + + return VersionStamp.Parse(slice.Substring(1)); } #endregion @@ -913,7 +1071,7 @@ public static Slice ParseNext(ref TupleReader reader) return Slice.Nil; } - case FdbTupleTypes.Nil: + case TupleTypes.Nil: { // <00> / <00> => null if (reader.Depth > 0) { // must be <00> inside an embedded tuple @@ -935,59 +1093,79 @@ public static Slice ParseNext(ref TupleReader reader) } } - case FdbTupleTypes.Bytes: + case TupleTypes.Bytes: { // <01>(bytes)<00> return reader.Input.ReadByteString(); } - case FdbTupleTypes.Utf8: + case TupleTypes.Utf8: { // <02>(utf8 bytes)<00> return reader.Input.ReadByteString(); } - case FdbTupleTypes.TupleStart: + case 
TupleTypes.TupleStart: { // <03>(packed tuple)<04> //PERF: currently, we will first scan to get all the bytes of this tuple, and parse it later. // This means that we may need to scan multiple times the bytes, which may not be efficient if there are multiple embedded tuples inside each other return ReadEmbeddedTupleBytes(ref reader); } - case FdbTupleTypes.Single: + case TupleTypes.Single: { // <20>(4 bytes) return reader.Input.ReadBytes(5); } - case FdbTupleTypes.Double: + case TupleTypes.Double: { // <21>(8 bytes) return reader.Input.ReadBytes(9); } - case FdbTupleTypes.Uuid128: + case TupleTypes.Triple: + { // <22>(10 bytes) + return reader.Input.ReadBytes(11); + } + + case TupleTypes.Decimal: + { // <23>(16 bytes) + return reader.Input.ReadBytes(17); + } + + case TupleTypes.Uuid128: { // <30>(16 bytes) return reader.Input.ReadBytes(17); } - case FdbTupleTypes.Uuid64: + case TupleTypes.Uuid64: { // <31>(8 bytes) return reader.Input.ReadBytes(9); } - case FdbTupleTypes.AliasDirectory: - case FdbTupleTypes.AliasSystem: + case TupleTypes.VersionStamp80: + { // <32>(10 bytes) + return reader.Input.ReadBytes(11); + } + + case TupleTypes.VersionStamp96: + { // <33>(12 bytes) + return reader.Input.ReadBytes(13); + } + + case TupleTypes.AliasDirectory: + case TupleTypes.AliasSystem: { // or return reader.Input.ReadBytes(1); } } - if (type <= FdbTupleTypes.IntPos8 && type >= FdbTupleTypes.IntNeg8) + if (type <= TupleTypes.IntPos8 && type >= TupleTypes.IntNeg8) { - int bytes = type - FdbTupleTypes.IntZero; + int bytes = type - TupleTypes.IntZero; if (bytes < 0) bytes = -bytes; return reader.Input.ReadBytes(1 + bytes); } - throw new FormatException(String.Format("Invalid tuple type byte {0} at index {1}/{2}", type, reader.Input.Position, reader.Input.Buffer.Count)); + throw new FormatException($"Invalid tuple type byte {type} at index {reader.Input.Position}/{reader.Input.Buffer.Count}"); } /// Read an embedded tuple, without parsing it @@ -1015,7 +1193,7 @@ internal 
static Slice ReadEmbeddedTupleBytes(ref TupleReader reader) // else: ignore this token, it will be processed later if the tuple is unpacked and accessed } - throw new FormatException(String.Format("Truncated embedded tuple started at index {0}/{1}", start, reader.Input.Buffer.Count)); + throw new FormatException($"Truncated embedded tuple started at index {start}/{reader.Input.Buffer.Count}"); } /// Skip a number of tokens @@ -1028,7 +1206,7 @@ public static bool Skip(ref TupleReader reader, int count) while (count-- > 0) { if (!reader.Input.HasMore) return false; - var token = FdbTupleParser.ParseNext(ref reader); + var token = TupleParser.ParseNext(ref reader); if (token.IsNull) return false; } return true; @@ -1038,11 +1216,11 @@ public static bool Skip(ref TupleReader reader, int count) /// Reader positionned at the start of a packed tuple /// Lambda called for each segment of a tuple. Returns true to continue parsing, or false to stop /// Number of tokens that have been visited until either returned false, or reached the end. 
- public static T VisitNext(ref TupleReader reader, Func visitor) + public static T VisitNext(ref TupleReader reader, Func visitor) { if (!reader.Input.HasMore) throw new InvalidOperationException("The reader has already reached the end"); - var token = FdbTupleParser.ParseNext(ref reader); - return visitor(token, FdbTupleTypes.DecodeSegmentType(ref token)); + var token = TupleParser.ParseNext(ref reader); + return visitor(token, TupleTypes.DecodeSegmentType(token)); } #endregion @@ -1092,7 +1270,7 @@ public static int MostSignificantBit(uint v) { // from: http://graphics.stanford.edu/~seander/bithacks.html#IntegerLogDeBruijn - v |= v >> 1; // first round down to one less than a power of 2 + v |= v >> 1; // first round down to one less than a power of 2 v |= v >> 2; v |= v >> 4; v |= v >> 8; diff --git a/FoundationDB.Client/Layers/Tuples/TupleReader.cs b/FoundationDB.Client/Tuples/Encoding/TupleReader.cs similarity index 71% rename from FoundationDB.Client/Layers/Tuples/TupleReader.cs rename to FoundationDB.Client/Tuples/Encoding/TupleReader.cs index 14e1b9895..5b0d4d8de 100644 --- a/FoundationDB.Client/Layers/Tuples/TupleReader.cs +++ b/FoundationDB.Client/Tuples/Encoding/TupleReader.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,11 +26,14 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples.Encoding { - using FoundationDB.Client; using System; using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using JetBrains.Annotations; [DebuggerDisplay("{Input.Position}/{Input.Buffer.Count} @ {Depth}")] public struct TupleReader @@ -38,17 +41,32 @@ public struct TupleReader public SliceReader Input; public int Depth; + [ MethodImpl(MethodImplOptions.AggressiveInlining)] public TupleReader(Slice buffer) { this.Input = new SliceReader(buffer); this.Depth = 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public TupleReader(Slice buffer, int depth) + { + this.Input = new SliceReader(buffer); + this.Depth = depth; + } + public TupleReader(SliceReader input) { this.Input = input; this.Depth = 0; } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static TupleReader Embedded(Slice packed) + { + Contract.Requires(packed.Count >= 2 && packed[0] == TupleTypes.TupleStart && packed[-1] == 0); + return new TupleReader(packed.Substring(1, packed.Count - 2), 1); + } } } diff --git a/FoundationDB.Client/Layers/Tuples/FdbTupleTypes.cs b/FoundationDB.Client/Tuples/Encoding/TupleTypes.cs similarity index 71% rename from FoundationDB.Client/Layers/Tuples/FdbTupleTypes.cs rename to FoundationDB.Client/Tuples/Encoding/TupleTypes.cs index 3fd25bc45..99c736761 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbTupleTypes.cs +++ b/FoundationDB.Client/Tuples/Encoding/TupleTypes.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,15 +26,14 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples.Encoding { - using FoundationDB.Client; using System; /// /// Constants for the various tuple value types /// - internal static class FdbTupleTypes + internal static class TupleTypes { /// Null/Empty/Void internal const byte Nil = 0; @@ -45,8 +44,12 @@ internal static class FdbTupleTypes /// UTF-8 String internal const byte Utf8 = 2; - /// Nested tuple [DRAFT] + /// Nested tuple start [OBSOLETE] internal const byte TupleStart = 3; + /// Nested tuple end [OBSOLETE] + internal const byte TupleEnd = 4; + + internal const byte TupleStartNew = 5; internal const byte IntNeg8 = 12; internal const byte IntNeg7 = 13; @@ -73,12 +76,21 @@ internal static class FdbTupleTypes internal const byte Single = 32; /// Double precision decimals (64-bit, Big-Endian) [DRAFT] internal const byte Double = 33; + /// Triple precision decimals (80-bit, Big-Endian) [DRAFT] + internal const byte Triple = 34; //note: javascript numbers + /// Quadruple precision decimals (128-bit, Big-Endian) [DRAFT] + internal const byte Decimal = 35; /// RFC4122 UUID (128 bits) [DRAFT] internal const byte Uuid128 = 48; /// UUID (64 bits) [DRAFT] internal const byte Uuid64 = 49; //TODO: this is not official yet! may change! + //TODO: xmldoc + internal const byte VersionStamp80 = 0x32; + //TODO: xmldoc + internal const byte VersionStamp96 = 0x33; + /// Standard prefix of the Directory Layer /// This is not a part of the tuple encoding itself, but helps the tuple decoder pretty-print tuples that would otherwise be unparsable. 
internal const byte AliasDirectory = 254; @@ -88,34 +100,38 @@ internal static class FdbTupleTypes internal const byte AliasSystem = 255; /// Return the type of a tuple segment, from its header - public static FdbTupleSegmentType DecodeSegmentType(ref Slice segment) + public static TupleSegmentType DecodeSegmentType(Slice segment) { - if (segment.Count == 0) return FdbTupleSegmentType.Nil; + if (segment.Count == 0) return TupleSegmentType.Nil; int type = segment[0]; switch(type) { - case Nil: return FdbTupleSegmentType.Nil; - case Bytes: return FdbTupleSegmentType.ByteString; - case Utf8: return FdbTupleSegmentType.UnicodeString; - case TupleStart: return FdbTupleSegmentType.Tuple; - case Single: return FdbTupleSegmentType.Single; - case Double: return FdbTupleSegmentType.Double; - case Uuid128: return FdbTupleSegmentType.Uuid128; - case Uuid64: return FdbTupleSegmentType.Uuid64; + case Nil: return TupleSegmentType.Nil; + case Bytes: return TupleSegmentType.ByteString; + case Utf8: return TupleSegmentType.UnicodeString; + case TupleStart: return TupleSegmentType.Tuple; + case Single: return TupleSegmentType.Single; + case Double: return TupleSegmentType.Double; + case Triple: return TupleSegmentType.Triple; + case Decimal: return TupleSegmentType.Decimal; + case Uuid128: return TupleSegmentType.Uuid128; + case Uuid64: return TupleSegmentType.Uuid64; + case VersionStamp80: return TupleSegmentType.VersionStamp80; + case VersionStamp96: return TupleSegmentType.VersionStamp96; } if (type <= IntPos8 && type >= IntNeg8) { - return FdbTupleSegmentType.Integer; + return TupleSegmentType.Integer; } - return FdbTupleSegmentType.Invalid; + return TupleSegmentType.Invalid; } } /// Logical type of packed element of a tuple - public enum FdbTupleSegmentType + public enum TupleSegmentType { Invalid = -1, Nil = 0, @@ -125,8 +141,12 @@ public enum FdbTupleSegmentType Integer = 20, Single = 32, Double = 33, + Triple = 34, + Decimal = 35, Uuid128 = 48, Uuid64 = 49, + VersionStamp80 = 
0x32, + VersionStamp96 = 0x33, } } diff --git a/FoundationDB.Client/Layers/Tuples/TupleWriter.cs b/FoundationDB.Client/Tuples/Encoding/TupleWriter.cs similarity index 85% rename from FoundationDB.Client/Layers/Tuples/TupleWriter.cs rename to FoundationDB.Client/Tuples/Encoding/TupleWriter.cs index 9d33d227a..d43716688 100644 --- a/FoundationDB.Client/Layers/Tuples/TupleWriter.cs +++ b/FoundationDB.Client/Tuples/Encoding/TupleWriter.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,15 +26,18 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples.Encoding { - using FoundationDB.Client; using System; using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense.Memory; [DebuggerDisplay("{Output.Position}/{Output.Buffer.Length} @ {Depth}")] public struct TupleWriter { + //TODO: Could we use "ref SliceWriter" here with C# 7? + public SliceWriter Output; public int Depth; @@ -49,7 +52,13 @@ public TupleWriter(int capacity) this.Output = new SliceWriter(capacity); this.Depth = 0; } - + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public Slice ToSlice() + { + return this.Output.ToSlice(); + } + } } diff --git a/FoundationDB.Client/Layers/Tuples/FdbTupleAlias.cs b/FoundationDB.Client/Tuples/FdbTupleAlias.cs similarity index 96% rename from FoundationDB.Client/Layers/Tuples/FdbTupleAlias.cs rename to FoundationDB.Client/Tuples/FdbTupleAlias.cs index 18f377534..cadcc0d53 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbTupleAlias.cs +++ b/FoundationDB.Client/Tuples/FdbTupleAlias.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { using System; diff --git a/FoundationDB.Client/Layers/Tuples/FdbMemoizedTuple.cs b/FoundationDB.Client/Tuples/MemoizedTuple.cs similarity index 73% rename from FoundationDB.Client/Layers/Tuples/FdbMemoizedTuple.cs rename to FoundationDB.Client/Tuples/MemoizedTuple.cs index 2e03ac6d6..f00eff56c 100644 --- a/FoundationDB.Client/Layers/Tuples/FdbMemoizedTuple.cs +++ b/FoundationDB.Client/Tuples/MemoizedTuple.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,27 +26,27 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Layers.Tuples +namespace Doxense.Collections.Tuples { - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using FoundationDB.Client.Utils; using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Diagnostics.Contracts; + using Doxense.Runtime.Converters; /// Represents an immutable tuple where the packed bytes are cached [DebuggerDisplay("{ToString()}")] - public sealed class FdbMemoizedTuple : IFdbTuple + public sealed class MemoizedTuple : ITuple { /// Items of the tuple private readonly object[] m_items; /// Packed version of the tuple - private Slice m_packed; //PERF: readonly struct + private readonly Slice m_packed; - internal FdbMemoizedTuple(object[] items, Slice packed) + internal MemoizedTuple(object[] items, Slice packed) { Contract.Requires(items != null); Contract.Requires(packed.HasValue); @@ -67,12 +67,12 @@ public int Count public object this[int index] { - get { return 
m_items[FdbTuple.MapIndex(index, m_items.Length)]; } + get { return m_items[TupleHelpers.MapIndex(index, m_items.Length)]; } } - public IFdbTuple this[int? fromIncluded, int? toExcluded] + public ITuple this[int? fromIncluded, int? toExcluded] { - get { return FdbTuple.Splice(this, fromIncluded, toExcluded); } + get { return TupleHelpers.Splice(this, fromIncluded, toExcluded); } } public void PackTo(ref TupleWriter writer) @@ -88,14 +88,9 @@ public Slice ToSlice() return m_packed; } - Slice IFdbKey.ToFoundationDbKey() + public MemoizedTuple Copy() { - return this.ToSlice(); - } - - public FdbMemoizedTuple Copy() - { - return new FdbMemoizedTuple( + return new MemoizedTuple( (object[])(m_items.Clone()), m_packed.Memoize() ); @@ -110,29 +105,29 @@ public object[] ToArray() public R Get(int index) { - return FdbConverters.ConvertBoxed(this[index]); + return TypeConverters.ConvertBoxed(this[index]); } public R Last() { int n = m_items.Length; if (n == 0) throw new InvalidOperationException("Tuple is emtpy"); - return FdbConverters.ConvertBoxed(m_items[n - 1]); + return TypeConverters.ConvertBoxed(m_items[n - 1]); } - IFdbTuple IFdbTuple.Append(T value) + ITuple ITuple.Append(T value) { return this.Append(value); } - public FdbLinkedTuple Append(T value) + public LinkedTuple Append(T value) { - return new FdbLinkedTuple(this, value); + return new LinkedTuple(this, value); } - public IFdbTuple Concat(IFdbTuple tuple) + public ITuple Concat(ITuple tuple) { - return FdbTuple.Concat(this, tuple); + return STuple.Concat(this, tuple); } public void CopyTo(object[] array, int offset) @@ -152,25 +147,25 @@ System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() public override string ToString() { - return FdbTuple.ToString(m_items, 0, m_items.Length); + return STuple.Formatter.ToString(m_items, 0, m_items.Length); } public override bool Equals(object obj) { - return Equals(obj as IFdbTuple); + return Equals(obj as ITuple); } - public bool Equals(IFdbTuple 
other) + public bool Equals(ITuple other) { if (object.ReferenceEquals(other, null)) return false; - var memoized = other as FdbMemoizedTuple; + var memoized = other as MemoizedTuple; if (!object.ReferenceEquals(memoized, null)) { return m_packed.Equals(memoized.m_packed); } - return FdbTuple.Equals(this, other, SimilarValueComparer.Default); + return TupleHelpers.Equals(this, other, SimilarValueComparer.Default); } public override int GetHashCode() @@ -180,12 +175,12 @@ public override int GetHashCode() bool IStructuralEquatable.Equals(object other, System.Collections.IEqualityComparer comparer) { - return FdbTuple.Equals(this, other, comparer); + return TupleHelpers.Equals(this, other, comparer); } int System.Collections.IStructuralEquatable.GetHashCode(System.Collections.IEqualityComparer comparer) { - return FdbTuple.StructuralGetHashCode(this, comparer); + return TupleHelpers.StructuralGetHashCode(this, comparer); } } diff --git a/FoundationDB.Client/Tuples/TuPack.cs b/FoundationDB.Client/Tuples/TuPack.cs new file mode 100644 index 000000000..6788bab1d --- /dev/null +++ b/FoundationDB.Client/Tuples/TuPack.cs @@ -0,0 +1,1234 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Collections.Tuples +{ + using System; + using System.Collections.Generic; + using System.Runtime.CompilerServices; + using Doxense.Diagnostics.Contracts; + using Doxense.Collections.Tuples.Encoding; + using Doxense.Memory; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; + using JetBrains.Annotations; + + /// Tuple Binary Encoding + [PublicAPI] + public static class TuPack + { + + /// Key Encoding that use the Tuple Binary Encoding + [NotNull] + public static ITypeSystem Encoding => TupleKeyEncoding.Instance; + + #region Packing... 
+ + // Without prefix + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack([CanBeNull] TTuple tuple) + where TTuple : ITuple + { + return TupleEncoder.Pack(tuple); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(STuple tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, ref tuple); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(STuple tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, ref tuple); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(STuple tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, ref tuple); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(STuple tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, ref tuple); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(STuple tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, ref tuple); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(STuple tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, ref tuple); + } + + /// Pack a tuple into a slice + /// Tuple that must be 
serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(ValueTuple tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuple.ToSTuple()); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack((T1, T2) tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuple.ToSTuple()); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack((T1, T2, T3) tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuple.ToSTuple()); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack((T1, T2, T3, T4) tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuple.ToSTuple()); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack((T1, T2, T3, T4, T5) tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuple.ToSTuple()); + } + + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack((T1, T2, T3, T4, T5, T6) tuple) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuple.ToSTuple()); + } + + /// Pack an array of N-tuples, all sharing the same buffer + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] + [Pure, NotNull, 
MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples([NotNull] params ITuple[] tuples) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuples); + } + + /// Pack an array of 1-tuples, all sharing the same buffer + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples([NotNull] params STuple[] tuples) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuples); + } + + /// Pack an array of 2-tuples, all sharing the same buffer + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples([NotNull] params STuple[] tuples) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuples); + } + + /// Pack an array of 3-tuples, all sharing the same buffer + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples([NotNull] params STuple[] tuples) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuples); + } + + /// Pack an array of 4-tuples, all sharing the same buffer + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples([NotNull] 
params STuple[] tuples) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuples); + } + + /// Pack an array of 5-tuples, all sharing the same buffer + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples([NotNull] params STuple[] tuples) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuples); + } + + /// Pack an array of 6-tuples, all sharing the same buffer + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples([NotNull] params STuple[] tuples) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuples); + } + + /// Pack a sequence of N-tuples, all sharing the same buffer + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack([ ("Foo", 1), ("Foo", 2) ]) => [ "\x02Foo\x00\x15\x01", "\x02Foo\x00\x15\x02" ] + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples([NotNull, InstantHandle] this IEnumerable tuples) + { + var empty = default(Slice); + return TupleEncoder.Pack(empty, tuples); + } + + /// Efficiently write the packed representation of a tuple + /// Output buffer + /// Tuple that must be serialized into a binary slice + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void PackTo(ref SliceWriter writer, [CanBeNull] TTuple tuple) + where TTuple : ITuple + { + TupleEncoder.PackTo(ref writer, tuple); + } + + // With prefix + + /// Efficiently concatenate a prefix with the packed representation of a tuple + /// 
Prefix added to the start of the packed slice + /// Pack a tuple into a slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(Slice prefix, [CanBeNull] TTuple tuple) + where TTuple : ITuple + { + return TupleEncoder.Pack(prefix, tuple); + } + + /// Pack a tuple into a slice + /// Prefix added to the start of the packed slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(Slice prefix, STuple tuple) + { + return TupleEncoder.Pack(prefix, ref tuple); + } + + /// Pack a tuple into a slice + /// Prefix added to the start of the packed slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(Slice prefix, STuple tuple) + { + return TupleEncoder.Pack(prefix, ref tuple); + } + + /// Pack a tuple into a slice + /// Prefix added to the start of the packed slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(Slice prefix, STuple tuple) + { + return TupleEncoder.Pack(prefix, ref tuple); + } + + /// Pack a tuple into a slice + /// Prefix added to the start of the packed slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(Slice prefix, STuple tuple) + { + return TupleEncoder.Pack(prefix, ref tuple); + } + + /// Pack a tuple into a slice + /// Prefix added to the start of the packed slice + /// Tuple that must be serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(Slice prefix, STuple tuple) + { + return TupleEncoder.Pack(prefix, ref tuple); + } + + /// Pack a tuple into a slice + /// Common prefix added to all the tuples + /// Tuple that must be 
serialized into a binary slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice Pack(Slice prefix, STuple tuple) + { + return TupleEncoder.Pack(prefix, ref tuple); + } + + /// Pack an array of N-tuples, all sharing the same buffer + /// Common prefix added to all the tuples + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack("abc", [ ("Foo", 1), ("Foo", 2) ]) => [ "abc\x02Foo\x00\x15\x01", "abc\x02Foo\x00\x15\x02" ] + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples(Slice prefix, [NotNull] params ITuple[] tuples) + { + return TupleEncoder.Pack(prefix, tuples); + } + + /// Pack a sequence of N-tuples, all sharing the same buffer + /// Common prefix added to all the tuples + /// Sequence of N-tuples to pack + /// Array containing the buffer segment of each packed tuple + /// BatchPack("abc", [ ("Foo", 1), ("Foo", 2) ]) => [ "abc\x02Foo\x00\x15\x01", "abc\x02Foo\x00\x15\x02" ] + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples(Slice prefix, [NotNull] IEnumerable tuples) + { + return TupleEncoder.Pack(prefix, tuples); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples(Slice prefix, [NotNull] TElement[] elements, Func transform) + where TTuple : ITuple + { + return TupleEncoder.Pack(prefix, elements, transform); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] PackTuples(Slice prefix, [NotNull] IEnumerable elements, Func transform) + where TTuple : ITuple + { + return TupleEncoder.Pack(prefix, elements, transform); + } + + #endregion + + #region Encode + + //REVIEW: EncodeKey/EncodeKeys? Encode/EncodeRange? EncodeValues? EncodeItems? 
+ + /// Pack a 1-tuple directly into a slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodeKey(T1 item1) + { + return TupleEncoder.EncodeKey(default(Slice), item1); + } + + /// Pack a 2-tuple directly into a slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodeKey(T1 item1, T2 item2) + { + return TupleEncoder.EncodeKey(default(Slice), item1, item2); + } + + /// Pack a 3-tuple directly into a slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodeKey(T1 item1, T2 item2, T3 item3) + { + return TupleEncoder.EncodeKey(default(Slice), item1, item2, item3); + } + + /// Pack a 4-tuple directly into a slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4) + { + return TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4); + } + + /// Pack a 5-tuple directly into a slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + return TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4, item5); + } + + /// Pack a 6-tuple directly into a slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + return TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4, item5, item6); + } + + /// Pack a 6-tuple directly into a slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) + { + return TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4, item5, item6, item7); + } + + /// Pack a 6-tuple directly into a slice + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 
item4, T5 item5, T6 item6, T7 item7, T8 item8) + { + return TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4, item5, item6, item7, item8); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] EncodeKeys([NotNull] IEnumerable keys) + { + var empty = default(Slice); + return TupleEncoder.EncodeKeys(empty, keys); + } + + /// Merge a sequence of keys with a same prefix, all sharing the same buffer + /// Type of the keys + /// Prefix shared by all keys + /// Sequence of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] EncodePrefixedKeys(Slice prefix, [NotNull] IEnumerable keys) + { + return TupleEncoder.EncodeKeys(prefix, keys); + } + + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] EncodeKeys([NotNull] params T[] keys) + { + var empty = default(Slice); + return TupleEncoder.EncodeKeys(empty, keys); + } + + /// Merge an array of keys with a same prefix, all sharing the same buffer + /// Type of the keys + /// Prefix shared by all keys + /// Sequence of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] EncodePrefixedKeys(Slice prefix, [NotNull] params T[] keys) + { + return TupleEncoder.EncodeKeys(prefix, keys); + } + + /// Merge an array of elements, all sharing the same buffer + /// Type of the elements + /// Type of the keys extracted from the elements + /// Sequence of elements to pack + /// Lambda that extract the key from each element + /// Array of slices (for all keys) that share the same underlying buffer + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] EncodeKeys([NotNull] TElement[] elements, [NotNull] Func selector) + { + var empty = default(Slice); + return 
TupleEncoder.EncodeKeys(empty, elements, selector); + } + + /// Merge an array of elements with a same prefix, all sharing the same buffer + /// Type of the elements + /// Type of the keys extracted from the elements + /// Prefix shared by all keys (can be empty) + /// Sequence of elements to pack + /// Lambda that extract the key from each element + /// Array of slices (for all keys) that share the same underlying buffer + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] EncodePrefixedKeys(Slice prefix, [NotNull] TElement[] elements, [NotNull] Func selector) + { + return TupleEncoder.EncodeKeys(prefix, elements, selector); + } + + /// Pack a sequence of keys with a same prefix, all sharing the same buffer + /// Type of the keys + /// Prefix shared by all keys + /// Sequence of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] EncodePrefixedKeys([NotNull] ITuple prefix, [NotNull] IEnumerable keys) + { + Contract.NotNull(prefix, nameof(prefix)); + + return EncodePrefixedKeys(Pack(prefix), keys); + } + + /// Pack a sequence of keys with a same prefix, all sharing the same buffer + /// Type of the keys + /// Prefix shared by all keys + /// Sequence of keys to pack + /// Array of slices (for all keys) that share the same underlying buffer + [Pure, NotNull, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice[] EncodePrefixedKeys([NotNull] ITuple prefix, [NotNull] params T[] keys) + { + Contract.NotNull(prefix, nameof(prefix)); + + return EncodePrefixedKeys(Pack(prefix), keys); + } + + #endregion + + #region Ranges... 
+ + /// Create a range that selects all tuples that are stored under the specified subspace: 'prefix\x00' <= k < 'prefix\xFF' + /// Subspace binary prefix (that will be excluded from the range) + /// Range including all possible tuples starting with the specified prefix. + /// TuPack.ToRange(Slice.FromAscii("abc")) returns the range [ 'abc\x00', 'abc\xFF' ) + [Pure] + public static (Slice Begin, Slice End) ToRange(Slice prefix) + { + if (prefix.IsNull) throw new ArgumentNullException(nameof(prefix)); + //note: there is no guarantee that prefix is a valid packed tuple (could be any exotic binary prefix) + + // prefix => [ prefix."\0", prefix."\xFF" ) + return ( + prefix + 0x00, + prefix + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// TuPack.ToRange(STuple.Create("a", "b")) includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. + [Pure] + public static (Slice Begin, Slice End) ToRange([NotNull] TTuple tuple) + where TTuple : ITuple + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.Pack(tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// ToRange(STuple.Create("a", "b")) includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. 
+ [Pure] + public static (Slice Begin, Slice End) ToRange(STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(default(Slice), tuple.Item1); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// ToRange(STuple.Create("a", "b")) includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. + [Pure] + public static (Slice Begin, Slice End) ToRange(ValueTuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(default(Slice), tuple.Item1); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified element, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// ToRange(STuple.Create("a", "b")) includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. + [Pure] + public static (Slice Begin, Slice End) ToKeyRange(T1 item1) + { + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(default(Slice), item1); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified element, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// ToRange(STuple.Create("a", "b")) includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. 
+ [Pure] + public static (Slice Begin, Slice End) ToPrefixedKeyRange(Slice prefix, T1 item1) + { + // tuple => [ prefix.packed."\0", prefix.packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(prefix, item1); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// ToRange(STuple.Create("a", "b")) includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. + [Pure] + public static (Slice Begin, Slice End) ToRange(STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange((T1, T2) tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// ToKeyRange("a", "b") includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. 
+ [Pure] + public static (Slice Begin, Slice End) ToKeyRange(T1 item1, T2 item2) + { + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(default(Slice), item1, item2); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// ToPrefixedKeyRange(..., "a", "b")) includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. + [Pure] + public static (Slice Begin, Slice End) ToPrefixedKeyRange(Slice prefix, T1 item1, T2 item2) + { + // tuple => [ prefix.packed."\0", prefix.packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(prefix, item1, item2); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange(STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange((T1, T2, T3) tuple) + { + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice 
Begin, Slice End) ToKeyRange(T1 item1, T2 item2, T3 item3) + { + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(default(Slice), item1, item2, item3); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToPrefixedKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3) + { + // tuple => [ prefix.packed."\0", prefix.packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(prefix, item1, item2, item3); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange(STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange((T1, T2, T3, T4) tuple) + { + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4) + { + // tuple => [ packed."\0", 
packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToPrefixedKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4) + { + // tuple => [ prefix.packed."\0", prefix.packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(prefix, item1, item2, item3, item4); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange(STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange((T1, T2, T3, T4, T5) tuple) + { + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + // tuple => [ packed."\0", packed."\xFF" ) + var packed = 
TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4, item5); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToPrefixedKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) + { + // tuple => [ prefix.packed."\0", prefix.packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(prefix, item1, item2, item3, item4, item5); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange(STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange((T1, T2, T3, T4, T5, T6) tuple) + { + // tuple => [ packed."\0", packed."\xFF" ) + var empty = default(Slice); + var packed = TupleEncoder.Pack(empty, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + // tuple => [ packed."\0", packed."\xFF" ) + var packed = 
TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4, item5, item6); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToPrefixedKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) + { + // tuple => [ prefix.packed."\0", prefix.packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(prefix, item1, item2, item3, item4, item5, item6); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) + { + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4, item5, item6, item7); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToPrefixedKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) + { + // tuple => [ prefix.packed."\0", prefix.packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(prefix, item1, item2, item3, item4, item5, item6, item7); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) 
ToKeyRange(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) + { + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(default(Slice), item1, item2, item3, item4, item5, item6, item7, item8); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified items, and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToPrefixedKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) + { + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.EncodeKey(prefix, item1, item2, item3, item4, item5, item6, item7, item8); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// TuPack.ToRange(Slice.FromInt32(42), Stuple.Create("a", "b")) includes all tuples \x2A.("a", "b", ...), but not the tuple \x2A.("a", "b") itself. + /// If is the packed representation of a tuple, then unpacking the resulting key will produce a valid tuple. If not, then the resulting key will need to be truncated first before unpacking. 
+ [Pure] + public static (Slice Begin, Slice End) ToRange(Slice prefix, [NotNull] TTuple tuple) + where TTuple : ITuple + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ prefix.packed."\0", prefix.packed."\xFF" ) + var packed = TupleEncoder.Pack(prefix, tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// TuPack.ToRange(STuple.Create("a")) includes all tuples ("a", ...), but not the tuple ("a") itself. + [Pure] + public static (Slice Begin, Slice End) ToRange(Slice prefix, STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.Pack(prefix, tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + /// TuPack.ToRange(STuple.Create("a", "b")) includes all tuples ("a", "b", ...), but not the tuple ("a", "b") itself. 
+ [Pure] + public static (Slice Begin, Slice End) ToRange(Slice prefix, STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.Pack(prefix, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange(Slice prefix, STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.Pack(prefix, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange(Slice prefix, STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.Pack(prefix, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange(Slice prefix, STuple tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.Pack(prefix, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + /// Create a range that selects all the tuples of greater length than the specified , and that start with the specified elements: packed(tuple)+'\x00' <= k < packed(tuple)+'\xFF' + [Pure] + public static (Slice Begin, Slice End) ToRange(Slice prefix, STuple 
tuple) + { + Contract.NotNullAllowStructs(tuple, nameof(tuple)); + + // tuple => [ packed."\0", packed."\xFF" ) + var packed = TupleEncoder.Pack(prefix, ref tuple); + return ( + packed + 0x00, + packed + 0xFF + ); + } + + #endregion + + #region Unpacking... + + /// Unpack a tuple from a serialied key blob + /// Binary key containing a previously packed tuple + /// Unpacked tuple, or the empty tuple if the key is + /// If is equal to + [Pure, NotNull] + public static ITuple Unpack(Slice packedKey) + { + if (packedKey.IsNull) throw new ArgumentNullException(nameof(packedKey), "Cannot unpack tuple from Nil"); + if (packedKey.Count == 0) return STuple.Empty; + + return TuplePackers.Unpack(packedKey, embedded: false); + } + + /// Unpack a tuple from a binary representation + /// Binary key containing a previously packed tuple, or Slice.Nil + /// Unpacked tuple, the empty tuple if is equal to , or null if the key is + [Pure, CanBeNull] + public static ITuple UnpackOrDefault(Slice packedKey) + { + if (packedKey.IsNull) return null; + if (packedKey.Count == 0) return STuple.Empty; + return TuplePackers.Unpack(packedKey, embedded: false); + } + + /// Unpack a tuple and only return its first element + /// Type of the first value in the decoded tuple + /// Slice that should be entirely parsable as a tuple + /// Decoded value of the first item in the tuple + [Pure] + public static T DecodeFirst(Slice packedKey) + { + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack the first element of an empty tuple"); + + var slice = TuplePackers.UnpackFirst(packedKey); + if (slice.IsNull) throw new InvalidOperationException("Failed to unpack tuple"); + + return TuplePacker.Deserialize(slice); + } + + /// Unpack a tuple and only return its last element + /// Type of the last value in the decoded tuple + /// Slice that should be entirely parsable as a tuple + /// Decoded value of the last item in the tuple + [Pure] + public static T DecodeLast(Slice packedKey) + 
{ + if (packedKey.IsNullOrEmpty) throw new InvalidOperationException("Cannot unpack the last element of an empty tuple"); + + var slice = TuplePackers.UnpackLast(packedKey); + if (slice.IsNull) throw new InvalidOperationException("Failed to unpack tuple"); + + return TuplePacker.Deserialize(slice); + } + + /// Unpack the value of a singleton tuple + /// Type of the single value in the decoded tuple + /// Slice that should contain the packed representation of a tuple with a single element + /// Decoded value of the only item in the tuple. Throws an exception if the tuple is empty of has more than one element. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static T1 DecodeKey(Slice packedKey) + { + TupleEncoder.DecodeKey(packedKey, out STuple tuple); + return tuple.Item1; + } + + /// Unpack a key containing two elements + /// Slice that should contain the packed representation of a tuple with two elements + /// Decoded value of the elements int the tuple. Throws an exception if the tuple is empty of has more than elements. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static STuple DecodeKey(Slice packedKey) + { + TupleEncoder.DecodeKey(packedKey, out STuple tuple); + return tuple; + } + + /// Unpack a key containing three elements + /// Slice that should contain the packed representation of a tuple with three elements + /// Decoded value of the elements int the tuple. Throws an exception if the tuple is empty of has more than elements. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static STuple DecodeKey(Slice packedKey) + { + TupleEncoder.DecodeKey(packedKey, out STuple tuple); + return tuple; + } + + /// Unpack a key containing four elements + /// Slice that should contain the packed representation of a tuple with four elements + /// Decoded value of the elements int the tuple. Throws an exception if the tuple is empty of has more than elements. 
+ [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static STuple DecodeKey(Slice packedKey) + { + TupleEncoder.DecodeKey(packedKey, out STuple tuple); + return tuple; + } + + /// Unpack a key containing five elements + /// Slice that should contain the packed representation of a tuple with five elements + /// Decoded value of the elements int the tuple. Throws an exception if the tuple is empty of has more than elements. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static STuple DecodeKey(Slice packedKey) + { + TupleEncoder.DecodeKey(packedKey, out STuple tuple); + return tuple; + } + + /// Unpack a key containing six elements + /// Slice that should contain the packed representation of a tuple with six elements + /// Decoded value of the elements int the tuple. Throws an exception if the tuple is empty of has more than elements. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static STuple DecodeKey(Slice packedKey) + { + TupleEncoder.DecodeKey(packedKey, out STuple tuple); + return tuple; + } + + /// Unpack the next item in the tuple, and advance the cursor + /// Type of the next value in the tuple + /// Reader positionned at the start of the next item to read + /// If decoding succeedsd, receives the decoded value. + /// True if the decoded succeeded (and receives the decoded value). False if the tuple has reached the end. + public static bool DecodeNext(ref TupleReader input, out T value) + { + if (!input.Input.HasMore) + { + value = default(T); + return false; + } + + var slice = TupleParser.ParseNext(ref input); + value = TuplePacker.Deserialize(slice); + return true; + } + + #endregion + + #region EncodePrefixedKey... 
+ + //note: they are equivalent to the Pack<...>() methods, they only take a binary prefix + + /// Efficiently concatenate a prefix with the packed representation of a 1-tuple + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodePrefixedKey(Slice prefix, T1 value) + { + return TupleEncoder.EncodeKey(prefix, value); + } + + /// Efficiently concatenate a prefix with the packed representation of a 2-tuple + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2) + { + return TupleEncoder.EncodeKey(prefix, value1, value2); + } + + /// Efficiently concatenate a prefix with the packed representation of a 3-tuple + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3) + { + return TupleEncoder.EncodeKey(prefix, value1, value2, value3); + } + + /// Efficiently concatenate a prefix with the packed representation of a 4-tuple + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4) + { + return TupleEncoder.EncodeKey(prefix, value1, value2, value3, value4); + } + + /// Efficiently concatenate a prefix with the packed representation of a 5-tuple + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5) + { + return TupleEncoder.EncodeKey(prefix, value1, value2, value3, value4, value5); + } + + /// Efficiently concatenate a prefix with the packed representation of a 6-tuple + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6) + { + return TupleEncoder.EncodeKey(prefix, value1, value2, value3, value4, value5, value6); + } + + /// Efficiently 
concatenate a prefix with the packed representation of a 7-tuple + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7) + { + return TupleEncoder.EncodeKey(prefix, value1, value2, value3, value4, value5, value6, value7); + } + + /// Efficiently concatenate a prefix with the packed representation of a 8-tuple + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Slice EncodePrefixedKey(Slice prefix, T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7, T8 value8) + { + return TupleEncoder.EncodeKey(prefix, value1, value2, value3, value4, value5, value6, value7, value8); + } + + #endregion + + } + +} diff --git a/FoundationDB.Client/Tuples/TupleSerializer.cs b/FoundationDB.Client/Tuples/TupleSerializer.cs new file mode 100644 index 000000000..4f38a920a --- /dev/null +++ b/FoundationDB.Client/Tuples/TupleSerializer.cs @@ -0,0 +1,138 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace Doxense.Collections.Tuples.Encoding +{ + using System; + using JetBrains.Annotations; + + public sealed class TupleSerializer : ITupleSerializer> + { + public static TupleSerializer Default { [NotNull] get; } = new TupleSerializer(); + + public void PackTo(ref TupleWriter writer, in STuple tuple) + { + TuplePackers.SerializeTo(ref writer, tuple.Item1); + } + + public void UnpackFrom(ref TupleReader reader, out STuple tuple) + { + TupleEncoder.DecodeKey(ref reader, out tuple); + } + } + + public sealed class TupleSerializer : ITupleSerializer> + { + public static TupleSerializer Default { [NotNull] get; } = new TupleSerializer(); + + public void PackTo(ref TupleWriter writer, in STuple tuple) + { + TuplePackers.SerializeTo(ref writer, tuple.Item1); + TuplePackers.SerializeTo(ref writer, tuple.Item2); + } + + public void UnpackFrom(ref TupleReader reader, out STuple tuple) + { + TupleEncoder.DecodeKey(ref reader, out tuple); + } + } + + public sealed class TupleSerializer : ITupleSerializer> + { + public static TupleSerializer Default { [NotNull] get; } = new TupleSerializer(); + + public void PackTo(ref TupleWriter writer, in STuple tuple) + { + TuplePackers.SerializeTo(ref writer, tuple.Item1); + 
TuplePackers.SerializeTo(ref writer, tuple.Item2); + TuplePackers.SerializeTo(ref writer, tuple.Item3); + } + + public void UnpackFrom(ref TupleReader reader, out STuple tuple) + { + TupleEncoder.DecodeKey(ref reader, out tuple); + } + } + + public sealed class TupleSerializer : ITupleSerializer> + { + public static TupleSerializer Default { [NotNull] get; } = new TupleSerializer(); + + public void PackTo(ref TupleWriter writer, in STuple tuple) + { + TuplePackers.SerializeTo(ref writer, tuple.Item1); + TuplePackers.SerializeTo(ref writer, tuple.Item2); + TuplePackers.SerializeTo(ref writer, tuple.Item3); + TuplePackers.SerializeTo(ref writer, tuple.Item4); + } + + public void UnpackFrom(ref TupleReader reader, out STuple tuple) + { + TupleEncoder.DecodeKey(ref reader, out tuple); + } + } + + public sealed class TupleSerializer : ITupleSerializer> + { + public static TupleSerializer Default { [NotNull] get; } = new TupleSerializer(); + + public void PackTo(ref TupleWriter writer, in STuple tuple) + { + TuplePackers.SerializeTo(ref writer, tuple.Item1); + TuplePackers.SerializeTo(ref writer, tuple.Item2); + TuplePackers.SerializeTo(ref writer, tuple.Item3); + TuplePackers.SerializeTo(ref writer, tuple.Item4); + TuplePackers.SerializeTo(ref writer, tuple.Item5); + } + + public void UnpackFrom(ref TupleReader reader, out STuple tuple) + { + TupleEncoder.DecodeKey(ref reader, out tuple); + } + } + + public sealed class TupleSerializer : ITupleSerializer> + { + public static TupleSerializer Default { [NotNull] get; } = new TupleSerializer(); + + public void PackTo(ref TupleWriter writer, in STuple tuple) + { + TuplePackers.SerializeTo(ref writer, tuple.Item1); + TuplePackers.SerializeTo(ref writer, tuple.Item2); + TuplePackers.SerializeTo(ref writer, tuple.Item3); + TuplePackers.SerializeTo(ref writer, tuple.Item4); + TuplePackers.SerializeTo(ref writer, tuple.Item5); + TuplePackers.SerializeTo(ref writer, tuple.Item6); + } + + public void UnpackFrom(ref TupleReader 
reader, out STuple tuple) + { + TupleEncoder.DecodeKey(ref reader, out tuple); + } + } +} diff --git a/FoundationDB.Client/TypeSystem/Encoders/DynamicKeyEncoderBase.cs b/FoundationDB.Client/TypeSystem/Encoders/DynamicKeyEncoderBase.cs deleted file mode 100644 index b5921b49a..000000000 --- a/FoundationDB.Client/TypeSystem/Encoders/DynamicKeyEncoderBase.cs +++ /dev/null @@ -1,142 +0,0 @@ -using System; -using FoundationDB.Layers.Tuples; - -namespace FoundationDB.Client -{ - public abstract class DynamicKeyEncoderBase : IDynamicKeyEncoder - { - - public abstract IFdbKeyEncoding Encoding { get; } - - public virtual FdbKeyRange ToRange(Slice prefix) - { - return FdbKeyRange.StartsWith(prefix); - } - - public abstract void PackKey(ref SliceWriter writer, IFdbTuple items); - - public virtual void EncodeKey(ref SliceWriter writer, T1 item1) - { - PackKey(ref writer, FdbTuple.Create(item1)); - } - - public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2) - { - PackKey(ref writer, FdbTuple.Create(item1, item2)); - } - - public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3) - { - PackKey(ref writer, FdbTuple.Create(item1, item2, item3)); - } - - public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4) - { - PackKey(ref writer, FdbTuple.Create(item1, item2, item3, item4)); - } - - public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) - { - PackKey(ref writer, FdbTuple.Create(item1, item2, item3, item4, item5)); - } - - public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) - { - PackKey(ref writer, FdbTuple.Create(item1, item2, item3, item4, item5, item6)); - } - - public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) - { - PackKey(ref writer, FdbTuple.Create(item1, item2, item3, item4, item5, item6, item7)); - } - - 
public virtual void EncodeKey(ref SliceWriter writer, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) - { - PackKey(ref writer, FdbTuple.Create(item1, item2, item3, item4, item5, item6, item7, item8)); - } - - public abstract IFdbTuple UnpackKey(Slice packed); - - public virtual T DecodeKey(Slice packed) - { - return UnpackKey(packed).OfSize(1).Get(0); - } - - public virtual T DecodeKeyFirst(Slice packed) - { - return UnpackKey(packed).OfSizeAtLeast(1).Get(0); - } - - public virtual T DecodeKeyLast(Slice packed) - { - return UnpackKey(packed).OfSizeAtLeast(1).Get(-1); - } - - public virtual FdbTuple DecodeKey(Slice packed) - { - return UnpackKey(packed).With((T1 a, T2 b) => FdbTuple.Create(a, b)); - } - - public virtual FdbTuple DecodeKey(Slice packed) - { - return UnpackKey(packed).With((T1 a, T2 b, T3 c) => FdbTuple.Create(a, b, c)); - } - - public virtual FdbTuple DecodeKey(Slice packed) - { - return UnpackKey(packed).With((T1 a, T2 b, T3 c, T4 d) => FdbTuple.Create(a, b, c, d)); - } - - public virtual FdbTuple DecodeKey(Slice packed) - { - return UnpackKey(packed).With((T1 a, T2 b, T3 c, T4 d, T5 e) => FdbTuple.Create(a, b, c, d, e)); - } - - public virtual FdbKeyRange ToRange(Slice prefix, IFdbTuple items) - { - var writer = new SliceWriter(prefix, 16); - PackKey(ref writer, items); - return ToRange(writer.ToSlice()); - } - - public virtual FdbKeyRange ToKeyRange(Slice prefix, T1 item1) - { - return ToRange(prefix, FdbTuple.Create(item1)); - } - - public virtual FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2) - { - return ToRange(prefix, FdbTuple.Create(item1, item2)); - } - - public virtual FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3) - { - return ToRange(prefix, FdbTuple.Create(item1, item3, item3)); - } - - public virtual FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4) - { - return ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4)); - } - - public virtual 
FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5) - { - return ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4, item5)); - } - - public virtual FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6) - { - return ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4, item5, item6)); - } - - public virtual FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7) - { - return ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4, item5, item6, item7)); - } - - public virtual FdbKeyRange ToKeyRange(Slice prefix, T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, T8 item8) - { - return ToRange(prefix, FdbTuple.Create(item1, item3, item3, item4, item5, item6, item7, item8)); - } - } -} \ No newline at end of file diff --git a/FoundationDB.Client/TypeSystem/Encoders/KeyValueEncoders.cs b/FoundationDB.Client/TypeSystem/Encoders/KeyValueEncoders.cs deleted file mode 100644 index ab82481c6..000000000 --- a/FoundationDB.Client/TypeSystem/Encoders/KeyValueEncoders.cs +++ /dev/null @@ -1,1149 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client -{ - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using JetBrains.Annotations; - using System; - using System.Collections.Generic; - using System.Linq; - - /// Helper class for all key/value encoders - public static class KeyValueEncoders - { - /// Identity function for binary slices - public static readonly IdentityEncoder Binary = new IdentityEncoder(); - - #region Nested Classes - - /// Identity encoder - public sealed class IdentityEncoder : IKeyEncoder, IValueEncoder - { - - internal IdentityEncoder() { } - - public Slice EncodeKey(Slice key) - { - return key; - } - - public Slice DecodeKey(Slice encoded) - { - return encoded; - } - - public Slice EncodeValue(Slice value) - { - return value; - } - - public Slice DecodeValue(Slice encoded) - { - return encoded; - } - } - - /// Wrapper for encoding and decoding a singleton with lambda functions - internal sealed class Singleton : IKeyEncoder, IValueEncoder - { - private readonly Func m_encoder; - private readonly Func m_decoder; - - public Singleton(Func encoder, Func decoder) - { - Contract.Requires(encoder != null && decoder != null); - - m_encoder = encoder; - m_decoder = decoder; - } - - public Type[] GetTypes() - { 
- return new[] { typeof(T) }; - } - - public Slice EncodeKey(T value) - { - return m_encoder(value); - } - - public T DecodeKey(Slice encoded) - { - return m_decoder(encoded); - } - - public Slice EncodeValue(T value) - { - return m_encoder(value); - } - - public T DecodeValue(Slice encoded) - { - return m_decoder(encoded); - } - - } - - /// Wrapper for encoding and decoding a pair with lambda functions - public abstract class CompositeKeyEncoder : ICompositeKeyEncoder - { - - public abstract Slice EncodeComposite(FdbTuple key, int items); - - public abstract FdbTuple DecodeComposite(Slice encoded, int items); - - public Slice EncodeKey(FdbTuple key) - { - return EncodeComposite(key, 2); - } - - public virtual Slice EncodeKey(T1 item1, T2 item2) - { - return EncodeComposite(FdbTuple.Create(item1, item2), 2); - } - - public virtual FdbTuple DecodeKey(Slice encoded) - { - return DecodeComposite(encoded, 2); - } - - public T1 DecodePartialKey(Slice encoded) - { - return DecodeComposite(encoded, 1).Item1; - } - - public HeadEncoder Head() - { - return new HeadEncoder(this); - } - - } - - /// Wrapper for encoding and decoding a triplet with lambda functions - public abstract class CompositeKeyEncoder : ICompositeKeyEncoder - { - - public abstract Slice EncodeComposite(FdbTuple items, int count); - - public abstract FdbTuple DecodeComposite(Slice encoded, int count); - - public Slice EncodeKey(FdbTuple key) - { - return EncodeComposite(key, 3); - } - - public virtual Slice EncodeKey(T1 item1, T2 item2, T3 item3) - { - return EncodeComposite(FdbTuple.Create(item1, item2, item3), 3); - } - - public virtual FdbTuple DecodeKey(Slice encoded) - { - return DecodeComposite(encoded, 3); - } - - public FdbTuple DecodeKey(Slice encoded, int items) - { - return DecodeComposite(encoded, items); - } - - public HeadEncoder Head() - { - return new HeadEncoder(this); - } - - public PairEncoder Pair() - { - return new PairEncoder(this); - } - - } - - /// Wrapper for encoding and decoding 
a quad with lambda functions - public abstract class CompositeKeyEncoder : ICompositeKeyEncoder - { - - public abstract Slice EncodeComposite(FdbTuple items, int count); - - public abstract FdbTuple DecodeComposite(Slice encoded, int count); - - public Slice EncodeKey(FdbTuple key) - { - return EncodeComposite(key, 4); - } - - public virtual Slice EncodeKey(T1 item1, T2 item2, T3 item3, T4 item4) - { - return EncodeComposite(FdbTuple.Create(item1, item2, item3, item4), 4); - } - - public virtual FdbTuple DecodeKey(Slice encoded) - { - return DecodeComposite(encoded, 4); - } - - public FdbTuple DecodeKey(Slice encoded, int items) - { - return DecodeComposite(encoded, items); - } - } - - /// Wrapper for a composite encoder that will only output the first key - public struct HeadEncoder : IKeyEncoder - { - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public HeadEncoder([NotNull] ICompositeKeyEncoder encoder) - { - Contract.Requires(encoder != null); - this.Encoder = encoder; - } - - public Slice EncodeKey(T1 value) - { - return this.Encoder.EncodeComposite(new FdbTuple(value, default(T2)), 1); - } - - public T1 DecodeKey(Slice encoded) - { - return this.Encoder.DecodeComposite(encoded, 1).Item1; - } - } - - /// Wrapper for a composite encoder that will only output the first key - public struct HeadEncoder : IKeyEncoder - { - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public HeadEncoder([NotNull] ICompositeKeyEncoder encoder) - { - Contract.Requires(encoder != null); - this.Encoder = encoder; - } - - public Slice EncodeKey(T1 value) - { - return this.Encoder.EncodeComposite(new FdbTuple(value, default(T2), default(T3)), 1); - } - - public T1 DecodeKey(Slice encoded) - { - return this.Encoder.DecodeComposite(encoded, 1).Item1; - } - } - - /// Wrapper for a composite encoder that will only output the first key - public struct HeadEncoder : IKeyEncoder - { - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public 
HeadEncoder([NotNull] ICompositeKeyEncoder encoder) - { - Contract.Requires(encoder != null); - this.Encoder = encoder; - } - - public Slice EncodeKey(T1 value) - { - return this.Encoder.EncodeComposite(new FdbTuple(value, default(T2), default(T3), default(T4)), 1); - } - - public T1 DecodeKey(Slice encoded) - { - return this.Encoder.DecodeComposite(encoded, 1).Item1; - } - } - - /// Wrapper for a composite encoder that will only output the first and second keys - public struct PairEncoder : ICompositeKeyEncoder - { - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public PairEncoder([NotNull] ICompositeKeyEncoder encoder) - { - Contract.Requires(encoder != null); - this.Encoder = encoder; - } - - public Slice EncodeKey(T1 value1, T2 value2) - { - return this.Encoder.EncodeComposite(new FdbTuple(value1, value2, default(T3)), 2); - } - - public Slice EncodeComposite(FdbTuple key, int items) - { - return this.Encoder.EncodeComposite(new FdbTuple(key.Item1, key.Item2, default(T3)), items); - } - - public FdbTuple DecodeComposite(Slice encoded, int items) - { - var t = this.Encoder.DecodeComposite(encoded, items); - return new FdbTuple(t.Item1, t.Item2); - } - - public Slice EncodeKey(FdbTuple value) - { - return EncodeComposite(value, 2); - } - - public FdbTuple DecodeKey(Slice encoded) - { - return DecodeComposite(encoded, 2); - } - - public HeadEncoder Head() - { - return new HeadEncoder(this.Encoder); - } - } - - /// Wrapper for a composite encoder that will only output the first and second keys - public struct PairEncoder : ICompositeKeyEncoder - { - [NotNull] - public readonly ICompositeKeyEncoder Encoder; - - public PairEncoder([NotNull] ICompositeKeyEncoder encoder) - { - Contract.Requires(encoder != null); - this.Encoder = encoder; - } - - public Slice EncodeKey(T1 value1, T2 value2) - { - return this.Encoder.EncodeComposite(new FdbTuple(value1, value2, default(T3), default(T4)), 2); - } - - public Slice EncodeComposite(FdbTuple key, int items) - 
{ - return this.Encoder.EncodeComposite(new FdbTuple(key.Item1, key.Item2, default(T3), default(T4)), items); - } - - public FdbTuple DecodeComposite(Slice encoded, int items) - { - var t = this.Encoder.DecodeComposite(encoded, items); - return new FdbTuple(t.Item1, t.Item2); - } - - public Slice EncodeKey(FdbTuple value) - { - return EncodeComposite(value, 2); - } - - public FdbTuple DecodeKey(Slice encoded) - { - return DecodeComposite(encoded, 2); - } - - public HeadEncoder Head() - { - return new HeadEncoder(this.Encoder); - } - } - - #endregion - - /// Encoders that produce lexicographically ordered slices, suitable for keys where lexicographical ordering is required - public static class Ordered - { - public static IKeyEncoder BinaryEncoder - { - [NotNull] - get { return Tuples.Key(); } - } - - public static IKeyEncoder StringEncoder - { - [NotNull] - get { return Tuples.Key(); } - } - - public static IKeyEncoder Int32Encoder - { - [NotNull] - get { return Tuples.Key(); } - } - - public static IKeyEncoder Int64Encoder - { - [NotNull] - get { return Tuples.Key(); } - } - - public static IKeyEncoder UInt64Encoder - { - [NotNull] - get { return Tuples.Key(); } - } - - public sealed class OrderedKeyEncoder : IKeyEncoder - { - private readonly IOrderedTypeCodec m_codec; - - public OrderedKeyEncoder(IOrderedTypeCodec codec) - { - Contract.Requires(codec != null); - m_codec = codec; - } - - public Slice EncodeKey(T key) - { - return m_codec.EncodeOrdered(key); - } - - public T DecodeKey(Slice encoded) - { - return m_codec.DecodeOrdered(encoded); - } - } - - public sealed class CodecCompositeKeyEncoder : CompositeKeyEncoder - { - private readonly IOrderedTypeCodec m_codec1; - private readonly IOrderedTypeCodec m_codec2; - - public CodecCompositeKeyEncoder(IOrderedTypeCodec codec1, IOrderedTypeCodec codec2) - { - m_codec1 = codec1; - m_codec2 = codec2; - } - - public override Slice EncodeComposite(FdbTuple items, int count) - { - Contract.Requires(count > 0); - - var 
writer = SliceWriter.Empty; - if (count >= 1) m_codec1.EncodeOrderedSelfTerm(ref writer, items.Item1); - if (count >= 2) m_codec2.EncodeOrderedSelfTerm(ref writer, items.Item2); - return writer.ToSlice(); - } - - public override FdbTuple DecodeComposite(Slice encoded, int count) - { - Contract.Requires(count > 0); - - var reader = new SliceReader(encoded); - T1 key1 = default(T1); - T2 key2 = default(T2); - if (count >= 1) key1 = m_codec1.DecodeOrderedSelfTerm(ref reader); - if (count >= 2) key2 = m_codec2.DecodeOrderedSelfTerm(ref reader); - if (reader.HasMore) throw new InvalidOperationException(String.Format("Unexpected data at the end of composite key after {0} items", count)); - return FdbTuple.Create(key1, key2); - } - - } - - public sealed class CodecCompositeKeyEncoder : CompositeKeyEncoder - { - private readonly IOrderedTypeCodec m_codec1; - private readonly IOrderedTypeCodec m_codec2; - private readonly IOrderedTypeCodec m_codec3; - - public CodecCompositeKeyEncoder(IOrderedTypeCodec codec1, IOrderedTypeCodec codec2, IOrderedTypeCodec codec3) - { - m_codec1 = codec1; - m_codec2 = codec2; - m_codec3 = codec3; - } - - public override Slice EncodeComposite(FdbTuple items, int count) - { - Contract.Requires(count > 0 && count <= 3); - - var writer = SliceWriter.Empty; - if (count >= 1) m_codec1.EncodeOrderedSelfTerm(ref writer, items.Item1); - if (count >= 2) m_codec2.EncodeOrderedSelfTerm(ref writer, items.Item2); - if (count >= 3) m_codec3.EncodeOrderedSelfTerm(ref writer, items.Item3); - return writer.ToSlice(); - } - - public override FdbTuple DecodeComposite(Slice encoded, int count) - { - Contract.Requires(count > 0); - - var reader = new SliceReader(encoded); - T1 key1 = default(T1); - T2 key2 = default(T2); - T3 key3 = default(T3); - if (count >= 1) key1 = m_codec1.DecodeOrderedSelfTerm(ref reader); - if (count >= 2) key2 = m_codec2.DecodeOrderedSelfTerm(ref reader); - if (count >= 3) key3 = m_codec3.DecodeOrderedSelfTerm(ref reader); - if 
(reader.HasMore) throw new InvalidOperationException(String.Format("Unexpected data at the end of composite key after {0} items", count)); - return FdbTuple.Create(key1, key2, key3); - } - - } - - /// Create a simple encoder from a codec - [NotNull] - public static IKeyEncoder Bind([NotNull] IOrderedTypeCodec codec) - { - if (codec == null) throw new ArgumentNullException("codec"); - - return new OrderedKeyEncoder(codec); - } - - /// Create a composite encoder from a pair of codecs - [NotNull] - public static ICompositeKeyEncoder Bind([NotNull] IOrderedTypeCodec codec1, [NotNull] IOrderedTypeCodec codec2) - { - if (codec1 == null) throw new ArgumentNullException("codec1"); - if (codec2 == null) throw new ArgumentNullException("codec2"); - - return new CodecCompositeKeyEncoder(codec1, codec2); - } - - /// Create a composite encoder from a triplet of codecs - [NotNull] - public static ICompositeKeyEncoder Bind([NotNull] IOrderedTypeCodec codec1, [NotNull] IOrderedTypeCodec codec2, [NotNull] IOrderedTypeCodec codec3) - { - if (codec1 == null) throw new ArgumentNullException("codec1"); - if (codec2 == null) throw new ArgumentNullException("codec2"); - if (codec3 == null) throw new ArgumentNullException("codec2"); - - return new CodecCompositeKeyEncoder(codec1, codec2, codec3); - } - - public static void Partial(ref SliceWriter writer, IOrderedTypeCodec codec1, T1 value1) - { - Contract.Assert(codec1 != null); - codec1.EncodeOrderedSelfTerm(ref writer, value1); - } - - public static void Encode(ref SliceWriter writer, [NotNull] IOrderedTypeCodec codec1, T1 value1, [NotNull] IOrderedTypeCodec codec2, T2 value2) - { - Contract.Assert(codec1 != null && codec2 != null); - codec1.EncodeOrderedSelfTerm(ref writer, value1); - codec2.EncodeOrderedSelfTerm(ref writer, value2); - } - - public static void Encode(ref SliceWriter writer, [NotNull] IOrderedTypeCodec codec1, T1 value1, [NotNull] IOrderedTypeCodec codec2, T2 value2, [NotNull] IOrderedTypeCodec codec3, T3 value3) - { - 
Contract.Assert(codec1 != null && codec2 != null && codec3 != null); - codec1.EncodeOrderedSelfTerm(ref writer, value1); - codec2.EncodeOrderedSelfTerm(ref writer, value2); - codec3.EncodeOrderedSelfTerm(ref writer, value3); - } - - } - - /// Encoders that produce compact but unordered slices, suitable for keys that don't benefit from having lexicographical ordering - public static class Unordered - { - - /// Create a simple encoder from a codec - [NotNull] - public static IKeyEncoder Bind([NotNull] IUnorderedTypeCodec codec) - { - if (codec == null) throw new ArgumentNullException("codec"); - - var encoder = codec as IKeyEncoder; - if (encoder != null) return encoder; - - return new Singleton( - (value) => codec.EncodeUnordered(value), - (encoded) => codec.DecodeUnordered(encoded) - ); - } - - } - - /// Encoders that produce compact but unordered slices, suitable for values - public static class Values - { - private static readonly GenericEncoder s_default = new GenericEncoder(); - - public static IValueEncoder BinaryEncoder - { - [NotNull] - get { return s_default; } - } - - public static IValueEncoder StringEncoder - { - [NotNull] - get { return s_default; } - } - - public static IValueEncoder Int32Encoder - { - [NotNull] - get { return s_default; } - } - - public static IValueEncoder Int64Encoder - { - [NotNull] - get { return s_default; } - } - - public static IValueEncoder GuidEncoder - { - [NotNull] - get { return s_default; } - } - - /// Create a simple encoder from a codec - [NotNull] - public static IValueEncoder Bind([NotNull] IUnorderedTypeCodec codec) - { - if (codec == null) throw new ArgumentNullException("codec"); - - var encoder = codec as IValueEncoder; - if (encoder != null) return encoder; - - return new Singleton( - (value) => codec.EncodeUnordered(value), - (encoded) => codec.DecodeUnordered(encoded) - ); - } - - internal sealed class GenericEncoder : IValueEncoder, IValueEncoder, IValueEncoder, IValueEncoder, IValueEncoder - { - - public 
Slice EncodeValue(Slice value) - { - return value; - } - - Slice IValueEncoder.DecodeValue(Slice encoded) - { - return encoded; - } - - public Slice EncodeValue(string value) - { - return Slice.FromString(value); - } - - string IValueEncoder.DecodeValue(Slice encoded) - { - return encoded.ToUnicode(); - } - - public Slice EncodeValue(int value) - { - return Slice.FromInt32(value); - } - - int IValueEncoder.DecodeValue(Slice encoded) - { - return encoded.ToInt32(); - } - - public Slice EncodeValue(long value) - { - return Slice.FromInt64(value); - } - - long IValueEncoder.DecodeValue(Slice encoded) - { - return encoded.ToInt64(); - } - - public Slice EncodeValue(Guid value) - { - return Slice.FromGuid(value); - } - - Guid IValueEncoder.DecodeValue(Slice encoded) - { - return encoded.ToGuid(); - } - - } - - } - - /// Encoders that use the Tuple Encoding, suitable for keys - public static class Tuples - { - - //TODO: rename to TupleEncoder! - internal class TupleKeyEncoder : IKeyEncoder, IValueEncoder - { - public static readonly TupleKeyEncoder Default = new TupleKeyEncoder(); - - private TupleKeyEncoder() { } - - public Slice EncodeKey(T key) - { - return FdbTuple.EncodeKey(key); - } - - public T DecodeKey(Slice encoded) - { - if (encoded.IsNullOrEmpty) return default(T); //BUGBUG - return FdbTuple.DecodeKey(encoded); - } - - public Slice EncodeValue(T key) - { - return FdbTuple.EncodeKey(key); - } - - public T DecodeValue(Slice encoded) - { - if (encoded.IsNullOrEmpty) return default(T); //BUGBUG - return FdbTuple.DecodeKey(encoded); - } - - } - - internal class TupleCompositeEncoder : CompositeKeyEncoder - { - - public static readonly TupleCompositeEncoder Default = new TupleCompositeEncoder(); - - private TupleCompositeEncoder() { } - - public override Slice EncodeComposite(FdbTuple key, int items) - { - switch (items) - { - case 2: return key.ToSlice(); - case 1: return FdbTuple.EncodeKey(key.Item1); - default: throw new ArgumentOutOfRangeException("items", 
items, "Item count must be either 1 or 2"); - } - } - - public override FdbTuple DecodeComposite(Slice encoded, int items) - { - if (items < 1 || items > 2) throw new ArgumentOutOfRangeException("items", items, "Item count must be either 1 or 2"); - - var t = FdbTuple.Unpack(encoded).OfSize(items); - Contract.Assert(t != null); - - return FdbTuple.Create( - t.Get(0), - items >= 2 ? t.Get(1) : default(T2) - ); - } - } - - internal class TupleCompositeEncoder : CompositeKeyEncoder - { - - public static readonly TupleCompositeEncoder Default = new TupleCompositeEncoder(); - - private TupleCompositeEncoder() { } - - public override Slice EncodeComposite(FdbTuple key, int items) - { - switch (items) - { - case 3: return key.ToSlice(); - case 2: return FdbTuple.EncodeKey(key.Item1, key.Item2); - case 1: return FdbTuple.EncodeKey(key.Item1); - default: throw new ArgumentOutOfRangeException("items", items, "Item count must be between 1 and 3"); - } - } - - public override FdbTuple DecodeComposite(Slice encoded, int items) - { - if (items < 1 || items > 3) throw new ArgumentOutOfRangeException("items", items, "Item count must be between 1 and 3"); - - var t = FdbTuple.Unpack(encoded).OfSize(items); - Contract.Assert(t != null); - - return FdbTuple.Create( - t.Get(0), - items >= 2 ? t.Get(1) : default(T2), - items >= 3 ? 
t.Get(2) : default(T3) - ); - } - } - - internal class TupleCompositeEncoder : CompositeKeyEncoder - { - - public static readonly TupleCompositeEncoder Default = new TupleCompositeEncoder(); - - private TupleCompositeEncoder() { } - - public override Slice EncodeComposite(FdbTuple key, int items) - { - switch (items) - { - case 4: return key.ToSlice(); - case 3: return FdbTuple.EncodeKey(key.Item1, key.Item2, key.Item3); - case 2: return FdbTuple.EncodeKey(key.Item1, key.Item2); - case 1: return FdbTuple.EncodeKey(key.Item1); - default: throw new ArgumentOutOfRangeException("items", items, "Item count must be between 1 and 4"); - } - } - - public override FdbTuple DecodeComposite(Slice encoded, int items) - { - if (items < 1 || items > 4) throw new ArgumentOutOfRangeException("items", items, "Item count must be between 1 and 4"); - - var t = FdbTuple.Unpack(encoded).OfSize(items); - - return FdbTuple.Create( - t.Get(0), - items >= 2 ? t.Get(1) : default(T2), - items >= 3 ? t.Get(2) : default(T3), - items >= 4 ? t.Get(3) : default(T4) - ); - } - } - - #region Keys - - [NotNull] - public static IKeyEncoder Key() - { - return TupleKeyEncoder.Default; - } - - [NotNull] - public static ICompositeKeyEncoder CompositeKey() - { - return TupleCompositeEncoder.Default; - } - - [NotNull] - public static ICompositeKeyEncoder CompositeKey() - { - return TupleCompositeEncoder.Default; - } - - [NotNull] - public static ICompositeKeyEncoder CompositeKey() - { - return TupleCompositeEncoder.Default; - } - - #endregion - - #region Values... - - [NotNull] - public static IValueEncoder Value() - { - return TupleKeyEncoder.Default; - } - - #endregion - - } - - #region Keys... 
- - /// Binds a pair of lambda functions to a key encoder - /// Type of the key to encode - /// Lambda function called to encode a key into a binary slice - /// Lambda function called to decode a binary slice into a key - /// Key encoder usable by any Layer that works on keys of type - [NotNull] - public static IKeyEncoder Bind([NotNull] Func encoder, [NotNull] Func decoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (decoder == null) throw new ArgumentNullException("decoder"); - return new Singleton(encoder, decoder); - } - - /// Convert an array of s into an array of slices, using a serializer (or the default serializer if none is provided) - [NotNull] - public static Slice[] EncodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] params T[] values) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (values == null) throw new ArgumentNullException("values"); - - var slices = new Slice[values.Length]; - for (int i = 0; i < values.Length; i++) - { - slices[i] = encoder.EncodeKey(values[i]); - } - return slices; - } - - /// Convert an array of s into an array of slices, using a serializer (or the default serializer if none is provided) - [NotNull] - public static Slice[] EncodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] IEnumerable elements, Func selector) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (elements == null) throw new ArgumentNullException("elements"); - if (selector == null) throw new ArgumentNullException("selector"); - - TElement[] arr; - ICollection coll; - - if ((arr = elements as TElement[]) != null) - { // fast path for arrays - return EncodeKeys(encoder, arr, selector); - } - else if ((coll = elements as ICollection) != null) - { // we can pre-allocate the result array - var slices = new Slice[coll.Count]; - int p = 0; - foreach(var item in coll) - { - slices[p++] = encoder.EncodeKey(selector(item)); - } - return slices; - } - else - { // slow path - 
return elements - .Select((item) => encoder.EncodeKey(selector(item))) - .ToArray(); - } - - } - - /// Convert an array of s into an array of slices, using a serializer (or the default serializer if none is provided) - [NotNull] - public static Slice[] EncodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] TElement[] elements, Func selector) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (elements == null) throw new ArgumentNullException("elements"); - if (selector == null) throw new ArgumentNullException("selector"); - - var slices = new Slice[elements.Length]; - for (int i = 0; i < elements.Length; i++) - { - slices[i] = encoder.EncodeKey(selector(elements[i])); - } - return slices; - } - - /// Transform a sequence of s into a sequence of slices, using a serializer (or the default serializer if none is provided) - [NotNull] - public static IEnumerable EncodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] IEnumerable values) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (values == null) throw new ArgumentNullException("values"); - - // note: T=>Slice usually is used for writing batches as fast as possible, which means that keys will be consumed immediately and don't need to be streamed - - var array = values as T[]; - if (array != null) - { // optimized path for arrays - return EncodeKeys(encoder, array); - } - - var coll = values as ICollection; - if (coll != null) - { // optimized path when we know the count - var slices = new List(coll.Count); - foreach (var value in coll) - { - slices.Add(encoder.EncodeKey(value)); - } - return slices; - } - - // "slow" path - return values.Select(value => encoder.EncodeKey(value)); - } - - /// Convert an array of slices back into an array of s, using a serializer (or the default serializer if none is provided) - [NotNull] - public static T[] DecodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] params Slice[] slices) - { - if (encoder == null) throw new 
ArgumentNullException("encoder"); - if (slices == null) throw new ArgumentNullException("slices"); - - var values = new T[slices.Length]; - for (int i = 0; i < slices.Length; i++) - { - values[i] = encoder.DecodeKey(slices[i]); - } - return values; - } - - /// Convert the keys of an array of key value pairs of slices back into an array of s, using a serializer (or the default serializer if none is provided) - [NotNull] - public static T[] DecodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] KeyValuePair[] items) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (items == null) throw new ArgumentNullException("items"); - - var values = new T[items.Length]; - for (int i = 0; i < items.Length; i++) - { - values[i] = encoder.DecodeKey(items[i].Key); - } - return values; - } - - /// Transform a sequence of slices back into a sequence of s, using a serializer (or the default serializer if none is provided) - [NotNull] - public static IEnumerable DecodeKeys([NotNull] this IKeyEncoder encoder, [NotNull] IEnumerable slices) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (slices == null) throw new ArgumentNullException("slices"); - - // Slice=>T may be filtered in LINQ queries, so we should probably stream the values (so no optimization needed) - - return slices.Select(slice => encoder.DecodeKey(slice)); - } - - /// Returns a partial encoder that will only encode the first element - public static HeadEncoder Head([NotNull] this ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - return new HeadEncoder(encoder); - } - - /// Returns a partial encoder that will only encode the first element - public static HeadEncoder Head([NotNull] this ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - - return new HeadEncoder(encoder); - } - - /// Returns a partial encoder that will only encode the first element - public static 
HeadEncoder Head([NotNull] this ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - - return new HeadEncoder(encoder); - } - - /// Returns a partial encoder that will only encode the first and second elements - public static PairEncoder Pair([NotNull] this ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - - return new PairEncoder(encoder); - } - - /// Returns a partial encoder that will only encode the first and second elements - public static PairEncoder Pair([NotNull] this ICompositeKeyEncoder encoder) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - - return new PairEncoder(encoder); - } - - #endregion - - #region Values... - - /// Convert an array of s into an array of slices, using a serializer (or the default serializer if none is provided) - [NotNull] - public static Slice[] EncodeValues([NotNull] this IValueEncoder encoder, [NotNull] params T[] values) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (values == null) throw new ArgumentNullException("values"); - - var slices = new Slice[values.Length]; - for (int i = 0; i < values.Length; i++) - { - slices[i] = encoder.EncodeValue(values[i]); - } - - return slices; - } - - /// Transform a sequence of s into a sequence of slices, using a serializer (or the default serializer if none is provided) - [NotNull] - public static IEnumerable EncodeValues([NotNull] this IValueEncoder encoder, [NotNull] IEnumerable values) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (values == null) throw new ArgumentNullException("values"); - - // note: T=>Slice usually is used for writing batches as fast as possible, which means that keys will be consumed immediately and don't need to be streamed - - var array = values as T[]; - if (array != null) - { // optimized path for arrays - return EncodeValues(encoder, array); - } - - var coll = values as 
ICollection; - if (coll != null) - { // optimized path when we know the count - var slices = new List(coll.Count); - foreach (var value in coll) - { - slices.Add(encoder.EncodeValue(value)); - } - return slices; - } - - return values.Select(value => encoder.EncodeValue(value)); - } - - /// Convert an array of slices back into an array of s, using a serializer (or the default serializer if none is provided) - [NotNull] - public static T[] DecodeValues([NotNull] this IValueEncoder encoder, [NotNull] params Slice[] slices) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (slices == null) throw new ArgumentNullException("slices"); - - var values = new T[slices.Length]; - for (int i = 0; i < slices.Length; i++) - { - values[i] = encoder.DecodeValue(slices[i]); - } - - return values; - } - - /// Convert the values of an array of key value pairs of slices back into an array of s, using a serializer (or the default serializer if none is provided) - [NotNull] - public static T[] DecodeValues([NotNull] this IValueEncoder encoder, [NotNull] KeyValuePair[] items) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (items == null) throw new ArgumentNullException("items"); - - var values = new T[items.Length]; - for (int i = 0; i < items.Length; i++) - { - values[i] = encoder.DecodeValue(items[i].Value); - } - - return values; - } - - /// Transform a sequence of slices back into a sequence of s, using a serializer (or the default serializer if none is provided) - [NotNull] - public static IEnumerable DecodeValues([NotNull] this IValueEncoder encoder, [NotNull] IEnumerable slices) - { - if (encoder == null) throw new ArgumentNullException("encoder"); - if (slices == null) throw new ArgumentNullException("slices"); - - // Slice=>T may be filtered in LINQ queries, so we should probably stream the values (so no optimization needed) - - return slices.Select(slice => encoder.DecodeValue(slice)); - } - - #endregion - } - -} diff --git 
a/FoundationDB.Client/TypeSystem/ICompositeKeyEncoder.cs b/FoundationDB.Client/TypeSystem/ICompositeKeyEncoder.cs deleted file mode 100644 index 4b2621897..000000000 --- a/FoundationDB.Client/TypeSystem/ICompositeKeyEncoder.cs +++ /dev/null @@ -1,56 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -using System; -using FoundationDB.Layers.Tuples; - -namespace FoundationDB.Client -{ - public interface ICompositeKeyEncoder : ICompositeKeyEncoder> - { - Slice EncodeKey(T1 value1, T2 value2, T3 value3, T4 value4); - } - - public interface ICompositeKeyEncoder : ICompositeKeyEncoder> - { - Slice EncodeKey(T1 value1, T2 value2, T3 value3); - } - - public interface ICompositeKeyEncoder : ICompositeKeyEncoder> - { - Slice EncodeKey(T1 value1, T2 value2); - } - - public interface ICompositeKeyEncoder : IKeyEncoder - where TTuple : IFdbTuple - { - Slice EncodeComposite(TTuple key, int items); - - TTuple DecodeComposite(Slice encoded, int items); - } -} \ No newline at end of file diff --git a/FoundationDB.Client/Utils/Batched.cs b/FoundationDB.Client/Utils/Batched.cs new file mode 100644 index 000000000..e01b4a300 --- /dev/null +++ b/FoundationDB.Client/Utils/Batched.cs @@ -0,0 +1,90 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace FoundationDB +{ + using System; + using System.Collections.Generic; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using JetBrains.Annotations; + + internal static class Batched + { + + public delegate void Handler(ref SliceWriter writer, TValue item, TState state); + + [NotNull] + public static Slice[] Convert(SliceWriter writer, [NotNull, ItemNotNull] IEnumerable values, Handler handler, TState state) + { + Contract.Requires(values != null && handler != null); + + //Note on performance: + // - we will reuse the same buffer for each temp key, and copy them into a slice buffer + // - doing it this way adds a memory copy (writer => buffer) but reduce the number of byte[] allocations (and reduce the GC overhead) + + int start = writer.Position; + + var buffer = new SliceBuffer(); + + if (values is ICollection coll) + { // pre-allocate the final array with the correct size + var res = new Slice[coll.Count]; + int p = 0; + foreach (var tuple in coll) + { + // reset position to just after the subspace prefix + writer.Position = start; + + handler(ref writer, tuple, state); + + // copy full key in the buffer + res[p++] = buffer.Intern(writer.ToSlice()); + } + Contract.Assert(p == res.Length); + return res; + } + else + { // we won't now the array size until the end... 
+ var res = new List(); + foreach (var tuple in values) + { + // reset position to just after the subspace prefix + writer.Position = start; + + handler(ref writer, tuple, state); + + // copy full key in the buffer + res.Add(buffer.Intern(writer.ToSlice())); + } + return res.ToArray(); + } + } + } + +} diff --git a/FoundationDB.Client/Utils/Contract.cs b/FoundationDB.Client/Utils/Contract.cs deleted file mode 100644 index ff171d5a0..000000000 --- a/FoundationDB.Client/Utils/Contract.cs +++ /dev/null @@ -1,169 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client.Utils -{ - using JetBrains.Annotations; - using System; - using System.Diagnostics; - using System.Runtime.CompilerServices; - using SDC = System.Diagnostics.Contracts; - - internal static class Contract - { - - #region Requires - - [DebuggerStepThrough, DebuggerHidden] - [Conditional("DEBUG")] -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - [AssertionMethod] - public static void Requires([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, [CallerLineNumber] int _line = 0, [CallerFilePath] string _path = "") - { - if (!condition) RaiseContractFailure(SDC.ContractFailureKind.Precondition, null, _path, _line); - } - - [DebuggerStepThrough, DebuggerHidden] - [Conditional("DEBUG")] -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - [AssertionMethod] - public static void Requires([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, string message, [CallerLineNumber] int _line = 0, [CallerFilePath] string _path = "") - { - if (!condition) RaiseContractFailure(SDC.ContractFailureKind.Precondition, message, _path, _line); - } - - #endregion - - #region Assert - - /// Assert that a condition is verified, at debug time - /// Condition that must be true - /// Line number of the calling source file - /// Path of the calling source file - /// This method is not compiled on Release builds - [DebuggerStepThrough, DebuggerHidden] - [Conditional("DEBUG")] -#if !NET_4_0 - 
[MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - [AssertionMethod] - public static void Assert([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, [CallerLineNumber] int _line = 0, [CallerFilePath] string _path = "") - { - if (!condition) RaiseContractFailure(SDC.ContractFailureKind.Assert, null, _path, _line); - } - - /// Assert that a condition is verified, at debug time - /// Condition that must be true - /// Error message if the condition does not pass - /// Line number of the calling source file - /// Path of the calling source file - /// This method is not compiled on Release builds - [DebuggerStepThrough, DebuggerHidden] - [Conditional("DEBUG")] -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - [AssertionMethod] - public static void Assert([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, string message, [CallerLineNumber] int _line = 0, [CallerFilePath] string _path = "") - { - if (!condition) RaiseContractFailure(SDC.ContractFailureKind.Assert, message, _path, _line); - } - - #endregion - - #region Ensures - - /// Assert that a condition is verified, at debug time - /// Condition that must be true - /// Line number of the calling source file - /// Path of the calling source file - /// This method is not compiled on Release builds - [DebuggerStepThrough, DebuggerHidden] - [Conditional("DEBUG")] -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - [AssertionMethod] - public static void Ensures([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, [CallerLineNumber] int _line = 0, [CallerFilePath] string _path = "") - { - if (!condition) RaiseContractFailure(SDC.ContractFailureKind.Postcondition, null, _path, _line); - } - - /// Assert that a condition is verified, at debug time - /// Condition that must be true - /// Error message if the condition does not pass - /// Line number of the calling source file - /// Path of the calling source file - 
/// This method is not compiled on Release builds - [DebuggerStepThrough, DebuggerHidden] - [Conditional("DEBUG")] -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - [AssertionMethod] - public static void Ensures([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition, string message, [CallerLineNumber] int _line = 0, [CallerFilePath] string _path = "") - { - if (!condition) RaiseContractFailure(SDC.ContractFailureKind.Postcondition, message, _path, _line); - } - - #endregion - - [DebuggerStepThrough, DebuggerHidden] - internal static void RaiseContractFailure(SDC.ContractFailureKind kind, string message, string file, int line) - { - if (message == null) - { - switch(kind) - { - case SDC.ContractFailureKind.Assert: message = "An assertion was not met"; break; - case SDC.ContractFailureKind.Precondition: message = "A pre-requisite was not met"; break; - case SDC.ContractFailureKind.Postcondition: message = "A post-condition was not met"; break; - default: message = "An expectation was not met"; break; - } - } - if (file != null) - { // add the caller infos - message = String.Format("{0} in {1}:line {2}", message, file, line); - } - - //TODO: check if we are running under NUnit, and map to an Assert.Fail() instead ? - - Debug.Fail(message); - // If you break here, that means that an assertion failed somewhere up the stack. - // TODO: find a way to have the debugger break, but show the caller of Contract.Assert(..) method, instead of here ? - if (Debugger.IsAttached) Debugger.Break(); - - throw new InvalidOperationException(message); - } - - } -} diff --git a/FoundationDB.Client/Utils/DebugCounters.cs b/FoundationDB.Client/Utils/DebugCounters.cs index 6c0c84091..02c21c7e0 100644 --- a/FoundationDB.Client/Utils/DebugCounters.cs +++ b/FoundationDB.Client/Utils/DebugCounters.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Client/Utils/Logging.cs b/FoundationDB.Client/Utils/Logging.cs index 03e655620..52b316255 100644 --- a/FoundationDB.Client/Utils/Logging.cs +++ b/FoundationDB.Client/Utils/Logging.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,7 +26,7 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client.Utils +namespace FoundationDB.Client { using System; using System.Diagnostics; diff --git a/FoundationDB.Client/Utils/Slice.cs b/FoundationDB.Client/Utils/Slice.cs deleted file mode 100644 index 6606bdca5..000000000 --- a/FoundationDB.Client/Utils/Slice.cs +++ /dev/null @@ -1,2509 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client -{ - using FoundationDB.Async; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.Collections.Generic; - using System.ComponentModel; - using System.Diagnostics; - using System.Globalization; - using System.IO; - using System.Linq; - using System.Text; - using System.Threading; - using System.Threading.Tasks; - - /// Delimits a section of a byte array - [ImmutableObject(true), DebuggerDisplay("Count={Count}, Offset={Offset}"), DebuggerTypeProxy(typeof(Slice.DebugView))] - public struct Slice : IEquatable, IEquatable>, IEquatable, IComparable - { - #region Static Members... 
- - /// Cached empty array of bytes - internal static readonly byte[] EmptyArray = new byte[0]; - - /// Cached empty array of slices - internal static readonly Slice[] EmptySliceArray = new Slice[0]; - - /// Cached array of bytes from 0 to 255 - internal static readonly byte[] ByteSprite; - - /// Null slice ("no segment") - public static readonly Slice Nil = default(Slice); - - /// Empty slice ("segment of 0 bytes") - public static readonly Slice Empty = new Slice(EmptyArray, 0, 0); - - [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2207:InitializeValueTypeStaticFieldsInline")] - static Slice() - { - var tmp = new byte[256]; - for (int i = 0; i < tmp.Length; i++) tmp[i] = (byte)i; - ByteSprite = tmp; - } - - #endregion - - /// Pointer to the buffer (or null for ) - public readonly byte[] Array; - - /// Offset of the first byte of the slice in the parent buffer - public readonly int Offset; - - /// Number of bytes in the slice - public readonly int Count; - - internal Slice(byte[] array, int offset, int count) - { - Contract.Requires(array != null && offset >= 0 && offset <= array.Length && count >= 0 && offset + count <= array.Length); - this.Array = array; - this.Offset = offset; - this.Count = count; - } - - /// Creates a slice mapping an entire buffer - /// - /// - public static Slice Create(byte[] bytes) - { - return - bytes == null ? Slice.Nil : - bytes.Length == 0 ? 
Slice.Empty : - new Slice(bytes, 0, bytes.Length); - } - - /// Creates a slice from an Array Segment - /// Segment of buffer to convert - public static Slice Create(ArraySegment arraySegment) - { - return Create(arraySegment.Array, arraySegment.Offset, arraySegment.Count); - } - - /// Creates a slice mapping a section of a buffer - /// Original buffer - /// Offset into buffer - /// Number of bytes - public static Slice Create(byte[] buffer, int offset, int count) - { - SliceHelpers.EnsureBufferIsValid(buffer, offset, count); - if (count == 0) - { - if (offset != 0) throw new ArgumentException("offset"); - return buffer == null ? Nil : Empty; - } - return new Slice(buffer, offset, count); - } - - /// Creates a slice mapping a section of a buffer, either directly or by making a copy. - /// Original buffer - /// Offset into buffer - /// Number of bytes - /// If true, creates a copy of the buffer. If false, maps directly into the buffer. - /// If is false, any change made to will also be visible in this slice. - public static Slice Create(byte[] buffer, int offset, int count, bool copy) - { - SliceHelpers.EnsureBufferIsValid(buffer, offset, count); - if (count == 0) - { - if (offset != 0) throw new ArgumentException("offset"); - return buffer == null ? Nil : Empty; - } - else if (copy) - { - var tmp = new byte[count]; - SliceHelpers.CopyBytesUnsafe(tmp, 0, buffer, offset, count); - return new Slice(tmp, 0, count); - } - else - { - return new Slice(buffer, offset, count); - } - } - - /// Create a new empty slice of a specified size containing all zeroes - /// - /// - public static Slice Create(int size) - { - if (size < 0) throw new ArgumentException("size"); - return size == 0 ? 
Slice.Empty : new Slice(new byte[size], 0, size); - } - - /// Creates a new slice with a copy of an unmanaged memory buffer - /// Pointer to unmanaged buffer - /// Number of bytes in the buffer - /// Slice with a managed copy of the data - internal static unsafe Slice Create(byte* ptr, int count) - { - if (count == 0) - { - return ptr == null ? Slice.Nil : Slice.Empty; - } - if (ptr == null) throw new ArgumentNullException("ptr"); - if (count < 0) throw new ArgumentOutOfRangeException("count"); - - if (count == 1) - { - return Slice.FromByte(*ptr); - } - - var bytes = new byte[count]; - SliceHelpers.CopyBytesUnsafe(bytes, 0, ptr, count); - return new Slice(bytes, 0, count); - } - - /// Creates a new slice that contains the same byte repeated - /// Byte that will fill the slice - /// Number of bytes - /// New slice that contains times the byte . - public static Slice Repeat(byte value, int count) - { - if (count < 0) throw new ArgumentException("count"); - if (count == 0) return Slice.Empty; - - var res = new byte[count]; - SliceHelpers.SetBytes(res, 0, count, value); - return new Slice(res, 0, res.Length); - } - - /// Create a new slice filled with random bytes taken from a random number generator - /// Pseudo random generator to use (needs locking if instance is shared) - /// Number of random bytes to generate - /// Slice of bytes taken from - /// Warning: is not thread-safe ! If the instance is shared between threads, then it needs to be locked before calling this method. 
- public static Slice Random([NotNull] Random prng, int count) - { - if (prng == null) throw new ArgumentNullException("prng"); - if (count < 0) throw new ArgumentOutOfRangeException("count", count, "Count cannot be negative"); - if (count == 0) return Slice.Empty; - - var bytes = new byte[count]; - prng.NextBytes(bytes); - return new Slice(bytes, 0, count); - } - - /// Create a new slice filled with random bytes taken from a cryptographic random number generator - /// Random generator to use (needs locking if instance is shared) - /// Number of random bytes to generate - /// If true, produce a sequence of non-zero bytes. - /// Slice of bytes taken from - /// Warning: All RNG implementations may not be thread-safe ! If the instance is shared between threads, then it may need to be locked before calling this method. - public static Slice Random([NotNull] System.Security.Cryptography.RandomNumberGenerator rng, int count, bool nonZeroBytes = false) - { - if (rng == null) throw new ArgumentNullException("rng"); - if (count < 0) throw new ArgumentOutOfRangeException("count", count, "Count cannot be negative"); - if (count == 0) return Slice.Empty; - - var bytes = new byte[count]; - - if (nonZeroBytes) - rng.GetNonZeroBytes(bytes); - else - rng.GetBytes(bytes); - - return new Slice(bytes, 0, count); - } - - /// Reports the zero-based index of the first occurrence of the specified slice in this source. - /// The slice Input slice - /// The slice to seek - /// - public static int Find(Slice source, Slice value) - { - const int NOT_FOUND = -1; - - SliceHelpers.EnsureSliceIsValid(ref source); - SliceHelpers.EnsureSliceIsValid(ref value); - - int m = value.Count; - if (m == 0) return 0; - - int n = source.Count; - if (n == 0) return NOT_FOUND; - - if (m == n) return source.Equals(value) ? 
0 : NOT_FOUND; - if (m <= n) - { - byte[] src = source.Array; - int p = source.Offset; - byte firstByte = value[0]; - - // note: this is a very simplistic way to find a value, and is optimized for the case where the separator is only one byte (most common) - while (n-- > 0) - { - if (src[p++] == firstByte) - { // possible match ? - if (m == 1 || SliceHelpers.SameBytesUnsafe(src, p, value.Array, value.Offset + 1, m - 1)) - { - return p - source.Offset - 1; - } - } - } - } - - return NOT_FOUND; - } - - /// Concatenates all the elements of a slice array, using the specified separator between each element. - /// The slice to use as a separator. Can be empty. - /// An array that contains the elements to concatenate. - /// A slice that consists of the elements in a value delimited by the slice. If is an empty array, the method returns . - /// If is null. - public static Slice Join(Slice separator, [NotNull] Slice[] values) - { - if (values == null) throw new ArgumentNullException("values"); - - int count = values.Length; - if (count == 0) return Slice.Empty; - if (count == 1) return values[0]; - return Join(separator, values, 0, count); - } - - /// Concatenates the specified elements of a slice array, using the specified separator between each element. - /// The slice to use as a separator. Can be empty. - /// An array that contains the elements to concatenate. - /// The first element in to use. - /// The number of elements of to use. - /// A slice that consists of the slices in delimited by the slice. -or- if is zero, has no elements, or and all the elements of are . - /// If is null. - /// If or is less than zero. -or- plus is greater than the number of elements in . 
- public static Slice Join(Slice separator, [NotNull] Slice[] values, int startIndex, int count) - { - // Note: this method is modeled after String.Join() and should behave the same - // - Only difference is that Slice.Nil and Slice.Empty are equivalent (either for separator, or for the elements of the array) - - if (values == null) throw new ArgumentNullException("values"); - //REVIEW: support negative indexing ? - if (startIndex < 0) throw new ArgumentOutOfRangeException("startIndex", startIndex, "Start index must be a positive integer"); - if (count < 0) throw new ArgumentOutOfRangeException("count", count, "Count must be a positive integer"); - if (startIndex > values.Length - count) throw new ArgumentOutOfRangeException("startIndex", startIndex, "Start index must fit within the array"); - - if (count == 0) return Slice.Empty; - if (count == 1) return values[startIndex]; - - int size = 0; - for (int i = 0; i < values.Length; i++) size += values[i].Count; - size += (values.Length - 1) * separator.Count; - - // if the size overflows, that means that the resulting buffer would need to be >= 2 GB, which is not possible! - if (size < 0) throw new OutOfMemoryException(); - - //note: we want to make sure the buffer of the writer will be the exact size (so that we can use the result as a byte[] without copying again) - var tmp = new byte[size]; - var writer = new SliceWriter(tmp); - for (int i = 0; i < values.Length; i++) - { - if (i > 0) writer.WriteBytes(separator); - writer.WriteBytes(values[i]); - } - Contract.Assert(writer.Buffer.Length == size); - return writer.ToSlice(); - } - - /// Concatenates the specified elements of a slice sequence, using the specified separator between each element. - /// The slice to use as a separator. Can be empty. - /// A sequence will return the elements to concatenate. - /// A slice that consists of the slices in delimited by the slice. -or- if has no elements, or and all the elements of are . - /// If is null. 
- public static Slice Join(Slice separator, [NotNull] IEnumerable values) - { - if (values == null) throw new ArgumentNullException("values"); - var array = (values as Slice[]) ?? values.ToArray(); - return Join(separator, array, 0, array.Length); - } - - /// Concatenates the specified elements of a slice array, using the specified separator between each element. - /// The slice to use as a separator. Can be empty. - /// An array that contains the elements to concatenate. - /// The first element in to use. - /// The number of elements of to use. - /// A byte array that consists of the slices in delimited by the slice. -or- an emtpy array if is zero, has no elements, or and all the elements of are . - /// If is null. - /// If or is less than zero. -or- plus is greater than the number of elements in . - [NotNull] - public static byte[] JoinBytes(Slice separator, [NotNull] Slice[] values, int startIndex, int count) - { - // Note: this method is modeled after String.Join() and should behave the same - // - Only difference is that Slice.Nil and Slice.Empty are equivalent (either for separator, or for the elements of the array) - - if (values == null) throw new ArgumentNullException("values"); - //REVIEW: support negative indexing ? - if (startIndex < 0) throw new ArgumentOutOfRangeException("startIndex", startIndex, "Start index must be a positive integer"); - if (count < 0) throw new ArgumentOutOfRangeException("count", count, "Count must be a positive integer"); - if (startIndex > values.Length - count) throw new ArgumentOutOfRangeException("startIndex", startIndex, "Start index must fit within the array"); - - if (count == 0) return Slice.EmptyArray; - if (count == 1) return values[startIndex].GetBytes() ?? 
Slice.EmptyArray; - - int size = 0; - for (int i = 0; i < count; i++) size = checked(size + values[startIndex + i].Count); - size = checked(size + (count - 1) * separator.Count); - - // if the size overflows, that means that the resulting buffer would need to be >= 2 GB, which is not possible! - if (size < 0) throw new OutOfMemoryException(); - - //note: we want to make sure the buffer of the writer will be the exact size (so that we can use the result as a byte[] without copying again) - var tmp = new byte[size]; - int p = 0; - for (int i = 0; i < count; i++) - { - if (i > 0) separator.WriteTo(tmp, ref p); - values[startIndex + i].WriteTo(tmp, ref p); - } - Contract.Assert(p == tmp.Length); - return tmp; - } - - /// Concatenates the specified elements of a slice sequence, using the specified separator between each element. - /// The slice to use as a separator. Can be empty. - /// A sequence will return the elements to concatenate. - /// A byte array that consists of the slices in delimited by the slice. -or- an empty array if has no elements, or and all the elements of are . - /// If is null. - [NotNull] - public static byte[] JoinBytes(Slice separator, [NotNull] IEnumerable values) - { - if (values == null) throw new ArgumentNullException("values"); - var array = (values as Slice[]) ?? values.ToArray(); - return JoinBytes(separator, array, 0, array.Length); - } - - /// Returns a slice array that contains the sub-slices in that are delimited by . A parameter specifies whether to return empty array elements. - /// Input slice that must be split into sub-slices - /// Separator that delimits the sub-slices in . Cannot be empty or nil - /// to omit empty array alements from the array returned; or to include empty array elements in the array returned. - /// An array whose elements contain the sub-slices that are delimited by . - /// If is empty, or if is not one of the values. 
- /// If does not contain the delimiter, the returned array consists of a single element that repeats the input, or an empty array if input is itself empty. - /// To reduce memory usage, the sub-slices returned in the array will all share the same underlying buffer of the input slice. - [NotNull] - public static Slice[] Split(Slice input, Slice separator, StringSplitOptions options = StringSplitOptions.None) - { - // this method is made to behave the same way as String.Split(), especially the following edge cases - // - Empty.Split(..., StringSplitOptions.None) => { Empty } - // - Empty.Split(..., StringSplitOptions.RemoveEmptyEntries) => { } - // differences: - // - If input is Nil, it is considered equivalent to Empty - // - If separator is Nil or Empty, the method throws - - var list = new List(); - - if (separator.Count <= 0) throw new ArgumentException("Separator must have at least one byte", "separator"); - if (options < StringSplitOptions.None || options > StringSplitOptions.RemoveEmptyEntries) throw new ArgumentException("options"); - - bool skipEmpty = options.HasFlag(StringSplitOptions.RemoveEmptyEntries); - if (input.Count == 0) - { - return skipEmpty ? Slice.EmptySliceArray : new [] { Slice.Empty }; - } - - while (input.Count > 0) - { - int p = Find(input, separator); - if (p < 0) - { // last chunk - break; - } - if (p == 0) - { // empty chunk - if (!skipEmpty) list.Add(Slice.Empty); - } - else - { - list.Add(input.Substring(0, p)); - } - // note: we checked earlier that separator.Count > 0, so we are guaranteed to advance the cursor - input = input.Substring(p + separator.Count); - } - - if (input.Count > 0 || !skipEmpty) - { - list.Add(input); - } - - return list.ToArray(); - } - - /// Decode a Base64 encoded string into a slice - public static Slice FromBase64(string base64String) - { - return base64String == null ? Slice.Nil : base64String.Length == 0 ? 
Slice.Empty : Slice.Create(Convert.FromBase64String(base64String)); - } - - /// Encode an unsigned 8-bit integer into a slice - public static Slice FromByte(byte value) - { - return new Slice(ByteSprite, value, 1); - } - - #region 16-bit integers - - /// Encode a signed 16-bit integer into a variable size slice (1 or 2 bytes) in little-endian - public static Slice FromInt16(short value) - { - if (value >= 0) - { - if (value <= 255) - { - return Slice.FromByte((byte)value); - } - else - { - return new Slice(new byte[] { (byte)value, (byte)(value >> 8) }, 0, 2); - } - } - - return FromFixed16(value); - } - - /// Encode a signed 16-bit integer into a 2-byte slice in little-endian - public static Slice FromFixed16(short value) - { - return new Slice( - new byte[] - { - (byte)value, - (byte)(value >> 8) - }, - 0, - 2 - ); - } - - /// Encode an unsigned 16-bit integer into a variable size slice (1 or 2 bytes) in little-endian - public static Slice FromUInt16(ushort value) - { - if (value <= 255) - { - return Slice.FromByte((byte)value); - } - else - { - return FromFixedU16(value); - } - } - - /// Encode an unsigned 16-bit integer into a 2-byte slice in little-endian - /// 0x1122 => 11 22 - public static Slice FromFixedU16(ushort value) - { - return new Slice( - new byte[] - { - (byte)value, - (byte)(value >> 8) - }, - 0, - 2 - ); - } - - /// Encode an unsigned 16-bit integer into a 2-byte slice in big-endian - /// 0x1122 => 22 11 - public static Slice FromFixedU16BE(ushort value) - { - return new Slice( - new byte[] - { - (byte)(value >> 8), - (byte)value - }, - 0, - 4 - ); - } - - /// Encode an unsigned 16-bit integer into 7-bit encoded unsigned int (aka 'Varint16') - public static Slice FromVarint16(ushort value) - { - if (value < 128) - { - return FromByte((byte)value); - } - else - { - var writer = new SliceWriter(3); - writer.WriteVarint16(value); - return writer.ToSlice(); - } - } - - #endregion - - #region 32-bit integers - - /// Encode a signed 32-bit integer 
into a variable size slice (1, 2 or 4 bytes) in little-endian - public static Slice FromInt32(int value) - { - if (value >= 0) - { - if (value <= 255) - { - return Slice.FromByte((byte)value); - } - if (value <= 65535) - { - //TODO: possible micro optimization is for values like 0x100, 0x201, 0x1413 or 0x4342, where we could use 2 consecutive bytes in the ByteSprite, - return new Slice(new byte[] { (byte)value, (byte)(value >> 8) }, 0, 2); - } - } - - return FromFixed32(value); - } - - /// Encode a signed 32-bit integer into a 4-byte slice in little-endian - public static Slice FromFixed32(int value) - { - return new Slice( - new byte[] - { - (byte)value, - (byte)(value >> 8), - (byte)(value >> 16), - (byte)(value >> 24) - }, - 0, - 4 - ); - } - - /// Encode an unsigned 32-bit integer into a variable size slice (1, 2 or 4 bytes) in little-endian - public static Slice FromUInt32(uint value) - { - if (value <= 255) - { - return Slice.FromByte((byte)value); - } - if (value <= 65535) - { - return new Slice(new byte[] { (byte)value, (byte)(value >> 8) }, 0, 2); - } - - return FromFixedU32(value); - } - - /// Encode an unsigned 32-bit integer into a 4-byte slice in little-endian - /// 0x11223344 => 11 22 33 44 - public static Slice FromFixedU32(uint value) - { - return new Slice( - new byte[] - { - (byte)value, - (byte)(value >> 8), - (byte)(value >> 16), - (byte)(value >> 24) - }, - 0, - 4 - ); - } - - /// Encode an unsigned 32-bit integer into a 4-byte slice in big-endian - /// 0x11223344 => 44 33 22 11 - public static Slice FromFixedU32BE(uint value) - { - return new Slice( - new byte[] - { - (byte)(value >> 24), - (byte)(value >> 16), - (byte)(value >> 8), - (byte)value - }, - 0, - 4 - ); - } - - /// Encode an unsigned 32-bit integer into 7-bit encoded unsigned int (aka 'Varint32') - public static Slice FromVarint32(uint value) - { - if (value < 128) - { - return FromByte((byte)value); - } - else - { - var writer = new SliceWriter(5); - writer.WriteVarint32(value); - 
return writer.ToSlice(); - } - } - - #endregion - - #region 64-bit integers - - /// Encode a signed 64-bit integer into a variable size slice (1, 2, 4 or 8 bytes) in little-endian - public static Slice FromInt64(long value) - { - if (value >= 0 && value <= int.MaxValue) - { - return FromInt32((int)value); - } - return FromFixed64(value); - } - - /// Encode a signed 64-bit integer into a 8-byte slice in little-endian - public static Slice FromFixed64(long value) - { - return new Slice( - new byte[] - { - (byte)value, - (byte)(value >> 8), - (byte)(value >> 16), - (byte)(value >> 24), - (byte)(value >> 32), - (byte)(value >> 40), - (byte)(value >> 48), - (byte)(value >> 56) - }, - 0, - 8 - ); - } - - /// Encode an unsigned 64-bit integer into a variable size slice (1, 2, 4 or 8 bytes) in little-endian - public static Slice FromUInt64(ulong value) - { - if (value <= 255) - { - return Slice.FromByte((byte)value); - } - if (value <= 65535) - { - return new Slice(new byte[] { (byte)value, (byte)(value >> 8) }, 0, 2); - } - - if (value <= uint.MaxValue) - { - return new Slice( - new byte[] - { - (byte)value, - (byte)(value >> 8), - (byte)(value >> 16), - (byte)(value >> 24) - }, - 0, - 4 - ); - } - - return FromFixedU64(value); - } - - /// Encode an unsigned 64-bit integer into a 8-byte slice in little-endian - /// 0x1122334455667788 => 11 22 33 44 55 66 77 88 - public static Slice FromFixedU64(ulong value) - { - return new Slice( - new byte[] - { - (byte)value, - (byte)(value >> 8), - (byte)(value >> 16), - (byte)(value >> 24), - (byte)(value >> 32), - (byte)(value >> 40), - (byte)(value >> 48), - (byte)(value >> 56) - }, - 0, - 8 - ); - } - - /// Encode an unsigned 64-bit integer into a 8-byte slice in big-endian - /// 0x1122334455667788 => 88 77 66 55 44 33 22 11 - public static Slice FromFixedU64BE(ulong value) - { - return new Slice( - new byte[] - { - (byte)(value >> 56), - (byte)(value >> 48), - (byte)(value >> 40), - (byte)(value >> 32), - (byte)(value >> 24), - 
(byte)(value >> 16), - (byte)(value >> 8), - (byte)value - }, - 0, - 8 - ); - } - - /// Encode an unsigned 64-bit integer into 7-bit encoded unsigned int (aka 'Varint64') - public static Slice FromVarint64(ulong value) - { - if (value < 128) - { - return FromByte((byte)value); - } - else - { - var writer = new SliceWriter(10); - writer.WriteVarint64(value); - return writer.ToSlice(); - } - } - - #endregion - - #region decimals - - /// Encode a 32-bit decimal into an 4-byte slice - public static Slice FromSingle(float value) - { - //TODO: may not work on BE platforms? - uint bits; - unsafe { bits = *(uint*)(&value); } - return FromFixedU32(bits); - } - - /// Encode a 64-bit decimal into an 8-byte slice - public static Slice FromDouble(double value) - { - //TODO: may not work on BE platforms? - ulong bits; - unsafe { bits = *(ulong*)(&value); } - return FromFixedU64(bits); - } - - #endregion - - - /// Create a 16-byte slice containing a System.Guid encoding according to RFC 4122 (Big Endian) - /// WARNING: Slice.FromGuid(guid).GetBytes() will not produce the same result as guid.ToByteArray() ! - /// If you need to produce Microsoft compatible byte arrays, use Slice.Create(guid.ToByteArray()) but then you shoud NEVER use Slice.ToGuid() to decode such a value ! - public static Slice FromGuid(Guid value) - { - // UUID are stored using the RFC4122 format (Big Endian), while .NET's System.GUID use Little Endian - // => we will convert the GUID into a UUID under the hood, and hope that it gets converted back when read from the db - - return new Uuid128(value).ToSlice(); - } - - /// Create a 16-byte slice containing an RFC 4122 compliant 128-bit UUID - /// You should never call this method on a slice created from the result of calling System.Guid.ToByteArray() ! 
- public static Slice FromUuid128(Uuid128 value) - { - // UUID should already be in the RFC 4122 ordering - return value.ToSlice(); - } - - /// Create an 8-byte slice containing an 64-bit UUID - public static Slice FromUuid64(Uuid64 value) - { - return value.ToSlice(); - } - - internal static readonly Encoding DefaultEncoding = -#if CORE_CLR - Encoding.GetEncoding(0); -#else - Encoding.Default; -#endif - - /// Dangerously create a slice containing string converted to ASCII. All non-ASCII characters may be corrupted or converted to '?' - /// WARNING: if you put a string that contains non-ASCII chars, it will be silently corrupted! This should only be used to store keywords or 'safe' strings. - /// Note: depending on your default codepage, chars from 128 to 255 may be preserved, but only if they are decoded using the same codepage at the other end ! - public static Slice FromAscii(string text) - { - return text == null ? Slice.Nil : text.Length == 0 ? Slice.Empty : Slice.Create(DefaultEncoding.GetBytes(text)); - } - - /// Create a slice containing the UTF-8 bytes of the string - public static Slice FromString(string value) - { - return value == null ? Slice.Nil : value.Length == 0 ? Slice.Empty : Slice.Create(Encoding.UTF8.GetBytes(value)); - } - - /// Create a slice that holds the UTF-8 encoded representation of - /// - /// The returned slice is only guaranteed to hold 1 byte for ASCII chars (0..127). For non-ASCII chars, the size can be from 1 to 6 bytes. - /// If you need to use ASCII chars, you should use Slice.FromByte() instead - public static Slice FromChar(char value) - { - if (value < 128) - { // ASCII - return Slice.FromByte((byte)value); - } - - // note: Encoding.UTF8.GetMaxByteCount(1) returns 6, but allocate 8 to stay aligned - var tmp = new byte[8]; - int n = Encoding.UTF8.GetBytes(new char[] { value }, 0, 1, tmp, 0); - return n == 1 ? 
FromByte(tmp[0]) : new Slice(tmp, 0, n); - } - - /// Convert an hexadecimal digit (0-9A-Fa-f) into the corresponding decimal value - /// Hexadecimal digit (case insensitive) - /// Decimal value between 0 and 15, or an exception - private static int NibbleToDecimal(char c) - { - int x = c - 48; - if (x < 10) return x; - if (x >= 17 && x <= 42) return x - 7; - if (x >= 49 && x <= 74) return x - 39; - throw new FormatException("Input is not a valid hexadecimal digit"); - } - - /// Convert an hexadecimal encoded string ("1234AA7F") into a slice - /// String contains a sequence of pairs of hexadecimal digits with no separating spaces. - /// Slice containing the decoded byte array, or an exeception if the string is empty or has an odd length - public static Slice FromHexa(string hexaString) - { - if (string.IsNullOrEmpty(hexaString)) return hexaString == null ? Slice.Nil : Slice.Empty; - - if (hexaString.IndexOf(' ') > 0) - { // remove spaces - hexaString = hexaString.Replace(" ", ""); - } - - if ((hexaString.Length & 1) != 0) throw new ArgumentException("Hexadecimal string must be of even length", "hexaString"); - - var buffer = new byte[hexaString.Length >> 1]; - for (int i = 0; i < hexaString.Length; i += 2) - { - buffer[i >> 1] = (byte) ((NibbleToDecimal(hexaString[i]) << 4) | NibbleToDecimal(hexaString[i + 1])); - } - return new Slice(buffer, 0, buffer.Length); - } - - /// Returns true is the slice is not null - /// An empty slice is NOT considered null - public bool HasValue { get { return this.Array != null; } } - - /// Returns true if the slice is null - /// An empty slice is NOT considered null - public bool IsNull { get { return this.Array == null; } } - - /// Return true if the slice is not null but contains 0 bytes - /// A null slice is NOT empty - public bool IsEmpty { get { return this.Count == 0 && this.Array != null; } } - - /// Returns true if the slice is null or empty, or false if it contains at least one byte - public bool IsNullOrEmpty { get { return 
this.Count == 0; } } - - /// Returns true if the slice contains at least one byte, or false if it is null or empty - public bool IsPresent { get { return this.Count > 0; } } - - /// Return a byte array containing all the bytes of the slice, or null if the slice is null - /// Byte array with a copy of the slice, or null - [Pure, CanBeNull] - public byte[] GetBytes() - { - if (this.Count == 0) return this.Array == null ? null : Slice.EmptyArray; - SliceHelpers.EnsureSliceIsValid(ref this); - - var tmp = new byte[this.Count]; - SliceHelpers.CopyBytesUnsafe(tmp, 0, this.Array, this.Offset, this.Count); - return tmp; - } - - /// Return a byte array containing a subset of the bytes of the slice, or null if the slice is null - /// Byte array with a copy of a subset of the slice, or null - [Pure, CanBeNull] - public byte[] GetBytes(int offset, int count) - { - //TODO: throw if this.Array == null ? (what does "Slice.Nil.GetBytes(..., 0)" mean ?) - - if (offset < 0) throw new ArgumentOutOfRangeException("offset"); - if (count < 0 || offset + count > this.Count) throw new ArgumentOutOfRangeException("count"); - - if (count == 0) return this.Array == null ? null : Slice.EmptyArray; - SliceHelpers.EnsureSliceIsValid(ref this); - - var tmp = new byte[count]; - SliceHelpers.CopyBytesUnsafe(tmp, 0, this.Array, this.Offset + offset, count); - return tmp; - } - - /// Return a stream that wraps this slice - /// Stream that will read the slice from the start. - /// - /// You can use this method to convert text into specific encodings, load bitmaps (JPEG, PNG, ...), or any serialization format that requires a Stream or TextReader instance. - /// Disposing this stream will have no effect on the slice. 
- /// - [Pure, NotNull] - public SliceStream AsStream() - { - SliceHelpers.EnsureSliceIsValid(ref this); - return new SliceStream(this); - } - - /// Stringify a slice containing only ASCII chars - /// ASCII string, or null if the slice is null - [Pure, CanBeNull] - public string ToAscii() - { - if (this.Count == 0) return this.HasValue ? String.Empty : default(string); - SliceHelpers.EnsureSliceIsValid(ref this); - return Slice.DefaultEncoding.GetString(this.Array, this.Offset, this.Count); - } - - /// Stringify a slice containing an UTF-8 encoded string - /// Unicode string, or null if the slice is null - [Pure, CanBeNull] - public string ToUnicode() - { - if (this.Count == 0) return this.HasValue ? String.Empty : default(string); - SliceHelpers.EnsureSliceIsValid(ref this); - return Encoding.UTF8.GetString(this.Array, this.Offset, this.Count); - } - - /// Converts a slice using Base64 encoding - [Pure, CanBeNull] - public string ToBase64() - { - if (this.Count == 0) return this.Array == null ? null : String.Empty; - SliceHelpers.EnsureSliceIsValid(ref this); - return Convert.ToBase64String(this.Array, this.Offset, this.Count); - } - - /// Converts a slice into a string with each byte encoded into hexadecimal (lowercase) - /// "0123456789abcdef" - [Pure, CanBeNull] - public string ToHexaString() - { - if (this.Count == 0) return this.Array == null ? null : String.Empty; - var buffer = this.Array; - int p = this.Offset; - int n = this.Count; - var sb = new StringBuilder(n * 2); - while (n-- > 0) - { - byte b = buffer[p++]; - int x = b >> 4; - sb.Append((char)(x + (x < 10 ? 48 : 87))); - x = b & 0xF; - sb.Append((char)(x + (x < 10 ? 
48 : 87))); - } - return sb.ToString(); - } - - /// Converts a slice into a string with each byte encoded into hexadecimal (uppercase) separated by a char - /// Character used to separate the hexadecimal pairs (ex: ' ') - /// "01 23 45 67 89 ab cd ef" - [Pure, CanBeNull] - public string ToHexaString(char sep) - { - if (this.Count == 0) return this.Array == null ? null : String.Empty; - var buffer = this.Array; - int p = this.Offset; - int n = this.Count; - var sb = new StringBuilder(n * (sep == '\0' ? 2 : 3)); - while (n-- > 0) - { - if (sep != '\0' && sb.Length > 0) sb.Append(sep); - byte b = buffer[p++]; - int x = b >> 4; - sb.Append((char)(x + (x < 10 ? 48 : 55))); - x = b & 0xF; - sb.Append((char)(x + (x < 10 ? 48 : 55))); - } - return sb.ToString(); - } - - [NotNull] - private static StringBuilder EscapeString(StringBuilder sb, [NotNull] byte[] buffer, int offset, int count, [NotNull] Encoding encoding) - { - if (sb == null) sb = new StringBuilder(count + 16); - foreach(var c in encoding.GetChars(buffer, offset, count)) - { - if ((c >= ' ' && c <= '~') || (c >= 880 && c <= 2047) || (c >= 12352 && c <= 12591)) - sb.Append(c); - else if (c == '\n') - sb.Append(@"\n"); - else if (c == '\r') - sb.Append(@"\r"); - else if (c == '\t') - sb.Append(@"\t"); - else if (c > 127) - sb.Append(@"\u").Append(((int)c).ToString("x4", CultureInfo.InvariantCulture)); - else // pas clean! - sb.Append(@"\x").Append(((int)c).ToString("x2", CultureInfo.InvariantCulture)); - } - return sb; - } - - /// Helper method that dumps the slice as a string (if it contains only printable ascii chars) or an hex array if it contains non printable chars. It should only be used for logging and troubleshooting ! - /// Returns either "'abc'", "<00 42 7F>", or "{ ...JSON... }". Returns "''" for Slice.Empty, and "" for - [Pure, NotNull] - public string ToAsciiOrHexaString() //REVIEW: rename this to ToPrintableString() ? - { - //REVIEW: rename this to ToFriendlyString() ? or ToLoggableString() ? 
- if (this.Count == 0) return this.Array != null ? "''" : String.Empty; - - var buffer = this.Array; - int n = this.Count; - int p = this.Offset; - - // look for UTF-8 BOM - if (n >= 3 && buffer[p] == 0xEF && buffer[p + 1] == 0xBB && buffer[p + 2] == 0xBF) - { // this is supposed to be an UTF-8 string - return EscapeString(new StringBuilder(n).Append('\''), buffer, p + 3, n - 3, Encoding.UTF8).Append('\'').ToString(); - } - - if (n >= 2) - { - // look for JSON objets or arrays - if ((buffer[p] == '{' && buffer[p + n - 1] == '}') || (buffer[p] == '[' && buffer[p + n - 1] == ']')) - { - return EscapeString(new StringBuilder(n + 16), buffer, p, n, Encoding.UTF8).ToString(); - } - } - - // do a first path on the slice to look for binary of possible text - bool mustEscape = false; - while (n-- > 0) - { - byte b = buffer[p++]; - if (b >= 32 && b < 127) continue; - - // we accept via escaping the following special chars: CR, LF, TAB - if (b == 10 || b == 13 || b == 9) - { - mustEscape = true; - continue; - } - - //TODO: are there any chars above 128 that could be accepted ? - - // this looks like binary - return "<" + ToHexaString(' ') + ">"; - } - - if (!mustEscape) - { // only printable chars found - return new StringBuilder(n + 2).Append('\'').Append(Encoding.ASCII.GetString(buffer, this.Offset, this.Count)).Append('\'').ToString(); - } - else - { // some escaping required - return EscapeString(new StringBuilder(n + 2).Append('\''), buffer, this.Offset, this.Count, Encoding.UTF8).Append('\'').ToString(); - } - } - - /// Converts a slice into a byte - /// Value of the first and only byte of the slice, or 0 if the slice is null or empty. 
- /// If the slice has more than one byte - [Pure] - public byte ToByte() - { - if (this.Count == 0) return 0; - if (this.Count > 1) throw new FormatException("Cannot convert slice into a Byte because it is larger than 1 byte"); - SliceHelpers.EnsureSliceIsValid(ref this); - return this.Array[this.Offset]; - } - - /// Converts a slice into a signed byte (-128..+127) - /// Value of the first and only byte of the slice, or 0 if the slice is null or empty. - /// If the slice has more than one byte - [Pure] - public sbyte ToSByte() - { - if (this.Count == 0) return 0; - if (this.Count > 1) throw new FormatException("Cannot convert slice into an SByte because it is larger than 1 byte"); - SliceHelpers.EnsureSliceIsValid(ref this); - return (sbyte)this.Array[this.Offset]; - } - - /// Converts a slice into a boolean. - /// False if the slice is empty, or is equal to the byte 0; otherwise, true. - [Pure] - public bool ToBool() - { - SliceHelpers.EnsureSliceIsValid(ref this); - // Anything appart from nil/empty, or the byte 0 itself is considered truthy. - return this.Count > 1 || (this.Count == 1 && this.Array[this.Offset] != 0); - //TODO: consider checking if the slice consist of only zeroes ? (ex: Slice.FromFixed32(0) could be considered falsy ...) - } - - #region 16 bits... - - /// Converts a slice into a little-endian encoded, signed 16-bit integer. - /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 2 bytes - /// If there are more than 2 bytes in the slice - [Pure] - public short ToInt16() - { - SliceHelpers.EnsureSliceIsValid(ref this); - switch (this.Count) - { - case 0: return 0; - case 1: return this.Array[this.Offset]; - case 2: return (short) (this.Array[this.Offset] | (this.Array[this.Offset + 1] << 8)); - default: throw new FormatException("Cannot convert slice into an Int16 because it is larger than 2 bytes"); - } - } - - /// Converts a slice into a big-endian encoded, signed 16-bit integer. 
- /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 2 bytes - /// If there are more than 2 bytes in the slice - [Pure] - public short ToInt16BE() - { - SliceHelpers.EnsureSliceIsValid(ref this); - switch (this.Count) - { - case 0: return 0; - case 1: return this.Array[this.Offset]; - case 2: return (short)(this.Array[this.Offset + 1] | (this.Array[this.Offset] << 8)); - default: throw new FormatException("Cannot convert slice into an Int16 because it is larger than 2 bytes"); - } - } - - /// Converts a slice into a little-endian encoded, unsigned 16-bit integer. - /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 2 bytes - /// If there are more than 2 bytes in the slice - [Pure] - public ushort ToUInt16() - { - SliceHelpers.EnsureSliceIsValid(ref this); - switch (this.Count) - { - case 0: return 0; - case 1: return this.Array[this.Offset]; - case 2: return (ushort)(this.Array[this.Offset] | (this.Array[this.Offset + 1] << 8)); - default: throw new FormatException("Cannot convert slice into an UInt16 because it is larger than 2 bytes"); - } - } - - /// Converts a slice into a little-endian encoded, unsigned 16-bit integer. - /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 2 bytes - /// If there are more than 2 bytes in the slice - [Pure] - public ushort ToUInt16BE() - { - SliceHelpers.EnsureSliceIsValid(ref this); - switch (this.Count) - { - case 0: return 0; - case 1: return this.Array[this.Offset]; - case 2: return (ushort)(this.Array[this.Offset + 1] | (this.Array[this.Offset] << 8)); - default: throw new FormatException("Cannot convert slice into an UInt16 because it is larger than 2 bytes"); - } - } - - /// Read a variable-length, little-endian encoded, unsigned integer from a specific location in the slice - /// Relative offset of the first byte - /// Number of bytes to read (up to 2) - /// Decoded unsigned short. 
- /// If is less than zero, or more than 2. - [Pure] - public ushort ReadUInt16(int offset, int bytes) - { - if (bytes < 0 || bytes > 2) throw new ArgumentOutOfRangeException("bytes"); - - var buffer = this.Array; - int p = UnsafeMapToOffset(offset); - switch(bytes) - { - case 0: return 0; - case 1: return buffer[p]; - default: return (ushort)(buffer[p] | (buffer[p + 1] << 8)); - } - } - - /// Read a variable-length, big-endian encoded, unsigned integer from a specific location in the slice - /// Relative offset of the first byte - /// Number of bytes to read (up to 2) - /// Decoded unsigned short. - /// If is less than zero, or more than 2. - [Pure] - public ushort ReadUInt16BE(int offset, int bytes) - { - if (bytes < 0 || bytes > 2) throw new ArgumentOutOfRangeException("bytes"); - - var buffer = this.Array; - int p = UnsafeMapToOffset(offset); - switch (bytes) - { - case 0: return 0; - case 1: return buffer[p]; - default: return (ushort)(buffer[p + 1] | (buffer[p] << 8)); - } - } - - #endregion - - #region 32 bits... - - /// Converts a slice into a little-endian encoded, signed 32-bit integer. - /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 4 bytes - /// If there are more than 4 bytes in the slice - [Pure] - public int ToInt32() - { - if (this.Count == 0) return 0; - if (this.Count > 4) throw new FormatException("Cannot convert slice into an Int32 because it is larger than 4 bytes"); - SliceHelpers.EnsureSliceIsValid(ref this); - - var buffer = this.Array; - int n = this.Count; - int p = this.Offset + n - 1; - - int value = buffer[p--]; - while (--n > 0) - { - value = (value << 8) | buffer[p--]; - } - return value; - } - - /// Converts a slice into a big-endian encoded, signed 32-bit integer. 
- /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 4 bytes - /// If there are more than 4 bytes in the slice - [Pure] - public int ToInt32BE() - { - SliceHelpers.EnsureSliceIsValid(ref this); - - int n = this.Count; - if (n == 0) return 0; - if (n > 4) throw new FormatException("Cannot convert slice into an Int32 because it is larger than 4 bytes"); - - var buffer = this.Array; - int p = this.Offset; - - int value = buffer[p++]; - while (--n > 0) - { - value = (value << 8) | buffer[p++]; - } - return value; - } - - /// Converts a slice into a little-endian encoded, unsigned 32-bit integer. - /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 4 bytes - /// If there are more than 4 bytes in the slice - [Pure] - public uint ToUInt32() - { - SliceHelpers.EnsureSliceIsValid(ref this); - - int n = this.Count; - if (n == 0) return 0; - if (n > 4) throw new FormatException("Cannot convert slice into an UInt32 because it is larger than 4 bytes"); - - var buffer = this.Array; - int p = this.Offset + n - 1; - - uint value = buffer[p--]; - while (--n > 0) - { - value = (value << 8) | buffer[p--]; - } - return value; - } - - /// Converts a slice into a big-endian encoded, unsigned 32-bit integer. 
- /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 4 bytes - /// If there are more than 4 bytes in the slice - [Pure] - public uint ToUInt32BE() - { - SliceHelpers.EnsureSliceIsValid(ref this); - - int n = this.Count; - if (n == 0) return 0; - if (n > 4) throw new FormatException("Cannot convert slice into an UInt32 because it is larger than 4 bytes"); - - var buffer = this.Array; - int p = this.Offset; - - uint value = buffer[p++]; - while (--n > 0) - { - value = (value << 8) | buffer[p++]; - } - return value; - } - - /// Read a variable-length, little-endian encoded, unsigned integer from a specific location in the slice - /// Relative offset of the first byte - /// Number of bytes to read (up to 4) - /// Decoded unsigned integer. - /// If is less than zero, or more than 4. - [Pure] - public uint ReadUInt32(int offset, int bytes) - { - if (bytes < 0 || bytes > 4) throw new ArgumentOutOfRangeException("bytes"); - if (bytes == 0) return 0; - - var buffer = this.Array; - int p = UnsafeMapToOffset(offset) + bytes - 1; - - uint value = buffer[p--]; - while (--bytes > 0) - { - value = (value << 8) | buffer[p--]; - } - return value; - } - - /// Read a variable-length, big-endian encoded, unsigned integer from a specific location in the slice - /// Relative offset of the first byte - /// Number of bytes to read (up to 4) - /// Decoded unsigned integer. - /// If is less than zero, or more than 4. - [Pure] - public uint ReadUInt32BE(int offset, int bytes) - { - if (bytes < 0 || bytes > 4) throw new ArgumentOutOfRangeException("bytes"); - if (bytes == 0) return 0; - - var buffer = this.Array; - int p = UnsafeMapToOffset(offset); - - uint value = buffer[p++]; - while (--bytes > 0) - { - value = (value << 8) | buffer[p++]; - } - return value; - } - - #endregion - - #region 64 bits... - - /// Converts a slice into a little-endian encoded, signed 64-bit integer. 
- /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 8 bytes - /// If there are more than 8 bytes in the slice - [Pure] - public long ToInt64() - { - if (this.Count == 0) return 0L; - if (this.Count > 8) throw new FormatException("Cannot convert slice into an Int64 because it is larger than 8 bytes"); - SliceHelpers.EnsureSliceIsValid(ref this); - - var buffer = this.Array; - int n = this.Count; - int p = this.Offset + n - 1; - - long value = buffer[p--]; - while (--n > 0) - { - value = (value << 8) | buffer[p--]; - } - - return value; - } - - /// Converts a slice into a big-endian encoded, signed 64-bit integer. - /// 0 of the slice is null or empty, a signed integer, or an error if the slice has more than 8 bytes - /// If there are more than 8 bytes in the slice - [Pure] - public long ToInt64BE() - { - if (this.Count == 0) return 0L; - if (this.Count > 8) throw new FormatException("Cannot convert slice into an Int64 because it is larger than 8 bytes"); - SliceHelpers.EnsureSliceIsValid(ref this); - - var buffer = this.Array; - int n = this.Count; - int p = this.Offset; - - long value = buffer[p++]; - while (--n > 0) - { - value = (value << 8) | buffer[p++]; - } - return value; - } - - /// Converts a slice into a little-endian encoded, unsigned 64-bit integer. - /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 8 bytes - /// If there are more than 8 bytes in the slice - [Pure] - public ulong ToUInt64() - { - if (this.Count == 0) return 0L; - if (this.Count > 8) throw new FormatException("Cannot convert slice into an UInt64 because it is larger than 8 bytes"); - SliceHelpers.EnsureSliceIsValid(ref this); - - var buffer = this.Array; - int n = this.Count; - int p = this.Offset + n - 1; - - ulong value = buffer[p--]; - while (--n > 0) - { - value = (value << 8) | buffer[p--]; - } - return value; - } - - /// Converts a slice into a little-endian encoded, unsigned 64-bit integer. 
- /// 0 of the slice is null or empty, an unsigned integer, or an error if the slice has more than 8 bytes - /// If there are more than 8 bytes in the slice - [Pure] - public ulong ToUInt64BE() - { - if (this.Count == 0) return 0L; - if (this.Count > 8) throw new FormatException("Cannot convert slice into an UInt64 because it is larger than 8 bytes"); - SliceHelpers.EnsureSliceIsValid(ref this); - - var buffer = this.Array; - int n = this.Count; - int p = this.Offset; - - ulong value = buffer[p++]; - while (--n > 0) - { - value = (value << 8) | buffer[p++]; - } - return value; - } - - /// Read a variable-length, little-endian encoded, unsigned integer from a specific location in the slice - /// Relative offset of the first byte - /// Number of bytes to read (up to 8) - /// Decoded unsigned integer. - /// If is less than zero, or more than 8. - [Pure] - public ulong ReadUInt64(int offset, int bytes) - { - if (bytes < 0 || bytes > 8) throw new ArgumentOutOfRangeException("bytes"); - if (bytes == 0) return 0UL; - - var buffer = this.Array; - int p = UnsafeMapToOffset(offset) + bytes - 1; - - ulong value = buffer[p--]; - while (--bytes > 0) - { - value = (value << 8) | buffer[p--]; - } - return value; - } - - /// Read a variable-length, big-endian encoded, unsigned integer from a specific location in the slice - /// Relative offset of the first byte - /// Number of bytes to read (up to 8) - /// Decoded unsigned integer. - /// If is less than zero, or more than 8. - [Pure] - public ulong ReadUInt64BE(int offset, int bytes) - { - if (bytes < 0 || bytes > 8) throw new ArgumentOutOfRangeException("bytes"); - if (bytes == 0) return 0UL; - - var buffer = this.Array; - int p = UnsafeMapToOffset(offset); - - ulong value = buffer[p++]; - while (--bytes > 0) - { - value = (value << 8) | buffer[p++]; - } - return value; - } - - /// Converts a slice into a 64-bit UUID. - /// Uuid decoded from the Slice. 
- /// The slice can either be an 8-byte array, or an ASCII string of 16, 17 or 19 chars - [Pure] - public Uuid64 ToUuid64() - { - if (this.Count == 0) return default(Uuid64); - SliceHelpers.EnsureSliceIsValid(ref this); - - switch (this.Count) - { - case 8: - { // binary (8 bytes) - return new Uuid64(this); - } - - case 16: // hex16 - case 17: // hex8-hex8 - case 19: // {hex8-hex8} - { - return Uuid64.Parse(this.ToAscii()); - } - } - - throw new FormatException("Cannot convert slice into an Uuid64 because it has an incorrect size"); - } - - #endregion - - #region 128 bits... - - /// Converts a slice into a Guid. - /// Native Guid decoded from the Slice. - /// The slice can either be a 16-byte RFC4122 GUID, or an ASCII string of 36 chars - [Pure] - public Guid ToGuid() - { - if (this.Count == 0) return default(Guid); - SliceHelpers.EnsureSliceIsValid(ref this); - - if (this.Count == 16) - { // direct byte array - - // UUID are stored using the RFC4122 format (Big Endian), while .NET's System.GUID use Little Endian - // we need to swap the byte order of the Data1, Data2 and Data3 chunks, to ensure that Guid.ToString() will return the proper value. - - return new Uuid128(this).ToGuid(); - } - - if (this.Count == 36) - { // string representation (ex: "da846709-616d-4e82-bf55-d1d3e9cde9b1") - return Guid.Parse(this.ToAscii()); - } - - throw new FormatException("Cannot convert slice into a Guid because it has an incorrect size"); - } - - /// Converts a slice into a 128-bit UUID. - /// Uuid decoded from the Slice. 
- /// The slice can either be a 16-byte RFC4122 GUID, or an ASCII string of 36 chars - [Pure] - public Uuid128 ToUuid128() - { - if (this.Count == 0) return default(Uuid128); - SliceHelpers.EnsureSliceIsValid(ref this); - - if (this.Count == 16) - { - return new Uuid128(this); - } - - if (this.Count == 36) - { - return Uuid128.Parse(this.ToAscii()); - } - - throw new FormatException("Cannot convert slice into an Uuid128 because it has an incorrect size"); - } - - #endregion - - /// Returns a new slice that contains an isolated copy of the buffer - /// Slice that is equivalent, but is isolated from any changes to the buffer - [Pure] - public Slice Memoize() - { - if (this.Count == 0) return this.Array == null ? Slice.Nil : Slice.Empty; - return new Slice(GetBytes(), 0, this.Count); - } - - /// Map an offset in the slice into the absolute offset in the buffer, without any bound checking - /// Relative offset (negative values mean from the end) - /// Absolute offset in the buffer - private int UnsafeMapToOffset(int index) - { - int p = NormalizeIndex(index); - Contract.Requires(p >= 0 & p < this.Count, "Index is outside the slice buffer"); - return this.Offset + p; - } - - /// Map an offset in the slice into the absolute offset in the buffer - /// Relative offset (negative values mean from the end) - /// Absolute offset in the buffer - /// If the index is outside the slice - private int MapToOffset(int index) - { - int p = NormalizeIndex(index); - if (p < 0 || p >= this.Count) FailIndexOutOfBound(index); - checked { return this.Offset + p; } - } - - /// Normalize negative index values into offset from the start - /// Relative offset (negative values mean from the end) - /// Relative offset from the start of the slice - private int NormalizeIndex(int index) - { - checked { return index < 0 ? 
index + this.Count : index; } - } - - /// Returns the value of one byte in the slice - /// Offset of the byte (negative values means start from the end) - public byte this[int index] - { - get { return this.Array[MapToOffset(index)]; } - } - - /// Returns a substring of the current slice that fits withing the specified index range - /// The starting position of the substring. Positive values means from the start, negative values means from the end - /// The end position (excluded) of the substring. Positive values means from the start, negative values means from the end - /// Subslice - public Slice this[int start, int end] - { - get - { - start = NormalizeIndex(start); - end = NormalizeIndex(end); - - // bound check - if (start < 0) start = 0; - if (end > this.Count) end = this.Count; - - if (start >= end) return Slice.Empty; - if (start == 0 && end == this.Count) return this; - - checked { return new Slice(this.Array, this.Offset + start, end - start); } - } - } - - [ContractAnnotation("=> halt")] - private static void FailIndexOutOfBound(int index) - { - throw new IndexOutOfRangeException("Index is outside the slice"); - } - - /// Copy this slice into another buffer, and move the cursor - /// Buffer where to copy this slice - /// Offset into the destination buffer - public void WriteTo(byte[] buffer, ref int cursor) - { - SliceHelpers.EnsureBufferIsValid(buffer, cursor, this.Count); - SliceHelpers.EnsureSliceIsValid(ref this); - - if (this.Count > 0) - { - SliceHelpers.CopyBytes(buffer, cursor, this.Array, this.Offset, this.Count); - cursor += this.Count; - } - } - - /// Copy this slice into another buffer - /// Buffer where to copy this slice - /// Offset into the destination buffer - public void CopyTo(byte[] buffer, int offset) - { - SliceHelpers.EnsureBufferIsValid(buffer, offset, this.Count); - SliceHelpers.EnsureSliceIsValid(ref this); - - SliceHelpers.CopyBytesUnsafe(buffer, offset, this.Array, this.Offset, this.Count); - } - - /// Retrieves a substring 
from this instance. The substring starts at a specified character position. - /// The starting position of the substring. Positive values mmeans from the start, negative values means from the end - /// A slice that is equivalent to the substring that begins at (from the start or the end depending on the sign) in this instance, or Slice.Empty if is equal to the length of the slice. - /// The substring does not copy the original data, and refers to the same buffer as the original slice. Any change to the parent slice's buffer will be seen by the substring. You must call Memoize() on the resulting substring if you want a copy - /// {"ABCDE"}.Substring(0) => {"ABC"} - /// {"ABCDE"}.Substring(1} => {"BCDE"} - /// {"ABCDE"}.Substring(-2} => {"DE"} - /// {"ABCDE"}.Substring(5} => Slice.Empty - /// Slice.Empty.Substring(0) => Slice.Empty - /// Slice.Nil.Substring(0) => Slice.Emtpy - /// - /// indicates a position not within this instance, or is less than zero - [Pure] - public Slice Substring(int offset) - { - if (offset == 0) return this; - - // negative values means from the end - if (offset < 0) offset = this.Count + offset; - - if (offset < 0) throw new ArgumentOutOfRangeException("offset", "Offset cannot be less then start of the slice"); - if (offset > this.Count) throw new ArgumentOutOfRangeException("offset", "Offset cannot be larger than end of slice"); - - return this.Count == offset ? Slice.Empty : new Slice(this.Array, this.Offset + offset, this.Count - offset); - } - - /// Retrieves a substring from this instance. The substring starts at a specified character position and has a specified length. - /// The starting position of the substring. Positive values means from the start, negative values means from the end - /// Number of bytes in the substring - /// A slice that is equivalent to the substring of length that begins at (from the start or the end depending on the sign) in this instance, or Slice.Empty if count is zero. 
- /// The substring does not copy the original data, and refers to the same buffer as the original slice. Any change to the parent slice's buffer will be seen by the substring. You must call Memoize() on the resulting substring if you want a copy - /// {"ABCDE"}.Substring(0, 3) => {"ABC"} - /// {"ABCDE"}.Substring(1, 3} => {"BCD"} - /// {"ABCDE"}.Substring(-2, 2} => {"DE"} - /// Slice.Empty.Substring(0, 0) => Slice.Empty - /// Slice.Nil.Substring(0, 0) => Slice.Emtpy - /// - /// plus indicates a position not within this instance, or or is less than zero - [Pure] - public Slice Substring(int offset, int count) - { - if (count == 0) return Slice.Empty; - - // negative values means from the end - if (offset < 0) offset = this.Count + offset; - - if (offset < 0 || offset >= this.Count) throw new ArgumentOutOfRangeException("offset", "Offset must be inside the slice"); - if (count < 0) throw new ArgumentOutOfRangeException("count", "Count must be a positive integer"); - if (offset > this.Count - count) throw new ArgumentOutOfRangeException("count", "Offset and count must refer to a location within the slice"); - - return new Slice(this.Array, this.Offset + offset, count); - } - - /// Returns a slice array that contains the sub-slices in this instance that are delimited by the specified separator - /// The slice that delimits the sub-slices in this instance. - /// to omit empty array elements from the array returned; or to include empty array elements in the array returned. - /// An array whose elements contains the sub-slices in this instance that are delimited by the value of . - [Pure] - public Slice[] Split(Slice separator, StringSplitOptions options = StringSplitOptions.None) - { - return Split(this, separator, options); - } - - /// Reports the zero-based index of the first occurence of the specified slice in this instance. - /// The slice to seek - /// The zero-based index of if that slice is found, or -1 if it is not. If is , then the return value is -1. 
- [Pure] - public int IndexOf(Slice value) - { - return Find(this, value); - } - - /// Reports the zero-based index of the first occurence of the specified slice in this instance. The search starts at a specified position. - /// The slice to seek - /// The search starting position - /// The zero-based index of if that slice is found, or -1 if it is not. If is , then the return value is startIndex - [Pure] - public int IndexOf(Slice value, int startIndex) - { - //REVIEW: support negative indexing ? - if (startIndex < 0 || startIndex > this.Count) throw new ArgumentOutOfRangeException("startIndex", startIndex, "Start index must be inside the buffer"); - if (this.Count == 0) - { - return value.Count == 0 ? startIndex : -1; - } - var tmp = startIndex == 0 ? this : new Slice(this.Array, this.Offset + startIndex, this.Count - startIndex); - return Find(tmp, value); - } - - /// Determines whether the beginning of this slice instance matches a specified slice. - /// The slice to compare - /// true if matches the beginning of this slice; otherwise, false - [Pure] - public bool StartsWith(Slice value) - { - if (!value.HasValue) throw new ArgumentNullException("value"); - - // any strings starts with the empty string - if (value.Count == 0) return true; - - // prefix cannot be bigger - if (value.Count > this.Count) return false; - - return SliceHelpers.SameBytes(this.Array, this.Offset, value.Array, value.Offset, value.Count); - } - - /// Determines whether the end of this slice instance matches a specified slice. - /// The slice to compare to the substring at the end of this instance. 
- /// true if matches the end of this slice; otherwise, false - [Pure] - public bool EndsWith(Slice value) - { - if (!value.HasValue) throw new ArgumentNullException("value"); - - // any strings ends with the empty string - if (value.Count == 0) return true; - - // suffix cannot be bigger - if (value.Count > this.Count) return false; - - return SliceHelpers.SameBytes(this.Array, this.Offset + this.Count - value.Count, value.Array, value.Offset, value.Count); - } - - /// Equivalent of StartsWith, but the returns false if both slices are identical - [Pure] - public bool PrefixedBy(Slice parent) - { - // empty is a parent of everyone - if (parent.Count == 0) return true; - - // we must have at least one more byte then the parent - if (this.Count <= parent.Count) return false; - - // must start with the same bytes - return SliceHelpers.SameBytes(parent.Array, parent.Offset, this.Array, this.Offset, parent.Count); - } - - /// Equivalent of EndsWith, but the returns false if both slices are identical - public bool SuffixedBy(Slice parent) - { - // empty is a parent of everyone - if (parent.IsNullOrEmpty) return true; - // empty is not a child of anything - if (this.IsNullOrEmpty) return false; - - // we must have at least one more byte then the parent - if (this.Count <= parent.Count) return false; - - // must start with the same bytes - return SliceHelpers.SameBytes(parent.Array, parent.Offset + this.Count - parent.Count, this.Array, this.Offset, parent.Count); - } - - /// Append/Merge a slice at the end of the current slice - /// Slice that must be appended - /// Merged slice if both slices are contigous, or a new slice containg the content of the current slice, followed by the tail slice. Or Slice.Empty if both parts are nil or empty - [Pure] - public Slice Concat(Slice tail) - { - if (tail.Count == 0) return this.Count > 0 ? 
this: Slice.Empty; - if (this.Count == 0) return tail; - - SliceHelpers.EnsureSliceIsValid(ref tail); - SliceHelpers.EnsureSliceIsValid(ref this); - - // special case: adjacent segments ? - if (object.ReferenceEquals(this.Array, tail.Array) && this.Offset + this.Count == tail.Offset) - { - return new Slice(this.Array, this.Offset, this.Count + tail.Count); - } - - byte[] tmp = new byte[this.Count + tail.Count]; - SliceHelpers.CopyBytesUnsafe(tmp, 0, this.Array, this.Offset, this.Count); - SliceHelpers.CopyBytesUnsafe(tmp, this.Count, tail.Array, tail.Offset, tail.Count); - return new Slice(tmp, 0, tmp.Length); - } - - /// Append an array of slice at the end of the current slice, all sharing the same buffer - /// Slices that must be appended - /// Array of slices (for all keys) that share the same underlying buffer - [Pure, NotNull] - public Slice[] ConcatRange([NotNull] Slice[] slices) - { - if (slices == null) throw new ArgumentNullException("slices"); - SliceHelpers.EnsureSliceIsValid(ref this); - - // pre-allocate by computing final buffer capacity - var prefixSize = this.Count; - var capacity = slices.Sum((slice) => prefixSize + slice.Count); - var writer = new SliceWriter(capacity); - var next = new List(slices.Length); - - //TODO: use multiple buffers if item count is huge ? 
- - foreach (var slice in slices) - { - writer.WriteBytes(this); - writer.WriteBytes(slice); - next.Add(writer.Position); - } - - return FdbKey.SplitIntoSegments(writer.Buffer, 0, next); - } - - /// Append a sequence of slice at the end of the current slice, all sharing the same buffer - /// Slices that must be appended - /// Array of slices (for all keys) that share the same underlying buffer - [Pure, NotNull] - public Slice[] ConcatRange([NotNull] IEnumerable slices) - { - if (slices == null) throw new ArgumentNullException("slices"); - - // use optimized version for arrays - var array = slices as Slice[]; - if (array != null) return ConcatRange(array); - - var next = new List(); - var writer = SliceWriter.Empty; - - //TODO: use multiple buffers if item count is huge ? - - foreach (var slice in slices) - { - writer.WriteBytes(this); - writer.WriteBytes(slice); - next.Add(writer.Position); - } - - return FdbKey.SplitIntoSegments(writer.Buffer, 0, next); - - } - - /// Concatenate two slices together - public static Slice Concat(Slice a, Slice b) - { - return a.Concat(b); - } - - /// Concatenate three slices together - public static Slice Concat(Slice a, Slice b, Slice c) - { - int count = a.Count + b.Count + c.Count; - if (count == 0) return Slice.Empty; - var writer = new SliceWriter(count); - writer.WriteBytes(a); - writer.WriteBytes(b); - writer.WriteBytes(c); - return writer.ToSlice(); - } - - /// Concatenate an array of slices into a single slice - public static Slice Concat(params Slice[] args) - { - int count = 0; - for (int i = 0; i < args.Length; i++) count += args[i].Count; - if (count == 0) return Slice.Empty; - var writer = new SliceWriter(count); - for (int i = 0; i < args.Length; i++) writer.WriteBytes(args[i]); - return writer.ToSlice(); - } - - /// Adds a prefix to a list of slices - /// Prefix to add to all the slices - /// List of slices to process - /// Array of slice that all start with and followed by the corresponding entry in - /// This 
method is optmized to reduce the amount of memory allocated - public static Slice[] ConcatRange(Slice prefix, IEnumerable slices) - { - if (slices == null) throw new ArgumentNullException("slices"); - - if (prefix.IsNullOrEmpty) - { // nothing to do, but we still need to copy the array - return slices.ToArray(); - } - - Slice[] res; - Slice[] arr; - ICollection coll; - - if ((arr = slices as Slice[]) != null) - { // fast-path for arrays (most frequent with range reads) - - // we wil use a SliceBuffer to store all the keys produced in as few byte[] arrays as needed - - // precompute the exact size needed - int totalSize = prefix.Count * arr.Length; - for (int i = 0; i < arr.Length; i++) totalSize += arr[i].Count; - var buf = new SliceBuffer(Math.Min(totalSize, 64 * 1024)); - - res = new Slice[arr.Length]; - for (int i = 0; i < arr.Length; i++) - { - res[i] = buf.Intern(prefix, arr[i], aligned: false); - } - } - else if ((coll = slices as ICollection) != null) - { // collection (size known) - - //TODO: also use a SliceBuffer since we could precompute the total size... - - res = new Slice[coll.Count]; - int p = 0; - foreach (var suffix in coll) - { - res[p++] = prefix.Concat(suffix); - } - } - else - { // streaming sequence (size unknown) - - //note: we can only scan the list once, so would be no way to get a sensible value for the buffer's page size - var list = new List(); - foreach (var suffix in slices) - { - list.Add(prefix.Concat(suffix)); - } - res = list.ToArray(); - } - - return res; - } - - /// Implicitly converts a Slice into an ArraySegment<byte> - public static implicit operator ArraySegment(Slice value) - { - if (!value.HasValue) return default(ArraySegment); - return new ArraySegment(value.Array, value.Offset, value.Count); - } - - /// Implicitly converts an ArraySegment<byte> into a Slice - public static implicit operator Slice(ArraySegment value) - { - return new Slice(value.Array, value.Offset, value.Count); - } - - #region Slice arithmetics... 
- - /// Compare two slices for equality - /// True if the slices contains the same bytes - public static bool operator ==(Slice a, Slice b) - { - return a.Equals(b); - } - - /// Compare two slices for inequality - /// True if the slice do not contain the same bytes - public static bool operator !=(Slice a, Slice b) - { - return !a.Equals(b); - } - - /// Compare two slices - /// True if is lexicographically less than ; otherwise, false. - public static bool operator <(Slice a, Slice b) - { - return a.CompareTo(b) < 0; - } - - /// Compare two slices - /// True if is lexicographically less than or equal to ; otherwise, false. - public static bool operator <=(Slice a, Slice b) - { - return a.CompareTo(b) <= 0; - } - - /// Compare two slices - /// True if is lexicographically greater than ; otherwise, false. - public static bool operator >(Slice a, Slice b) - { - return a.CompareTo(b) > 0; - } - - /// Compare two slices - /// True if is lexicographically greater than or equal to ; otherwise, false. 
- public static bool operator >=(Slice a, Slice b) - { - return a.CompareTo(b) >= 0; - } - - /// Append/Merge two slices together - /// First slice - /// Second slice - /// Merged slices if both slices are contigous, or a new slice containg the content of the first slice, followed by the second - public static Slice operator +(Slice a, Slice b) - { - return a.Concat(b); - } - - /// Appends a byte at the end of the slice - /// First slice - /// Byte to append at the end - /// New slice with the byte appended - public static Slice operator +(Slice a, byte b) - { - if (a.Count == 0) return Slice.FromByte(b); - var tmp = new byte[a.Count + 1]; - SliceHelpers.CopyBytesUnsafe(tmp, 0, a.Array, a.Offset, a.Count); - tmp[a.Count] = b; - return new Slice(tmp, 0, tmp.Length); - } - - /// Remove bytes at the end of slice - /// Smaller slice - public static Slice operator -(Slice s, int n) - { - if (n < 0) throw new ArgumentOutOfRangeException("n", "Cannot subtract a negative number from a slice"); - if (n > s.Count) throw new ArgumentOutOfRangeException("n", "Cannout substract more bytes than the slice contains"); - - if (n == 0) return s; - if (n == s.Count) return Slice.Empty; - - return new Slice(s.Array, s.Offset, s.Count - n); - } - - // note: We also need overloads with Nullable's to be able to do things like "if (slice == null)", "if (slice != null)" or "if (null != slice)". - // For structs that have "==" / "!=" operators, the compiler will think that when you write "slice == null", you really mean "(Slice?)slice == default(Slice?)", and that would ALWAYS false if you don't have specialized overloads to intercept. - - /// Determines whether two specified instances of are equal - public static bool operator ==(Slice? a, Slice? b) - { - return a.GetValueOrDefault().Equals(b.GetValueOrDefault()); - } - - /// Determines whether two specified instances of are not equal - public static bool operator !=(Slice? a, Slice? 
b) - { - return !a.GetValueOrDefault().Equals(b.GetValueOrDefault()); - } - - /// Determines whether one specified is less than another specified . - public static bool operator <(Slice? a, Slice? b) - { - return a.GetValueOrDefault() < b.GetValueOrDefault(); - } - - /// Determines whether one specified is less than or equal to another specified . - public static bool operator <=(Slice? a, Slice? b) - { - return a.GetValueOrDefault() <= b.GetValueOrDefault(); - } - - /// Determines whether one specified is greater than another specified . - public static bool operator >(Slice? a, Slice? b) - { - return a.GetValueOrDefault() > b.GetValueOrDefault(); - } - - /// Determines whether one specified is greater than or equal to another specified . - public static bool operator >=(Slice? a, Slice? b) - { - return a.GetValueOrDefault() >= b.GetValueOrDefault(); - } - - /// Concatenates two together. - public static Slice operator +(Slice? a, Slice? b) - { - // note: makes "slice + null" work! - return a.GetValueOrDefault().Concat(b.GetValueOrDefault()); - } - - #endregion - - /// Returns a printable representation of the key - /// You can roundtrip the result of calling slice.ToString() by passing it to (string) and get back the original slice. - public override string ToString() - { - return Slice.Dump(this); - } - - /// Returns a printable representation of a key - /// This may not be efficient, so it should only be use for testing/logging/troubleshooting - [NotNull] - public static string Dump(Slice value) - { - const int MAX_SIZE = 1024; - - if (value.Count == 0) return value.HasValue ? "" : ""; - - SliceHelpers.EnsureSliceIsValid(ref value); - - var buffer = value.Array; - int count = Math.Min(value.Count, MAX_SIZE); - int pos = value.Offset; - - var sb = new StringBuilder(count + 16); - while (count-- > 0) - { - int c = buffer[pos++]; - if (c < 32 || c >= 127 || c == 60) - { - sb.Append('<'); - int x = c >> 4; - sb.Append((char)(x + (x < 10 ? 
48 : 55))); - x = c & 0xF; - sb.Append((char)(x + (x < 10 ? 48 : 55))); - sb.Append('>'); - } - else - { - sb.Append((char)c); - } - } - if (value.Count > MAX_SIZE) sb.Append("[...]"); - return sb.ToString(); - } - - /// Decode the string that was generated by slice.ToString() or Slice.Dump(), back into the original slice - /// This may not be efficient, so it should only be use for testing/logging/troubleshooting - public static Slice Unescape(string value) - { - var writer = SliceWriter.Empty; - for (int i = 0; i < value.Length; i++) - { - char c = value[i]; - if (c == '<') - { - if (value[i + 3] != '>') throw new FormatException(String.Format("Invalid escape character at offset {0}", i)); - c = (char)(NibbleToDecimal(value[i + 1]) << 4 | NibbleToDecimal(value[i + 2])); - i += 3; - } - writer.WriteByte((byte)c); - } - return writer.ToSlice(); - } - - #region Streams... - - /// Read the content of a stream into a slice - /// Source stream, that must be in a readable state - /// Slice containing the stream content (or if the stream is ) - /// If is null. - /// If the size of the stream exceeds or if it does not support reading. - public static Slice FromStream([NotNull] Stream data) - { - if (data == null) throw new ArgumentNullException("data"); - - // special case for empty values - if (data == Stream.Null) return Slice.Nil; - if (!data.CanRead) throw new InvalidOperationException("Cannot read from provided stream"); - - if (data.Length == 0) return Slice.Empty; - if (data.Length > int.MaxValue) throw new InvalidOperationException("Streams of more than 2GB are not supported"); - //TODO: other checks? - - int length; - checked { length = (int)data.Length; } - - if (data is MemoryStream || data is UnmanagedMemoryStream) // other types of already completed streams ? 
- { // read synchronously - return LoadFromNonBlockingStream(data, length); - } - - // read asynchronoulsy - return LoadFromBlockingStream(data, length); - } - - /// Asynchronously read the content of a stream into a slice - /// Source stream, that must be in a readable state - /// Optional cancellation token for this operation - /// Slice containing the stream content (or if the stream is ) - /// If is null. - /// If the size of the stream exceeds or if it does not support reading. - public static Task FromStreamAsync([NotNull] Stream data, CancellationToken cancellationToken) - { - if (data == null) throw new ArgumentNullException("data"); - - // special case for empty values - if (data == Stream.Null) return Task.FromResult(Slice.Nil); - if (!data.CanRead) throw new InvalidOperationException("Cannot read from provided stream"); - - if (data.Length == 0) return Task.FromResult(Slice.Empty); - if (data.Length > int.MaxValue) throw new InvalidOperationException("Streams of more than 2GB are not supported"); - //TODO: other checks? - - if (cancellationToken.IsCancellationRequested) return TaskHelpers.FromCancellation(cancellationToken); - - int length; - checked { length = (int)data.Length; } - - if (data is MemoryStream || data is UnmanagedMemoryStream) // other types of already completed streams ? - { // read synchronously - return Task.FromResult(LoadFromNonBlockingStream(data, length)); - } - - // read asynchronoulsy - return LoadFromBlockingStreamAsync(data, length, 0, cancellationToken); - } - - /// Read from a non-blocking stream that already contains all the data in memory (MemoryStream, UnmanagedStream, ...) 
- /// Source stream - /// Number of bytes to read from the stream - /// Slice containing the loaded data - private static Slice LoadFromNonBlockingStream([NotNull] Stream source, int length) - { - Contract.Requires(source != null && source.CanRead && source.Length <= int.MaxValue); - - var ms = source as MemoryStream; - if (ms != null) - { // Already holds onto a byte[] - - //note: should be use GetBuffer() ? It can throws and is dangerous (could mutate) - return Slice.Create(ms.ToArray()); - } - - // read it in bulk, without buffering - - var buffer = new byte[length]; //TODO: round up to avoid fragmentation ? - - // note: reading should usually complete with only one big read, but loop until completed, just to be sure - int p = 0; - int r = length; - while (r > 0) - { - int n = source.Read(buffer, p, r); - if (n <= 0) throw new InvalidOperationException(String.Format("Unexpected end of stream at {0} / {1} bytes", p, length)); - p += n; - r -= n; - } - Contract.Assert(r == 0 && p == length); - - return Slice.Create(buffer); - } - - /// Synchronously read from a blocking stream (FileStream, NetworkStream, ...) - /// Source stream - /// Number of bytes to read from the stream - /// If non zero, max amount of bytes to read in one chunk. If zero, tries to read everything at once - /// Slice containing the loaded data - private static Slice LoadFromBlockingStream([NotNull] Stream source, int length, int chunkSize = 0) - { - Contract.Requires(source != null && source.CanRead && source.Length <= int.MaxValue && chunkSize >= 0); - - if (chunkSize == 0) chunkSize = int.MaxValue; - - var buffer = new byte[length]; //TODO: round up to avoid fragmentation ? 
- - // note: reading should usually complete with only one big read, but loop until completed, just to be sure - int p = 0; - int r = length; - while (r > 0) - { - int c = Math.Max(r, chunkSize); - int n = source.Read(buffer, p, c); - if (n <= 0) throw new InvalidOperationException(String.Format("Unexpected end of stream at {0} / {1} bytes", p, length)); - p += n; - r -= n; - } - Contract.Assert(r == 0 && p == length); - - return Slice.Create(buffer); - } - - /// Asynchronously read from a blocking stream (FileStream, NetworkStream, ...) - /// Source stream - /// Number of bytes to read from the stream - /// If non zero, max amount of bytes to read in one chunk. If zero, tries to read everything at once - /// Optional cancellation token for this operation - /// Slice containing the loaded data - private static async Task LoadFromBlockingStreamAsync([NotNull] Stream source, int length, int chunkSize, CancellationToken cancellationToken) - { - Contract.Requires(source != null && source.CanRead && source.Length <= int.MaxValue && chunkSize >= 0); - - if (chunkSize == 0) chunkSize = int.MaxValue; - - var buffer = new byte[length]; //TODO: round up to avoid fragmentation ? - - // note: reading should usually complete with only one big read, but loop until completed, just to be sure - int p = 0; - int r = length; - while (r > 0) - { - int c = Math.Min(r, chunkSize); - int n = await source.ReadAsync(buffer, p, c, cancellationToken); - if (n <= 0) throw new InvalidOperationException(String.Format("Unexpected end of stream at {0} / {1} bytes", p, length)); - p += n; - r -= n; - } - Contract.Assert(r == 0 && p == length); - - return Slice.Create(buffer); - } - - #endregion - - #region Equality, Comparison... - - /// Checks if an object is equal to the current slice - /// Object that can be either another slice, a byte array, or a byte array segment. - /// true if the object represents a sequence of bytes that has the same size and same content as the current slice. 
- public override bool Equals(object obj) - { - if (obj == null) return this.Array == null; - if (obj is Slice) return Equals((Slice)obj); - if (obj is ArraySegment) return Equals((ArraySegment)obj); - if (obj is byte[]) return Equals((byte[])obj); - return false; - } - - /// Gets the hash code for this slice - /// A 32-bit signed hash code calculated from all the bytes in the slice. - public override int GetHashCode() - { - SliceHelpers.EnsureSliceIsValid(ref this); - if (this.Array == null) return 0; - return SliceHelpers.ComputeHashCodeUnsafe(this.Array, this.Offset, this.Count); - } - - /// Checks if another slice is equal to the current slice. - /// Slice compared with the current instance - /// true if both slices have the same size and contain the same sequence of bytes; otherwise, false. - public bool Equals(Slice other) - { - SliceHelpers.EnsureSliceIsValid(ref other); - SliceHelpers.EnsureSliceIsValid(ref this); - - // note: Slice.Nil != Slice.Empty - if (this.Array == null) return other.Array == null; - if (other.Array == null) return false; - - return this.Count == other.Count && SliceHelpers.SameBytesUnsafe(this.Array, this.Offset, other.Array, other.Offset, this.Count); - } - - /// Lexicographically compare this slice with another one, and return an indication of their relative sort order - /// Slice to compare with this instance - /// Returns a NEGATIVE value if the current slice is LESS THAN , ZERO if it is EQUAL TO , and a POSITIVE value if it is GREATER THAN . - /// If both this instance and are Nil or Empty, the comparison will return ZERO. If only is Nil or Empty, it will return a NEGATIVE value. If only this instance is Nil or Empty, it will return a POSITIVE value. - public int CompareTo(Slice other) - { - if (this.Count == 0) return other.Count == 0 ? 
0 : -1; - if (other.Count == 0) return +1; - SliceHelpers.EnsureSliceIsValid(ref other); - SliceHelpers.EnsureSliceIsValid(ref this); - return SliceHelpers.CompareBytesUnsafe(this.Array, this.Offset, this.Count, other.Array, other.Offset, other.Count); - } - - /// Checks if the content of a byte array segment matches the current slice. - /// Byte array segment compared with the current instance - /// true if both segment and slice have the same size and contain the same sequence of bytes; otherwise, false. - public bool Equals(ArraySegment other) - { - return this.Count == other.Count && SliceHelpers.SameBytes(this.Array, this.Offset, other.Array, other.Offset, this.Count); - } - - /// Checks if the content of a byte array matches the current slice. - /// Byte array compared with the current instance - /// true if the both array and slice have the same size and contain the same sequence of bytes; otherwise, false. - public bool Equals(byte[] other) - { - if (other == null) return this.Array == null; - return this.Count == other.Length && SliceHelpers.SameBytes(this.Array, this.Offset, other, 0, this.Count); - } - - #endregion - - [UsedImplicitly(ImplicitUseTargetFlags.WithMembers)] - private sealed class DebugView - { - private readonly Slice m_slice; - - public DebugView(Slice slice) - { - m_slice = slice; - } - - public byte[] Data - { - get - { - if (m_slice.Count == 0) return m_slice.Array == null ? null : EmptyArray; - if (m_slice.Offset == 0 && m_slice.Count == m_slice.Array.Length) return m_slice.Array; - var tmp = new byte[m_slice.Count]; - System.Array.Copy(m_slice.Array, m_slice.Offset, tmp, 0, m_slice.Count); - return tmp; - } - } - - public string Text - { - get - { - if (m_slice.Count == 0) return m_slice.Array == null ? 
null : String.Empty; - return m_slice.ToAsciiOrHexaString(); - } - } - - public int Count - { - get { return m_slice.Count; } - } - - } - - } - -} diff --git a/FoundationDB.Client/Utils/SliceComparer.cs b/FoundationDB.Client/Utils/SliceComparer.cs deleted file mode 100644 index 83d911b51..000000000 --- a/FoundationDB.Client/Utils/SliceComparer.cs +++ /dev/null @@ -1,79 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Client -{ - using System; - using System.Collections.Generic; - - /// Performs optimized equality and comparison checks on Slices - public sealed class SliceComparer : IComparer, IEqualityComparer - { - /// Default instance of the slice comparator - public static readonly SliceComparer Default = new SliceComparer(); - - private SliceComparer() - { } - - /// Lexicographically compare two slices and returns an indication of their relative sort order. - /// Slice compared with - /// Slice compared with - /// Returns a NEGATIVE value if is LESS THAN , ZERO if is EQUAL TO , and a POSITIVE value if is GREATER THAN . - /// - /// If both and are nil or empty, the comparison will return ZERO. If only is nil or empty, it will return a NEGATIVE value. If only is nil or empty, it will return a POSITIVE value. - /// There are no guarantees that non-zero results will be exactly -1 or +1. You should always use comparison operators or the sign of the returned value, instead of testing for equality with -1 or +1. - /// - public int Compare(Slice x, Slice y) - { - //REVIEW: cmp(Nil, Empty) returns 0 but Nil != Empty ? - if (x.Count == 0) return y.Count == 0 ? 0 : -1; - if (y.Count == 0) return +1; - return SliceHelpers.CompareBytes(x.Array, x.Offset, x.Count, y.Array, y.Offset, y.Count); - } - - /// Checks if two slices are equal. - /// Slice compared with - /// Slice compared with - /// true if and have the same size and contain the same sequence of bytes; otherwise, false. 
- public bool Equals(Slice x, Slice y) - { - return x.Count == y.Count && SliceHelpers.SameBytes(x.Array, x.Offset, y.Array, y.Offset, y.Count); - } - - /// Computes the hash code of a slice - /// A slice - /// A 32-bit signed hash coded calculated from all the bytes in the slice - public int GetHashCode(Slice obj) - { - if (obj.Array == null) return 0; - return SliceHelpers.ComputeHashCode(obj.Array, obj.Offset, obj.Count); - } - - } - -} diff --git a/FoundationDB.Client/Utils/SliceHelpers.cs b/FoundationDB.Client/Utils/SliceHelpers.cs deleted file mode 100644 index c4d93a7fb..000000000 --- a/FoundationDB.Client/Utils/SliceHelpers.cs +++ /dev/null @@ -1,734 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -#define USE_NATIVE_MEMORY_OPERATORS - -namespace FoundationDB.Client -{ - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.Runtime.CompilerServices; - using System.Runtime.ConstrainedExecution; - using System.Runtime.InteropServices; - using System.Security; - - internal static class SliceHelpers - { - - public static void EnsureSliceIsValid(ref Slice slice) - { - if (slice.Count == 0 && slice.Offset >= 0) return; - if (slice.Count < 0 || slice.Offset < 0 || slice.Array == null || slice.Offset + slice.Count > slice.Array.Length) - { - ThrowMalformedSlice(slice); - } - } - - /// Reject an invalid slice by throw an error with the appropriate diagnostic message. - /// Slice that is being naugthy - [ContractAnnotation("=> halt")] - public static void ThrowMalformedSlice(Slice slice) - { -#if DEBUG - // If you break here, that means that a slice is invalid (negative count, offset, ...), which may be a sign of memory corruption! - // You should walk up the stack to see what is going on ! - if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break(); -#endif - - if (slice.Offset < 0) throw new FormatException("The specified slice has a negative offset, which is not legal. This may be a side effect of memory corruption."); - if (slice.Count < 0) throw new FormatException("The specified slice has a negative size, which is not legal. 
This may be a side effect of memory corruption."); - if (slice.Count > 0) - { - if (slice.Array == null) throw new FormatException("The specified slice is missing its underlying buffer."); - if (slice.Offset + slice.Count > slice.Array.Length) throw new FormatException("The specified slice is larger than its underlying buffer."); - } - // maybe it's Lupus ? - throw new FormatException("The specified slice is invalid."); - } - - public static void EnsureBufferIsValid(byte[] array, int offset, int count) - { - if (count == 0 && offset >= 0) return; - if (count < 0 || offset < 0 || array == null || offset + count > array.Length) - { - ThrowMalformedBuffer(array, offset, count); - } - } - - /// Reject an invalid slice by throw an error with the appropriate diagnostic message. - [ContractAnnotation("=> halt")] - public static void ThrowMalformedBuffer(byte[] array, int offset, int count) - { - if (offset < 0) throw new ArgumentException("The specified segment has a negative offset, which is not legal. This may be a side effect of memory corruption.", "offset"); - if (count < 0) throw new ArgumentException("The specified segment has a negative size, which is not legal. This may be a side effect of memory corruption.", "count"); - if (count > 0) - { - if (array == null) throw new ArgumentException("The specified segment is missing its underlying buffer.", "array"); - if (offset + count > array.Length) throw new ArgumentException("The specified segment is larger than its underlying buffer.", "count"); - } - // maybe it's Lupus ? - throw new ArgumentException("The specified segment is invalid."); - } - - /// Round a size to a multiple of 16 - /// Minimum size required - /// Size rounded up to the next multiple of 16 - /// If the rounded size overflows over 2 GB - public static int Align(int size) - { - const int ALIGNMENT = 16; // MUST BE A POWER OF TWO! 
- const int MASK = (-ALIGNMENT) & int.MaxValue; - - if (size <= ALIGNMENT) - { - if (size < 0) throw new ArgumentOutOfRangeException("size", "Size cannot be negative"); - return ALIGNMENT; - } - // force an exception if we overflow above 2GB - checked { return (size + (ALIGNMENT - 1)) & MASK; } - } - - /// Round a number to the next power of 2 - /// Positive integer that will be rounded up (if not already a power of 2) - /// Smallest power of 2 that is greater then or equal to - /// Will return 1 for = 0 (because 0 is not a power 2 !), and will throws for < 0 - /// If is a negative number - public static int NextPowerOfTwo(int x) - { - // cf http://en.wikipedia.org/wiki/Power_of_two#Algorithm_to_round_up_to_power_of_two - - // special case - if (x == 0) return 1; - if (x < 0) throw new ArgumentOutOfRangeException("x", x, "Cannot compute the next power of two for negative numbers"); - //TODO: check for overflow at if x > 2^30 ? - - --x; - x |= (x >> 1); - x |= (x >> 2); - x |= (x >> 4); - x |= (x >> 8); - x |= (x >> 16); - return x + 1; - } - - /// Compute the hash code of a byte segment - /// Buffer - /// Offset of the start of the segment in the buffer - /// Number of bytes in the segment - /// A 32-bit signed hash code calculated from all the bytes in the segment. - public static int ComputeHashCode([NotNull] byte[] bytes, int offset, int count) - { - if (bytes == null || offset < 0 || count < 0 || offset + count > bytes.Length) SliceHelpers.ThrowMalformedBuffer(bytes, offset, count); - - return ComputeHashCodeUnsafe(bytes, offset, count); - } - - /// Compute the hash code of a byte segment, without validating the arguments - /// Buffer - /// Offset of the start of the segment in the buffer - /// Number of bytes in the segment - /// A 32-bit signed hash code calculated from all the bytes in the segment. 
- public static int ComputeHashCodeUnsafe([NotNull] byte[] bytes, int offset, int count) - { - Contract.Requires(bytes != null && offset >= 0 && count >= 0); - - //TODO: use a better hash algorithm? (xxHash, CityHash, SipHash, ...?) - // => will be called a lot when Slices are used as keys in an hash-based dictionary (like Dictionary) - // => won't matter much for *ordered* dictionary that will probably use IComparer.Compare(..) instead of the IEqalityComparer.GetHashCode()/Equals() combo - // => we don't need a cryptographic hash, just something fast and suitable for use with hashtables... - // => probably best to select an algorithm that works on 32-bit or 64-bit chunks - - // : unoptimized 32 bits FNV-1a implementation - uint h = 2166136261; // FNV1 32 bits offset basis - int p = offset; - int n = count; - while (n-- > 0) - { - h = (h ^ bytes[p++]) * 16777619; // FNV1 32 prime - } - return (int)h; - // - } - - /// Compare two byte segments for equality - /// Left buffer - /// Start offset in left buffer - /// Right buffer - /// Start offset in right buffer - /// Number of bytes to compare - /// true if all bytes are the same in both segments - public static bool SameBytes(byte[] left, int leftOffset, byte[] right, int rightOffset, int count) - { - SliceHelpers.EnsureBufferIsValid(left, leftOffset, count); - SliceHelpers.EnsureBufferIsValid(right, rightOffset, count); - - if (left == null || right == null) return left == right; - return SameBytesUnsafe(left, leftOffset, right, rightOffset, count); - } - - /// Compare two byte segments for equality, without validating the arguments - /// Left buffer - /// Start offset in left buffer - /// Right buffer - /// Start offset in right buffer - /// Number of bytes to compare - /// true if all bytes are the same in both segments - public static bool SameBytesUnsafe([NotNull] byte[] left, int leftOffset, [NotNull] byte[] right, int rightOffset, int count) - { - Contract.Requires(left != null && leftOffset >= 0 && right != 
null && rightOffset >= 0 && count >= 0); - - // for very small keys, the cost of pinning and marshalling may be too high - if (count <= 8) - { - while (count-- > 0) - { - if (left[leftOffset++] != right[rightOffset++]) return false; - } - return true; - } - - if (object.ReferenceEquals(left, right)) - { // In cases where the keys are backed by the same buffer, we don't need to pin the same buffer twice - - if (leftOffset == rightOffset) - { // same segment in the same buffer - return true; - } - - unsafe - { - fixed (byte* ptr = left) - { - return 0 == CompareMemoryUnsafe(ptr + leftOffset, ptr + rightOffset, count); - } - } - } - else - { - unsafe - { - fixed (byte* pLeft = left) - fixed (byte* pRight = right) - { - return 0 == CompareMemoryUnsafe(pLeft + leftOffset, pRight + rightOffset, count); - } - } - } - } - - /// Compare two byte segments lexicographically - /// Left buffer - /// Start offset in left buffer - /// Number of bytes in left buffer - /// Right buffer - /// Start offset in right buffer - /// Number of bytes in right buffer - /// Returns zero if segments are identical (same bytes), a negative value if left is lexicographically less than right, or a positive value if left is lexicographically greater than right - /// The comparison algorithm respect the following: - /// * "A" < "B" - /// * "A" < "AA" - /// * "AA" < "B" - public static int CompareBytes(byte[] left, int leftOffset, int leftCount, byte[] right, int rightOffset, int rightCount) - { - SliceHelpers.EnsureBufferIsValid(left, leftOffset, leftCount); - SliceHelpers.EnsureBufferIsValid(right, rightOffset, rightCount); - - return CompareBytesUnsafe(left, leftOffset, leftCount, right, rightOffset, rightCount); - } - - /// Compare two byte segments lexicographically, without validating the arguments - /// Left buffer - /// Start offset in left buffer - /// Number of bytes in left buffer - /// Right buffer - /// Start offset in right buffer - /// Number of bytes in right buffer - /// Returns zero 
if segments are identical (same bytes), a negative value if left is lexicographically less than right, or a positive value if left is lexicographically greater than right - /// The comparison algorithm respect the following: - /// * "A" < "B" - /// * "A" < "AA" - /// * "AA" < "B" - public static int CompareBytesUnsafe([NotNull] byte[] left, int leftOffset, int leftCount, [NotNull] byte[] right, int rightOffset, int rightCount) - { - Contract.Requires(left != null && right != null && leftOffset >= 0 && leftCount >= 0 && rightOffset >= 0 && rightCount >= 0); - - if (object.ReferenceEquals(left, right)) - { // In cases where the keys are backed by the same buffer, we don't need to pin the same buffer twice - - if (leftCount == rightCount && leftOffset == rightOffset) - { // same segment in the same buffer - return 0; - } - - unsafe - { - fixed (byte* ptr = left) - { - int n = CompareMemoryUnsafe(ptr + leftOffset, ptr + rightOffset, Math.Min(leftCount, rightCount)); - return n != 0 ? n : leftCount - rightCount; - } - } - } - else - { - unsafe - { - fixed (byte* pLeft = left) - fixed (byte* pRight = right) - { - int n = CompareMemoryUnsafe(pLeft + leftOffset, pRight + rightOffset, Math.Min(leftCount, rightCount)); - return n != 0 ? n : leftCount - rightCount; - } - } - } - } - - /// Copy the content of a byte segment into another. CAUTION: The arguments are NOT in the same order as Buffer.BlockCopy() or Array.Copy() ! - /// Destination buffer - /// Offset in destination buffer - /// Source buffer - /// Offset in source buffer - /// Number of bytes to copy - /// CAUTION: THE ARGUMENTS ARE REVERSED! They are in the same order as memcpy() and memmove(), with destination first, and source second! 
- public static void CopyBytes(byte[] dst, int dstOffset, byte[] src, int srcOffset, int count) - { - SliceHelpers.EnsureBufferIsValid(dst, dstOffset, count); - SliceHelpers.EnsureBufferIsValid(src, srcOffset, count); - - CopyBytesUnsafe(dst, dstOffset, src, srcOffset, count); - } - - /// Copy the content of a byte segment into another, without validating the arguments. CAUTION: The arguments are NOT in the same order as Buffer.BlockCopy() or Array.Copy() ! - /// Destination buffer - /// Offset in destination buffer - /// Source buffer - /// Offset in source buffer - /// Number of bytes to copy - /// CAUTION: THE ARGUMENTS ARE REVERSED! They are in the same order as memcpy() and memmove(), with destination first, and source second! - public static void CopyBytesUnsafe([NotNull] byte[] dst, int dstOffset, [NotNull] byte[] src, int srcOffset, int count) - { - Contract.Requires(dst != null && src != null && dstOffset >= 0 && srcOffset >= 0 && count >= 0); - - if (count <= 8) - { // for very small keys, the cost of pinning and marshalling may be to high - - while (count-- > 0) - { - dst[dstOffset++] = src[srcOffset++]; - } - } - else if (object.ReferenceEquals(dst, src)) - { // In cases where the keys are backed by the same buffer, we don't need to pin the same buffer twice - - unsafe - { - fixed (byte* ptr = dst) - { - MoveMemoryUnsafe(ptr + dstOffset, ptr + srcOffset, count); - } - } - } - else - { - unsafe - { - fixed (byte* pDst = dst) - fixed (byte* pSrc = src) - { - MoveMemoryUnsafe(pDst + dstOffset, pSrc + srcOffset, count); - } - } - } - } - - /// Copy the content of a native byte segment into a managed segment, without validating the arguments. - /// Destination buffer - /// Offset in destination buffer - /// Point to the source buffer - /// Number of bytes to copy - /// CAUTION: THE ARGUMENTS ARE REVERSED! They are in the same order as memcpy() and memmove(), with destination first, and source second! 
- public static unsafe void CopyBytesUnsafe([NotNull] byte[] dst, int dstOffset, byte* src, int count) - { - Contract.Requires(dst != null && src != null && dstOffset >= 0 && count >= 0); - - if (count <= 8) - { - while (count-- > 0) - { - dst[dstOffset++] = *src++; - } - } - else - { - fixed (byte* ptr = dst) - { - MoveMemoryUnsafe(ptr + dstOffset, src, count); - } - } - } - - /// Fill the content of a managed buffer with the same byte repeated - public static void SetBytes(byte[] bytes, byte value) - { - if (bytes == null) throw new ArgumentNullException("bytes"); - SetBytes(bytes, 0, bytes.Length, value); - } - - /// Fill the content of a managed segment with the same byte repeated - public static void SetBytes(byte[] bytes, int offset, int count, byte value) - { - SliceHelpers.EnsureBufferIsValid(bytes, offset, count); - - if (count <= 8) - { // for very small keys, the cost of pinning and marshalling may be to high - - while (count-- > 0) - { - bytes[offset++] = value; - } - } - else - { - unsafe - { - fixed (byte* ptr = bytes) - { - SetMemoryUnsafe(ptr + offset, value, count); - } - } - } - } - - /// Fill the content of a native byte segment with the same byte repeated - public static unsafe void SetBytes(byte* bytes, int count, byte value) - { - if (bytes == null) throw new ArgumentNullException("bytes"); - if (count < 0) throw new ArgumentException("Count cannot be a negative number.", "count"); - - if (count <= 8) - { // for very small keys, the cost of pinning and marshalling may be to high - - while (count-- > 0) - { - *bytes++ = value; - } - } - else - { - SetMemoryUnsafe(bytes, value, count); - } - } - - /// Dangerously copy native memory from one location to another - /// Where to copy the bytes - /// Where to read the bytes - /// Number of bytes to copy - [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] -#if USE_NATIVE_MEMORY_OPERATORS && !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - 
private static unsafe void MoveMemoryUnsafe(byte* dest, byte* src, int count) - { - Contract.Requires(dest != null && src != null && count >= 0); - -#if USE_NATIVE_MEMORY_OPERATORS - NativeMethods.memmove(dest, src, new IntPtr(count)); -#else - if (count >= 16) - { - do - { - *((int*)(dest + 0)) = *((int*)(src + 0)); - *((int*)(dest + 4)) = *((int*)(src + 4)); - *((int*)(dest + 8)) = *((int*)(src + 8)); - *((int*)(dest + 12)) = *((int*)(src + 12)); - dest += 16; - src += 16; - } - while ((count -= 16) >= 16); - } - if (count > 0) - { - if ((count & 8) != 0) - { - *((int*)(dest + 0)) = *((int*)(src + 0)); - *((int*)(dest + 4)) = *((int*)(src + 4)); - dest += 8; - src += 8; - } - if ((count & 4) != 0) - { - *((int*)dest) = *((int*)src); - dest += 4; - src += 4; - } - if ((count & 2) != 0) - { - *((short*)dest) = *((short*)src); - dest += 2; - src += 2; - } - if ((count & 1) != 0) - { - *dest = *src; - } - } -#endif - } - - /// Dangerously fill native memory with a specific byte - /// Where to fill the bytes - /// Byte to set - /// Number of bytes to set - /// If ==0, you should call - [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] -#if USE_NATIVE_MEMORY_OPERATORS && !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - private static unsafe void SetMemoryUnsafe(byte* dest, byte c, int count) - { - Contract.Requires(dest != null && count >= 0); - -#if USE_NATIVE_MEMORY_OPERATORS - NativeMethods.memset(dest, c, new IntPtr(count)); -#else - int fill32 = c; - fill32 = fill32 << 8 | c; - fill32 = fill32 << 8 | c; - fill32 = fill32 << 8 | c; - - if (count >= 16) - { - do - { - *((int*)(dest + 0)) = fill32; - *((int*)(dest + 4)) = fill32; - *((int*)(dest + 8)) = fill32; - *((int*)(dest + 12)) = fill32; - dest += 16; - } - while ((count -= 16) >= 16); - } - if (count > 0) - { - if ((count & 8) != 0) - { - *((int*)(dest + 0)) = fill32; - *((int*)(dest + 4)) = fill32; - dest += 8; - } - if ((count & 4) != 0) - { - 
*((int*)dest) = fill32; - dest += 4; - } - if ((count & 2) != 0) - { - *((short*)dest) = (short)fill32; - dest += 2; - } - if ((count & 1) != 0) - { - *dest = c; - } - } -#endif - } - - /// Dangerously clear native memory - /// Where to clear the bytes - /// Number of bytes to clear - [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] -#if USE_NATIVE_MEMORY_OPERATORS && !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - private static unsafe void ClearMemoryUnsafe(byte* dest, int count) - { - Contract.Requires(dest != null && count >= 0); - -#if USE_NATIVE_MEMORY_OPERATORS - NativeMethods.memset(dest, 0, new IntPtr(count)); -#else - if (count >= 16) - { - do - { - *((ulong*)(dest + 0)) = 0UL; - *((ulong*)(dest + 8)) = 0UL; - dest += 16; - } - while ((count -= 16) >= 16); - } - if (count > 0) - { - if ((count & 8) != 0) - { - *((ulong*)(dest)) = 0UL; - dest += 8; - } - if ((count & 4) != 0) - { - *((int*)dest) = 0; - dest += 4; - } - if ((count & 2) != 0) - { - *((short*)dest) = 0; - dest += 2; - } - if ((count & 1) != 0) - { - *dest = 0; - } - } -#endif - } - - /// Returns the offset of the first difference found between two buffers of the same size - /// Pointer to the first byte of the left buffer - /// Pointer to the first byte of the right buffer - /// Number of bytes to compare in both buffers - /// Offset (from the first byte) of the first difference encountered, or -1 if both buffers are identical. 
- [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] -#if USE_NATIVE_MEMORY_OPERATORS && !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - private static unsafe int CompareMemoryUnsafe(byte* left, byte* right, int count) - { - Contract.Requires(left != null && right != null && count >= 0); - -#if USE_NATIVE_MEMORY_OPERATORS - return NativeMethods.memcmp(left, right, new IntPtr(count)); -#else - - // We want to scan in chunks of 8 bytes, until we find a difference (or there's less than 8 bytes remaining). - // If we find a difference that way, we backtrack and then scan byte per byte to locate the location of the mismatch. - // for the last 1 to 7 bytes, we just do a regular check - - // XOR Comparison: We XOR two 8-bytes chunks together. - // - If all bytes are identical, the XOR result will be 0. - // - If at least one bit is difference, the XOR result will be non-zero, and the first different will be in the first non-zero byte. - - // Identical data: - // left : "11 22 33 44 55 66 77 88" => 0x8877665544332211 - // right: "11 22 33 44 55 66 77 88" => 0x8877665544332211 - // left XOR right => 0x8877665544332211 ^ 0x8877665544332211 = 0 - - // Different data: - // left : "11 22 33 44 55 66 77 88" => 0x8877665544332211 - // right: "11 22 33 44 55 AA BB CC" => 0xCCBBAA5544332211 - // left XOR right =0x8877665544332211 ^ 0xCCBBAA5544332211 = 0x44CCCC0000000000 - // the first non-zero byte is at offset 5 (big-endian) with the value of 0xCC - - byte* start = left; - - //TODO: align the start of the 8-byte scan to an 8-byte aligne memory address ? 
- - // compares using 8-bytes chunks - while (count >= 8) - { - ulong k = *((ulong*)left) ^ *((ulong*)right); - - if (k != 0) - { // there is difference in these 8 bytes, iterate until we find it - int p = 0; - while ((k & 0xFF) == 0) - { - ++p; - k >>= 8; - } - return left[p] - right[p]; - } - left += 8; - right += 8; - count -= 8; - } - - // if more than 4 bytes remain, check 32 bits at a time - if (count >= 4) - { - if (*((uint*)left) != *((uint*)right)) - { - goto compare_tail; - } - left += 4; - right += 4; - count -= 4; - } - - // from here, there is at mos 3 bytes remaining - - compare_tail: - while (count-- > 0) - { - int n = *(left++) - *(right++); - if (n != 0) return n; - } - return 0; -#endif - } - -#if USE_NATIVE_MEMORY_OPERATORS - - [SuppressUnmanagedCodeSecurity] - internal static unsafe class NativeMethods - { - - /// Compare characters in two buffers. - /// First buffer. - /// Second buffer. - /// Number of bytes to compare. - /// The return value indicates the relationship between the buffers. - [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl, SetLastError = false)] - public static extern int memcmp(byte* buf1, byte* buf2, IntPtr count); - - /// Moves one buffer to another. - /// Destination object. - /// Source object. - /// Number of bytes to copy. - /// The value of dest. - /// Copies count bytes from src to dest. If some regions of the source area and the destination overlap, both functions ensure that the original source bytes in the overlapping region are copied before being overwritten. - [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl, SetLastError = false)] - public static extern byte* memmove(byte* dest, byte* src, IntPtr count); - - /// Sets the first bytes of to the byte . 
- /// Pointer to destination - /// Byte to set - /// Number of bytes - /// The value of - [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl, SetLastError = false)] - public static extern byte* memset(byte* dest, int c, IntPtr count); - - } - -#endif - - } - -} diff --git a/FoundationDB.Client/Utils/SliceReader.cs b/FoundationDB.Client/Utils/SliceReader.cs deleted file mode 100644 index bb92ade0f..000000000 --- a/FoundationDB.Client/Utils/SliceReader.cs +++ /dev/null @@ -1,261 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Client -{ - using JetBrains.Annotations; - using System; - - /// Helper class that holds the internal state used to parse tuples from slices - public struct SliceReader - { - - /// Creates a reader on a byte array - public static SliceReader FromBuffer(byte[] buffer) - { - return new SliceReader(Slice.Create(buffer)); - } - - /// Creates a reader on a segment of a byte array - public static SliceReader FromBuffer(byte[] buffer, int offset, int count) - { - return new SliceReader(Slice.Create(buffer, offset, count)); - } - - /// Buffer containing the tuple being parsed - public Slice Buffer { get { return m_buffer; } } - private Slice m_buffer; //PERF: readonly struct - - /// Current position inside the buffer - public int Position; - - /// Creates a new reader over a slice - /// Slice that will be used as the underlying buffer - public SliceReader(Slice buffer) - { - m_buffer = buffer; - this.Position = 0; - } - - /// Returns true if there are more bytes to parse - public bool HasMore { get { return this.Position < m_buffer.Count; } } - - /// Returns the number of bytes remaining - public int Remaining { get { return Math.Max(0, m_buffer.Count - this.Position); } } - - /// Returns a slice with all the bytes read so far in the buffer - public Slice Head - { - get { return m_buffer.Substring(0, this.Position); } - } - - /// Returns a slice with all the remaining bytes in the buffer - public Slice Tail - { - get { return m_buffer.Substring(this.Position); } - } - - /// Ensure that there are at least bytes remaining in the buffer - public void EnsureBytes(int count) - { - if (count < 0 || checked(this.Position + count) > m_buffer.Count) throw new ArgumentOutOfRangeException("count"); - } - - /// Return the value of the next byte in the buffer, or -1 if we reached the end - [Pure] - public int PeekByte() - { - int p = this.Position; - return p < m_buffer.Count ? 
m_buffer[p] : -1; - } - - /// Return the value of the byte at a specified offset from the current position, or -1 if this is after the end, or before the start - [Pure] - public int PeekByteAt(int offset) - { - int p = this.Position + offset; - return p < m_buffer.Count && p >= 0 ? m_buffer[p] : -1; - } - - /// Skip the next bytes of the buffer - public void Skip(int count) - { - EnsureBytes(count); - - this.Position += count; - } - - /// Read the next byte from the buffer - public byte ReadByte() - { - EnsureBytes(1); - - int p = this.Position; - byte b = m_buffer[p]; - this.Position = p + 1; - return b; - } - - /// Read the next bytes from the buffer - public Slice ReadBytes(int count) - { - EnsureBytes(count); - - int p = this.Position; - this.Position = p + count; - return m_buffer.Substring(p, count); - } - - /// Read the next 2 bytes as an unsigned 16-bit integer, encoded in little-endian - public ushort ReadFixed16() - { - return ReadBytes(2).ToUInt16(); - } - - /// Read the next 4 bytes as an unsigned 32-bit integer, encoded in little-endian - public uint ReadFixed32() - { - return ReadBytes(4).ToUInt32(); - } - - /// Read the next 8 bytes as an unsigned 64-bit integer, encoded in little-endian - public ulong ReadFixed64() - { - return ReadBytes(8).ToUInt64(); - } - - /// Read the next 2 bytes as an unsigned 16-bit integer, encoded in big-endian - public ushort ReadFixed16BE() - { - return ReadBytes(2).ToUInt16BE(); - } - - /// Read the next 4 bytes as an unsigned 32-bit integer, encoded in big-endian - public uint ReadFixed32BE() - { - return ReadBytes(4).ToUInt32BE(); - } - - /// Read the next 8 bytes as an unsigned 64-bit integer, encoded in big-endian - public ulong ReadFixed64BE() - { - return ReadBytes(8).ToUInt64BE(); - } - - /// Read an encoded nul-terminated byte array from the buffer - public Slice ReadByteString() - { - var buffer = m_buffer.Array; - int start = m_buffer.Offset + this.Position; - int p = start; - int end = m_buffer.Offset + 
m_buffer.Count; - - while (p < end) - { - byte b = buffer[p++]; - if (b == 0) - { - //TODO: decode \0\xFF ? - if (p < end && buffer[p] == 0xFF) - { - // skip the next byte and continue - p++; - continue; - } - - this.Position = p - m_buffer.Offset; - return new Slice(buffer, start, p - start); - } - } - - throw new FormatException("Truncated byte string (expected terminal NUL not found)"); - } - - /// Reads a 7-bit encoded unsigned int (aka 'Varint16') from the buffer, and advances the cursor - /// Can Read up to 3 bytes from the input - public ushort ReadVarint16() - { - //note: this could read up to 21 bits of data, so we check for overflow - return checked((ushort)ReadVarint(3)); - } - - /// Reads a 7-bit encoded unsigned int (aka 'Varint32') from the buffer, and advances the cursor - /// Can Read up to 5 bytes from the input - public uint ReadVarint32() - { - //note: this could read up to 35 bits of data, so we check for overflow - return checked((uint)ReadVarint(5)); - } - - /// Reads a 7-bit encoded unsigned long (aka 'Varint32') from the buffer, and advances the cursor - /// Can Read up to 10 bytes from the input - public ulong ReadVarint64() - { - return ReadVarint(10); - } - - /// Reads a Base 128 Varint from the input - /// Maximum number of bytes allowed (5 for 32 bits, 10 for 64 bits) - private ulong ReadVarint(int count) - { - var buffer = m_buffer.Array; - int p = m_buffer.Offset + this.Position; - int end = m_buffer.Offset + m_buffer.Count; - - ulong x = 0; - int s = 0; - - // read bytes until the MSB is unset - while (count-- > 0) - { - if (p > end) throw new FormatException("Truncated Varint"); - byte b = buffer[p++]; - - x |= (b & 0x7FUL) << s; - if (b < 0x80) - { - this.Position = p - m_buffer.Offset; - return x; - } - s += 7; - } - throw new FormatException("Malformed Varint"); - } - - /// Reads a variable sized slice, by first reading its size (stored as a Varint32) and then the data - public Slice ReadVarbytes() - { - uint size = 
ReadVarint32(); - if (size > int.MaxValue) throw new FormatException("Malformed variable size"); - if (size == 0) return Slice.Empty; - return ReadBytes((int)size); - } - - } - -} diff --git a/FoundationDB.Client/Utils/SliceWriter.cs b/FoundationDB.Client/Utils/SliceWriter.cs deleted file mode 100644 index 10be371f6..000000000 --- a/FoundationDB.Client/Utils/SliceWriter.cs +++ /dev/null @@ -1,879 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Client -{ - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.Diagnostics; - using System.Runtime.CompilerServices; - - /// Slice buffer that emulates a pseudo-stream using a byte array that will automatically grow in size, if necessary - /// IMPORTANT: This struct does not extensively check the parameters! The caller should ensure that everything is valid (this is to get the max performance when serializing keys and values) - [DebuggerDisplay("Position={Position}, Capacity={Buffer == null ? -1 : Buffer.Length}"), DebuggerTypeProxy(typeof(SliceWriter.DebugView))] - public struct SliceWriter - { - // Invariant - // * Valid data always start at offset 0 - // * 'this.Position' is equal to the current size as well as the offset of the next available free spot - // * 'this.Buffer' is either null (meaning newly created stream), or is at least as big as this.Position - - #region Private Members... - - /// Buffer holding the data - public byte[] Buffer; - - /// Position in the buffer ( == number of already written bytes) - public int Position; - - #endregion - - #region Constructors... - - /// Returns a new, empty, slice writer - public static SliceWriter Empty { get { return default(SliceWriter); } } - - /// Create a new empty binary buffer with an initial allocated size - /// Initial capacity of the buffer - public SliceWriter(int capacity) - { - if (capacity < 0) throw new ArgumentOutOfRangeException("capacity"); - - this.Buffer = new byte[capacity]; - this.Position = 0; - } - - /// Create a new binary writer using an existing buffer - /// Initial buffer - /// Since the content of the will be modified, only a temporary or scratch buffer should be used. If the writer needs to grow, a new buffer will be allocated. 
- public SliceWriter([NotNull] byte[] buffer) - : this(buffer, 0) - { } - - /// Create a new binary buffer using an existing buffer and with the cursor to a specific location - /// Since the content of the will be modified, only a temporary or scratch buffer should be used. If the writer needs to grow, a new buffer will be allocated. - public SliceWriter([NotNull] byte[] buffer, int index) - { - if (buffer == null) throw new ArgumentNullException("buffer"); - if (index < 0 || index > buffer.Length) throw new ArgumentOutOfRangeException("index"); - - this.Buffer = buffer; - this.Position = index; - } - - /// Creates a new binary buffer, initialized by copying pre-existing data - /// Data that will be copied at the start of the buffer - /// Optional initial capacity of the buffer - /// The cursor will already be placed at the end of the prefix - public SliceWriter(Slice prefix, int capacity = 0) - { - if (capacity < 0) throw new ArgumentException("Capacity must be a positive integer.", "capacity"); - - int n = prefix.Count; - Contract.Assert(n >= 0); - - if (capacity == 0) - { // most frequent usage is to add a packed integer at the end of a prefix - capacity = SliceHelpers.Align(n + 8); - } - else - { - capacity = Math.Max(capacity, n); - } - - var buffer = new byte[capacity]; - if (n > 0) prefix.CopyTo(buffer, 0); - - this.Buffer = buffer; - this.Position = n; - } - - #endregion - - #region Public Properties... 
- - /// Returns true if the buffer contains at least some data - public bool HasData - { - get { return this.Position > 0; } - } - - /// Return the byte at the specified index - /// Index in the buffer (0-based if positive, from the end if negative) - public byte this[int index] - { - [Pure] - get - { - Contract.Assert(this.Buffer != null && this.Position >= 0); - //note: we will get bound checking for free in release builds - if (index < 0) index += this.Position; - if (index < 0 || index >= this.Position) throw new IndexOutOfRangeException(); - return this.Buffer[index]; - } - } - - /// Returns a slice pointing to a segment inside the buffer - /// The starting position of the substring. Positive values means from the start, negative values means from the end - /// The end position (excluded) of the substring. Positive values means from the start, negative values means from the end - /// Slice that corresponds to the section selected. If the if equal to or greater than then an empty Slice is returned - /// If either or is outside of the currently allocated buffer. - public Slice this[int? beginInclusive, int? endExclusive] - { - [Pure] - get - { - int from = beginInclusive ?? 0; - int until = endExclusive ?? this.Position; - - // remap negative indexes - if (from < 0) from += this.Position; - if (until < 0) until += this.Position; - - // bound check - if (from < 0 || from >= this.Position) throw new ArgumentOutOfRangeException("beginInclusive", "The start index must be inside the bounds of the buffer."); - if (until < 0 || until > this.Position) throw new ArgumentOutOfRangeException("endExclusive", "The end index must be inside the bounds of the buffer."); - - // chop chop - int count = until - from; - return count > 0 ? new Slice(this.Buffer, from, count) : Slice.Empty; - } - } - - #endregion - - /// Returns a byte array filled with the contents of the buffer - /// The buffer is copied in the byte array. 
And change to one will not impact the other - [Pure, NotNull] - public byte[] GetBytes() - { - Contract.Requires(this.Position >= 0); - - var bytes = new byte[this.Position]; - if (this.Position > 0) - { - Contract.Assert(this.Buffer != null && this.Buffer.Length >= this.Position); - SliceHelpers.CopyBytesUnsafe(bytes, 0, this.Buffer, 0, bytes.Length); - } - return bytes; - } - - /// Returns a slice pointing to the content of the buffer - /// Any change to the slice will change the buffer ! - [Pure] - public Slice ToSlice() - { - if (this.Buffer == null || this.Position == 0) - { - return Slice.Empty; - } - else - { - Contract.Assert(this.Buffer.Length >= this.Position); - return new Slice(this.Buffer, 0, this.Position); - } - } - - /// Returns a slice pointing to the first bytes of the buffer - /// Size of the segment - /// Any change to the slice will change the buffer ! - /// If is less than zero, or larger than the current buffer size - [Pure] - public Slice ToSlice(int count) - { - if (count < 0 || count > this.Position) throw new ArgumentException("count"); - - return count > 0 ? new Slice(this.Buffer, 0, count) : Slice.Empty; - } - - /// Returns a slice pointing to a segment inside the buffer - /// Offset of the segment from the start of the buffer - /// Any change to the slice will change the buffer ! - /// If is less then zero, or after the current position - [Pure] - public Slice Substring(int offset) - { - if (offset < 0 || offset > this.Position) throw new ArgumentException("Offset must be inside the buffer", "offset"); - - int count = this.Position - offset; - return count > 0 ? new Slice(this.Buffer, offset, this.Position - offset) : Slice.Empty; - } - - /// Returns a slice pointing to a segment inside the buffer - /// Offset of the segment from the start of the buffer - /// Size of the segment - /// Any change to the slice will change the buffer ! 
- /// If either or are less then zero, or do not fit inside the current buffer - [Pure] - public Slice Substring(int offset, int count) - { - if (offset < 0 || offset >= this.Position) throw new ArgumentException("Offset must be inside the buffer", "offset"); - if (count < 0 || offset + count > this.Position) throw new ArgumentException("The buffer is too small", "count"); - - return count > 0 ? new Slice(this.Buffer, offset, count) : Slice.Empty; - } - - /// Truncate the buffer by setting the cursor to the specified position. - /// New size of the buffer - /// If the buffer was smaller, it will be resized and filled with zeroes. If it was biffer, the cursor will be set to the specified position, but previous data will not be deleted. - public void SetLength(int position) - { - Contract.Requires(position >= 0); - - if (this.Position < position) - { - int missing = position - this.Position; - EnsureBytes(missing); - //TODO: native memset() ? - Array.Clear(this.Buffer, this.Position, missing); - } - this.Position = position; - } - - /// Delete the first N bytes of the buffer, and shift the remaining to the front - /// Number of bytes to remove at the head of the buffer - /// New size of the buffer (or 0 if it is empty) - /// This should be called after every successfull write to the underlying stream, to update the buffer. - public int Flush(int bytes) - { - if (bytes == 0) return this.Position; - if (bytes < 0) throw new ArgumentOutOfRangeException("bytes"); - - if (bytes < this.Position) - { // copy the left over data to the start of the buffer - int remaining = this.Position - bytes; - SliceHelpers.CopyBytesUnsafe(this.Buffer, 0, this.Buffer, bytes, remaining); - this.Position = remaining; - return remaining; - } - else - { - //REVIEW: should we throw if there are less bytes in the buffer than we want to flush ? 
- this.Position = 0; - return 0; - } - } - - /// Empties the current buffer after a succesfull write - /// Shrink the buffer if a lot of memory is wated - public void Reset() - { - if (this.Position > 0) - { - // reduce size ? - // If the buffer exceeds 4K and we used less than 1/8 of it the last time, we will "shrink" the buffer - if (this.Buffer.Length > 4096 && (this.Position << 3) <= Buffer.Length) - { // Shrink it - Buffer = new byte[SliceHelpers.NextPowerOfTwo(this.Position)]; - } - else - { // Clear it - //TODO: native memset() ? - Array.Clear(Buffer, 0, this.Position); - } - this.Position = 0; - } - } - - /// Advance the cursor of the buffer without writing anything, and return the previous position - /// Number of bytes to skip - /// Pad value (0xFF by default) - /// Position of the cursor BEFORE moving it. Can be used as a marker to go back later and fill some value - /// Will fill the skipped bytes with - public int Skip(int skip, byte pad = 0xFF) - { - Contract.Requires(skip > 0); - - EnsureBytes(skip); - var buffer = this.Buffer; - int p = this.Position; - for (int i = 0; i < skip; i++) - { - buffer[p + i] = pad; - } - this.Position = p + skip; - return p; - } - - /// Rewinds the cursor to a previous position in the buffer, while saving the current position - /// Will receive the current cursor position - /// Previous position in the buffer - public void Rewind(out int cursor, int position) - { - Contract.Requires(position >= 0 && position <= this.Position); - cursor = this.Position; - this.Position = position; - } - - /// Add a byte to the end of the buffer, and advance the cursor - /// Byte, 8 bits -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - public void WriteByte(byte value) - { - EnsureBytes(1); - this.Buffer[this.Position] = value; - ++this.Position; - } - -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - internal void UnsafeWriteByte(byte value) - { - Contract.Requires(this.Buffer != null 
&& this.Position < this.Buffer.Length); - this.Buffer[this.Position++] = value; - } - -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - internal void WriteByte2(byte value1, byte value2) - { - EnsureBytes(2); - - int p = this.Position; - this.Buffer[p] = value1; - this.Buffer[p + 1] = value2; - this.Position = p + 2; - } - -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - internal void UnsafeWriteByte2(byte value1, byte value2) - { - Contract.Requires(this.Buffer != null && this.Position + 1 < this.Buffer.Length); - int p = this.Position; - this.Buffer[p] = value1; - this.Buffer[p + 1] = value2; - this.Position = p + 2; - } - -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - internal void WriteByte3(byte value1, byte value2, byte value3) - { - EnsureBytes(3); - - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = value1; - buffer[p + 1] = value2; - buffer[p + 2] = value3; - this.Position = p + 3; - } - -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - internal void UnsafeWriteByte3(byte value1, byte value2, byte value3) - { - Contract.Requires(this.Buffer != null && this.Position + 2 < this.Buffer.Length); - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = value1; - buffer[p + 1] = value2; - buffer[p + 2] = value3; - this.Position = p + 3; - } - -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - internal void WriteByte4(byte value1, byte value2, byte value3, byte value4) - { - EnsureBytes(4); - - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = value1; - buffer[p + 1] = value2; - buffer[p + 2] = value3; - buffer[p + 3] = value4; - this.Position = p + 4; - } - -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - internal void UnsafeWriteByte4(byte value1, byte value2, byte value3, byte value4) - { - Contract.Requires(this.Buffer != null && this.Position + 3 < 
this.Buffer.Length); - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = value1; - buffer[p + 1] = value2; - buffer[p + 2] = value3; - buffer[p + 3] = value4; - this.Position = p + 4; - } - -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - internal void WriteByte5(byte value1, byte value2, byte value3, byte value4, byte value5) - { - EnsureBytes(5); - - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = value1; - buffer[p + 1] = value2; - buffer[p + 2] = value3; - buffer[p + 3] = value4; - buffer[p + 4] = value5; - this.Position = p + 5; - } - -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - internal void UnsafeWriteByte5(byte value1, byte value2, byte value3, byte value4, byte value5) - { - Contract.Requires(this.Buffer != null && this.Position + 4 < this.Buffer.Length); - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = value1; - buffer[p + 1] = value2; - buffer[p + 2] = value3; - buffer[p + 3] = value4; - buffer[p + 4] = value5; - this.Position = p + 5; - } - - /// Append a byte array to the end of the buffer - /// - public void WriteBytes(byte[] data) - { - if (data != null) - { - WriteBytes(data, 0, data.Length); - } - } - - /// Append a chunk of a byte array to the end of the buffer - /// - /// - /// - public void WriteBytes(byte[] data, int offset, int count) - { - SliceHelpers.EnsureBufferIsValid(data, offset, count); - - if (count > 0) - { - EnsureBytes(count); - SliceHelpers.CopyBytesUnsafe(this.Buffer, this.Position, data, offset, count); - this.Position += count; - } - } - - /// Append a chunk of memory to the end of the buffer - public unsafe void WriteBytesUnsafe(byte* data, int count) - { - if (data == null) throw new ArgumentNullException("data"); - if (count < 0) throw new ArgumentOutOfRangeException("count"); - - if (count > 0) - { - EnsureBytes(count); - SliceHelpers.CopyBytesUnsafe(this.Buffer, this.Position, data, count); - this.Position += 
count; - } - } - - internal void UnsafeWriteBytes(byte[] data, int offset, int count) - { - Contract.Requires(this.Buffer != null && this.Position >= 0 && data != null && count >= 0 && this.Position + count <= this.Buffer.Length && offset >= 0 && offset + count <= data.Length); - - if (count > 0) - { - SliceHelpers.CopyBytesUnsafe(this.Buffer, this.Position, data, offset, count); - this.Position += count; - } - } - - /// Append a segment of bytes to the end of the buffer - public void WriteBytes(Slice data) - { - SliceHelpers.EnsureSliceIsValid(ref data); - - int n = data.Count; - if (n > 0) - { - EnsureBytes(n); - SliceHelpers.CopyBytesUnsafe(this.Buffer, this.Position, data.Array, data.Offset, n); - this.Position += n; - } - } - - internal unsafe void WriteBytes(byte* data, int count) - { - if (count == 0) return; - if (data == null) throw new ArgumentNullException("data"); - if (count < 0) throw new ArgumentException("count"); - - EnsureBytes(count); - Contract.Assert(this.Buffer != null && this.Position >= 0 && this.Position + count <= this.Buffer.Length); - - SliceHelpers.CopyBytesUnsafe(this.Buffer, this.Position, data, count); - this.Position += count; - } - - internal unsafe void UnsafeWriteBytes(byte* data, int count) - { - if (count <= 0) return; - - Contract.Requires(this.Buffer != null && this.Position >= 0 && data != null && count >= 0 && this.Position + count <= this.Buffer.Length); - - SliceHelpers.CopyBytesUnsafe(this.Buffer, this.Position, data, count); - this.Position += count; - } - - #region Fixed, Little-Endian - - /// Writes a 16-bit unsigned integer, using little-endian encoding - /// Advances the cursor by 2 bytes - public void WriteFixed16(uint value) - { - EnsureBytes(2); - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = (byte)value; - buffer[p + 1] = (byte)(value >> 8); - this.Position = p + 2; - } - - /// Writes a 32-bit unsigned integer, using little-endian encoding - /// Advances the cursor by 4 bytes - public void 
WriteFixed32(uint value) - { - EnsureBytes(4); - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = (byte)value; - buffer[p + 1] = (byte)(value >> 8); - buffer[p + 2] = (byte)(value >> 16); - buffer[p + 3] = (byte)(value >> 24); - this.Position = p + 4; - } - - /// Writes a 64-bit unsigned integer, using little-endian encoding - /// Advances the cursor by 8 bytes - public void WriteFixed64(ulong value) - { - EnsureBytes(8); - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = (byte)value; - buffer[p + 1] = (byte)(value >> 8); - buffer[p + 2] = (byte)(value >> 16); - buffer[p + 3] = (byte)(value >> 24); - buffer[p + 4] = (byte)(value >> 32); - buffer[p + 5] = (byte)(value >> 40); - buffer[p + 6] = (byte)(value >> 48); - buffer[p + 7] = (byte)(value >> 56); - this.Position = p + 8; - } - - #endregion - - #region Fixed, Big-Endian - - /// Writes a 16-bit unsigned integer, using big-endian encoding - /// Advances the cursor by 2 bytes - public void WriteFixed16BE(uint value) - { - EnsureBytes(2); - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = (byte)(value >> 8); - buffer[p + 1] = (byte)value; - this.Position = p + 2; - } - - /// Writes a 32-bit unsigned integer, using big-endian encoding - /// Advances the cursor by 4 bytes - public void WriteFixed32BE(uint value) - { - EnsureBytes(4); - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = (byte)(value >> 24); - buffer[p + 1] = (byte)(value >> 16); - buffer[p + 2] = (byte)(value >> 8); - buffer[p + 3] = (byte)(value); - this.Position = p + 4; - } - - /// Writes a 64-bit unsigned integer, using big-endian encoding - /// Advances the cursor by 8 bytes - public void WriteFixed64BE(ulong value) - { - EnsureBytes(8); - var buffer = this.Buffer; - int p = this.Position; - buffer[p] = (byte)(value >> 56); - buffer[p + 1] = (byte)(value >> 48); - buffer[p + 2] = (byte)(value >> 40); - buffer[p + 3] = (byte)(value >> 32); - buffer[p + 4] = (byte)(value >> 24); - 
buffer[p + 5] = (byte)(value >> 16); - buffer[p + 6] = (byte)(value >> 8); - buffer[p + 7] = (byte)(value); - this.Position = p + 8; - } - - #endregion - - #region Variable size - - /// Writes a 7-bit encoded unsigned int (aka 'Varint16') at the end, and advances the cursor - public void WriteVarint16(ushort value) - { - const uint MASK = 128; - - if (value < (1 << 7)) - { - WriteByte((byte)value); - } - else if (value < (1 << 14)) - { - WriteByte2( - (byte)(value | MASK), - (byte)(value >> 7) - ); - } - else - { - WriteByte3( - (byte)(value | MASK), - (byte)((value >> 7) | MASK), - (byte)(value >> 14) - ); - } - } - - /// Writes a 7-bit encoded unsigned int (aka 'Varint32') at the end, and advances the cursor - public void WriteVarint32(uint value) - { - const uint MASK = 128; - - if (value < (1 << 7)) - { - WriteByte((byte)value); - } - else if (value < (1 << 14)) - { - WriteByte2( - (byte)(value | MASK), - (byte)(value >> 7) - ); - } - else if (value < (1 << 21)) - { - WriteByte3( - (byte)(value | MASK), - (byte)((value >> 7) | MASK), - (byte)(value >> 14) - ); - } - else if (value < (1 << 28)) - { - WriteByte4( - (byte)(value | MASK), - (byte)((value >> 7) | MASK), - (byte)((value >> 14) | MASK), - (byte)(value >> 21) - ); - } - else - { - WriteByte5( - (byte)(value | MASK), - (byte)((value >> 7) | MASK), - (byte)((value >> 14) | MASK), - (byte)((value >> 21) | MASK), - (byte)(value >> 28) - ); - } - } - - /// Writes a 7-bit encoded unsigned long (aka 'Varint64') at the end, and advances the cursor - public void WriteVarint64(ulong value) - { - const uint MASK = 128; - - // max size is 5 - EnsureBytes(value < (1 << 7) ? 1 : value < (1 << 14) ? 2 : value < (1 << 21) ? 
3 : 10); - - var buffer = this.Buffer; - int p = this.Position; - while (value >= MASK) - { - buffer[p++] = (byte)((value & (MASK - 1)) | MASK); - value >>= 7; - } - - buffer[p++] = (byte)value; - this.Position = p; - } - - /// Writes a length-prefixed byte array, and advances the cursor - public void WriteVarbytes(Slice value) - { - //REVIEW: what should we do for Slice.Nil ? - - SliceHelpers.EnsureSliceIsValid(ref value); - int n = value.Count; - if (n < 128) - { - EnsureBytes(n + 1); - var buffer = this.Buffer; - int p = this.Position; - // write the count (single byte) - buffer[p] = (byte)n; - // write the bytes - if (n > 0) - { - SliceHelpers.CopyBytesUnsafe(buffer, p + 1, value.Array, value.Offset, n); - } - this.Position = p + n + 1; - } - else - { - // write the count - WriteVarint32((uint)value.Count); - // write the bytes - SliceHelpers.CopyBytesUnsafe(this.Buffer, this.Position, value.Array, value.Offset, n); - this.Position += n; - } - } - - #endregion - - /// Ensures that we can fit a specific amount of data at the end of the buffer - /// Number of bytes that will be written - /// If the buffer is too small, it will be resized -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - public void EnsureBytes(int count) - { - Contract.Requires(count >= 0); - if (Buffer == null || Position + count > Buffer.Length) - { - GrowBuffer(ref Buffer, Position + count); - } - Contract.Ensures(this.Buffer != null && this.Buffer.Length >= this.Position + count); - } - - /// Ensures that we can fit data at a specifc offset in the buffer - /// Offset into the buffer (from the start) - /// Number of bytes that will be written at this offset - /// If the buffer is too small, it will be resized -#if !NET_4_0 - [MethodImpl(MethodImplOptions.AggressiveInlining)] -#endif - public void EnsureOffsetAndSize(int offset, int count) - { - Contract.Requires(offset >= 0); - Contract.Requires(count >= 0); - - if (this.Buffer == null || offset + count > 
this.Buffer.Length) - { - GrowBuffer(ref this.Buffer, offset + count); - } - } - - /// Resize a buffer by doubling its capacity - /// Reference to the variable holding the buffer to create/resize. If null, a new buffer will be allocated. If not, the content of the buffer will be copied into the new buffer. - /// Mininum guaranteed buffer size after resizing. - /// The buffer will be resized to the maximum betweeb the previous size multiplied by 2, and . The capacity will always be rounded to a multiple of 16 to reduce memory fragmentation - public static void GrowBuffer(ref byte[] buffer, int minimumCapacity = 0) - { - Contract.Requires(minimumCapacity >= 0); - - // double the size of the buffer, or use the minimum required - long newSize = Math.Max(buffer == null ? 0 : (((long)buffer.Length) << 1), minimumCapacity); - - // .NET (as of 4.5) cannot allocate an array with more than 2^31 - 1 items... - if (newSize > 0x7fffffffL) FailCannotGrowBuffer(); - - // round up to 16 bytes, to reduce fragmentation - int size = SliceHelpers.Align((int)newSize); - - Array.Resize(ref buffer, size); - } - - [ContractAnnotation("=> halt")] - private static void FailCannotGrowBuffer() - { -#if DEBUG - // If you breakpoint here, that means that you probably have an uncheked maximum buffer size, or a runaway while(..) { append(..) } code in your layer code ! - // => you should ALWAYS ensure a reasonable maximum size of your allocations ! 
- if (Debugger.IsAttached) Debugger.Break(); -#endif - // note: some methods in the BCL do throw an OutOfMemoryException when attempting to allocated more than 2^31 - throw new OutOfMemoryException("Buffer cannot be resized, because it would exceed the maximum allowed size"); - } - - [UsedImplicitly(ImplicitUseTargetFlags.WithMembers)] - private sealed class DebugView - { - private readonly SliceWriter m_writer; - - public DebugView(SliceWriter writer) - { - m_writer = writer; - } - - public byte[] Data - { - get - { - if (m_writer.Buffer.Length == m_writer.Position) return m_writer.Buffer; - var tmp = new byte[m_writer.Position]; - System.Array.Copy(m_writer.Buffer, tmp, tmp.Length); - return tmp; - } - } - - public int Position - { - get { return m_writer.Position; } - } - - } - - } - -} diff --git a/FoundationDB.Client/Utils/TinyJsonParser.cs b/FoundationDB.Client/Utils/TinyJsonParser.cs index 727d42c80..f90b87304 100644 --- a/FoundationDB.Client/Utils/TinyJsonParser.cs +++ b/FoundationDB.Client/Utils/TinyJsonParser.cs @@ -1,12 +1,12 @@ -using JetBrains.Annotations; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Text; - + namespace FoundationDB.Client.Utils { + using System; + using System.Collections.Generic; + using System.Globalization; + using System.Text; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// Tiny JSON parser internal sealed class TinyJsonParser @@ -240,6 +240,7 @@ private string ReadStringLiteral() case 't': sb.Append('\t'); break; case 'f': sb.Append('\f'); break; case 'b': sb.Append('\b'); break; + case '"': sb.Append('"'); break; case 'u': { if (cursor + 4 >= end) throw SyntaxError("Truncated unicode escape sequence in string literal"); @@ -343,7 +344,7 @@ internal static Dictionary ParseObject([NotNull] char[] chars, i if (token == Token.Eof) return null; // ensure we got an object - if (token != Token.MapBegin) throw new 
InvalidOperationException(String.Format("JSON object expected, but got a {0}", token)); + if (token != Token.MapBegin) throw new InvalidOperationException($"JSON object expected, but got a {token}"); var map = (Dictionary)parser.m_current; // ensure that there is nothing after the object @@ -370,8 +371,7 @@ public static List ParseArray(Slice data) [NotNull] internal static Dictionary GetMapField(Dictionary map, string field) { - object item; - return map != null && map.TryGetValue(field, out item) ? (Dictionary)item : s_missingMap; + return map != null && map.TryGetValue(field, out object item) ? (Dictionary)item : s_missingMap; } [NotNull] @@ -399,10 +399,10 @@ internal static string GetStringField(Dictionary map, string fie return map != null && map.TryGetValue(field, out item) ? (bool)item : default(bool?); } - internal static KeyValuePair GetStringPair(Dictionary map, string key, string value) + internal static (string Key, string Value) GetStringPair(Dictionary map, string key, string value) { object item; - return new KeyValuePair( + return ( map != null && map.TryGetValue(key, out item) ? (string)item : null, map != null && map.TryGetValue(value, out item) ? (string)item : null ); diff --git a/FoundationDB.Client/Utils/Uuid64.cs b/FoundationDB.Client/Utils/Uuid64.cs deleted file mode 100644 index 387eaa169..000000000 --- a/FoundationDB.Client/Utils/Uuid64.cs +++ /dev/null @@ -1,715 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. 
- * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client -{ - using FoundationDB.Client.Utils; - using JetBrains.Annotations; - using System; - using System.ComponentModel; - using System.Diagnostics; - using System.Globalization; - - [DebuggerDisplay("[{ToString()}]")] - [ImmutableObject(true), Serializable] - public struct Uuid64 : IFormattable, IEquatable, IComparable - { - public static readonly Uuid64 Empty = default(Uuid64); - - private readonly ulong m_value; - - public Uuid64(ulong value) - { - m_value = value; - } - - public Uuid64(long value) - { - m_value = (ulong)value; - } - - public Uuid64(byte[] value) - { - if (value == null) throw new ArgumentNullException("value"); - if (value.Length != 8) throw new ArgumentException("Value must be 8 bytes long", "value"); - - m_value = Read(value, 0); - } - - public Uuid64(Slice value) - { - if (value == null) throw new ArgumentNullException("value"); - if (value.Count != 8) throw new ArgumentException("Value must be 8 bytes long", "value"); - - m_value = Read(value.Array, value.Offset); - } - - public Uuid64(string value) 
- { - if (!TryParse(value, out m_value)) - { - throw new FormatException("Invalid Uuid64 format"); - } - } - - /// Generate a new random 64-bit UUID, using a global source of randomness. - /// Instance of a new Uuid64 that is random. - /// - ///

If you need sequential uuids, you should use a different generator (ex: FlakeID, ...)

- ///

This method uses a cryptographic RNG under a lock to generate 8 bytes of randomness, which can be slow. If you must generate a large number of unique ids, you should use a different source.

- ///
- public static Uuid64 NewUuid() - { - //Note: we chould use Guid.NewGuid() as a source of randomness, but even though a guid is "guaranteed" to be unique, a substring of a guid is not.. or is it? - return Uuid64RandomGenerator.Default.NewUuid(); - } - - #region Parsing... - - public static Uuid64 Parse([NotNull] string s) - { - if (s == null) throw new ArgumentNullException("s"); - ulong value; - if (!TryParse(s, out value)) - { - throw new FormatException("Invalid Uuid64 format"); - } - return new Uuid64(value); - } - - public static bool TryParse([NotNull] string s, out Uuid64 result) - { - if (s == null) throw new ArgumentNullException("s"); - ulong value; - if (!TryParse(s, out value)) - { - result = default(Uuid64); - return false; - } - result = new Uuid64(value); - return true; - } - - private static bool TryParse(string s, out ulong result) - { - Contract.Requires(s != null); - - // we support the following formats: "{hex8-hex8}", "{hex16}", "hex8-hex8", "hex16" and "base62" - // we don't support base10 format, because there is no way to differentiate from hex or base62 - - result = 0; - switch (s.Length) - { - case 19: - { // {xxxxxxxx-xxxxxxxx} - if (s[0] != '{' || s[18] != '}') - { - return false; - } - return TryDecode16(s.ToCharArray(), 1, true, out result); - } - case 18: - { // {xxxxxxxxxxxxxxxx} - if (s[0] != '{' || s[17] != '}') - { - return false; - } - return TryDecode16(s.ToCharArray(), 1, false, out result); - } - case 17: - { // xxxxxxxx-xxxxxxxx - if (s[8] != '-') return false; - return TryDecode16(s.ToCharArray(), 0, true, out result); - } - case 16: - { // xxxxxxxxxxxxxxxx - return TryDecode16(s.ToCharArray(), 0, false, out result); - } - } - - // only base62 is allowed - if (s.Length <= 11) - { - return TryDecode62(s.ToCharArray(), out result); - } - - return false; - } - - #endregion - - #region Casting... 
- - public static implicit operator Uuid64(ulong value) - { - return new Uuid64(value); - } - - public static explicit operator ulong(Uuid64 value) - { - return value.m_value; - } - - public static implicit operator Uuid64(long value) - { - return new Uuid64(value); - } - - public static explicit operator long(Uuid64 value) - { - return (long)value.m_value; - } - - #endregion - - #region IFormattable... - - public long ToInt64() - { - return (long)m_value; - } - - public ulong ToUInt64() - { - return m_value; - } - - public Slice ToSlice() - { - return Slice.FromFixedU64BE(m_value); - } - - public byte[] ToByteArray() - { - var bytes = Slice.FromFixedU64BE(m_value).Array; - Contract.Assert(bytes != null && bytes.Length == 8); // HACKHACK: for perf reasons, we rely on the fact that Slice.FromFixedU64BE() allocates a new 8-byte array that we can return without copying - return bytes; - } - - /// Returns a string representation of the value of this instance. - /// String using the format "xxxxxxxx-xxxxxxxx", where 'x' is a lower-case hexadecimal digit - /// Strings returned by this method will always to 17 characters long. - public override string ToString() - { - return ToString(null, null); - } - - /// Returns a string representation of the value of this instance, according to the provided format specifier. - /// A single format specifier that indicates how to format the value of this Guid. The format parameter can be "D", "B", "X", "G", "Z" or "N". If format is null or an empty string (""), "D" is used. - /// The value of this , using the specified format. - /// See for a description of the different formats - public string ToString(string format) - { - return ToString(format, null); - } - - /// Returns a string representation of the value of this instance. - /// This argument is ignored - /// String using the format "xxxxxxxx-xxxxxxxx", where 'x' is a lower-case hexadecimal digit - /// Strings returned by this method will always to 17 characters long. 
- public string ToString(IFormatProvider formatProvider) - { - return ToString("D", null); - } - - /// Returns a string representation of the value of this instance of the class, according to the provided format specifier and culture-specific format information. - /// A single format specifier that indicates how to format the value of this Guid. The format parameter can be "D", "N", "Z", "R", "X" or "B". If format is null or an empty string (""), "D" is used. - /// An object that supplies culture-specific formatting information. Only used for the "R" format. - /// The value of this , using the specified format. - /// - ///

The D format encodes the value as two groups of 8 hexadecimal digits, separated by an hyphen: "01234567-89abcdef" (17 characters).

- ///

The X format encodes the value as a single group of 16 hexadecimal digits: "0123456789abcdef" (16 characters).

- ///

The B format is equivalent to the D format, but surrounded with '{' and '}': "{01234567-89abcdef}" (19 characters).

- ///

The R format encodes the value as a decimal number "1234567890" (1 to 20 characters) which can be parsed as an UInt64 without loss.

- ///

The C format uses a compact base-62 encoding that preserves lexicographical ordering, composed of digits, uppercase alpha and lowercase alpha, suitable for compact representation that can fit in a querystring.

- ///

The Z format is equivalent to the C format, but with extra padding so that the string is always 11 characters long.

- ///
- public string ToString(string format, IFormatProvider formatProvider) - { - if (string.IsNullOrEmpty(format)) format = "D"; - - switch(format) - { - case "D": - case "d": - { // Default format is "xxxxxxxx-xxxxxxxx" - return Encode16(m_value, separator: true, quotes: false); - } - - case "C": - case "c": - { // base 62, compact, no padding - return Encode62(m_value, padded: false); - } - case "Z": - case "z": - { // base 62, padded with '0' up to 11 chars - return Encode62(m_value, padded: true); - } - - case "R": - case "r": - { // Integer: "1234567890" - return m_value.ToString(null, formatProvider ?? CultureInfo.InvariantCulture); - } - - case "X": - case "x": - { // "xxxxxxxxxxxxxxxx" - return Encode16(m_value, separator: false, quotes: false); - } - - case "B": - case "b": - { // "{xxxxxxxx-xxxxxxxx}" - return Encode16(m_value, separator: true, quotes: true); - } - } - throw new FormatException("Invalid Uuid64 format specification."); - } - - #endregion - - #region IEquatable / IComparable... - - public override bool Equals(object obj) - { - if (obj is Uuid64) return Equals((Uuid64)obj); - if (obj is ulong) return m_value == (ulong)obj; - if (obj is long) return m_value == (ulong)(long)obj; - //TODO: string format ? Slice ? - return false; - } - - public override int GetHashCode() - { - return ((int)m_value) ^ (int)(m_value >> 32); - } - - public bool Equals(Uuid64 other) - { - return m_value == other.m_value; - } - - public int CompareTo(Uuid64 other) - { - return m_value.CompareTo(other.m_value); - } - - #endregion - - #region Base16 encoding... - - private static char HexToChar(int a) - { - a &= 0xF; - return a > 9 ? 
(char)(a - 10 + 'a') : (char)(a + '0'); - } - - private static unsafe char* HexsToChars(char* ptr, int a) - { - Contract.Requires(ptr != null); - ptr[0] = HexToChar(a >> 28); - ptr[1] = HexToChar(a >> 24); - ptr[2] = HexToChar(a >> 20); - ptr[3] = HexToChar(a >> 16); - ptr[4] = HexToChar(a >> 12); - ptr[5] = HexToChar(a >> 8); - ptr[6] = HexToChar(a >> 4); - ptr[7] = HexToChar(a); - return ptr + 8; - } - - private unsafe static string Encode16(ulong value, bool separator, bool quotes) - { - int size = 16 + (separator ? 1 : 0) + (quotes ? 2 : 0); - char* buffer = stackalloc char[24]; // max 19 mais on arrondi a 24 - - char* ptr = buffer; - if (quotes) *ptr++ = '{'; - ptr = HexsToChars(ptr, (int)(value >> 32)); - if (separator) *ptr++ = '-'; - ptr = HexsToChars(ptr, (int)(value & 0xFFFFFFFF)); - if (quotes) *ptr++ = '}'; - - Contract.Assert(ptr == buffer + size); - return new string(buffer, 0, size); - } - - private static int CharToHex(char c) - { - if (c <= '9') - { - return c >= '0' ? (c - 48) : -1; - } - if (c <= 'F') - { - return c >= 'A' ? (c - 55) : -1; - } - if (c <= 'f') - { - return c >= 'a' ? (c - 87) : -1; - } - return -1; - } - - private static bool TryCharsToHexs(char[] chars, int offset, out uint result) - { - int word = 0; - for (int i = 0; i < 8; i++) - { - int a = CharToHex(chars[offset++]); - if (a == -1) - { - result = 0; - return false; - } - word = (word << 4) | a; - } - result = (uint)word; - return true; - } - - private static bool TryDecode16(char[] chars, int offset, bool separator, out ulong result) - { - uint a, b; - - if ((!separator || chars[offset + 8] == '-') - && TryCharsToHexs(chars, offset, out a) - && TryCharsToHexs(chars, offset + (separator ? 9 : 8), out b)) - { - result = ((ulong)a << 32) | (ulong)b; - return true; - } - - result = 0; - return false; - } - - #endregion - - #region Base62 encoding... 
- - //NOTE: this version of base62 encoding puts the digits BEFORE the letters, to ensure that the string representation of a UUID64 is in the same order as its byte[] or ulong version. - // => This scheme use the "0-9A-Za-z" ordering, while most other base62 encoder use "a-zA-Z0-9" - - private static readonly char[] Base62LexicographicChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz".ToCharArray(); - private static readonly int[] Base62Values = new int[3 * 32] - { - /* 32.. 63 */ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, - /* 64.. 95 */ -1, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, -1, -1, -1, -1, -1, - /* 96..127 */ -1, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, - }; - - /// Encode a 64-bit value into a base-62 string - /// 64-bit value to encode - /// If true, keep the leading '0' to return a string of length 11. If false, discards all extra leading '0' digits. - /// String that contains only digits, lower and upper case letters. The string will be lexicographically ordered, which means that sorting by string will give the same order as sorting by value. - /// - /// Encode62(0, false) => "0" - /// Encode62(0, true) => "00000000000" - /// Encode62(0xDEADBEEF) => "" - /// - private static string Encode62(ulong value, bool padded) - { - // special case for default(Uuid64) which may be more frequent than others - if (value == 0) return padded ? "00000000000" : "0"; - - // encoding a 64 bits value in Base62 yields 10.75 "digits", which is rounded up to 11 chars. - const int MAX_SIZE = 11; - - unsafe - { - // The maximum size is 11 chars, but we will allocate 64 bytes on the stack to keep alignment. 
- char* chars = stackalloc char[16]; - char[] bc = Base62LexicographicChars; - - // start from the last "digit" - char* pc = chars + (MAX_SIZE - 1); - - while (pc >= chars) - { - ulong r = value % 62L; - value /= 62L; - *pc-- = bc[(int)r]; - if (!padded && value == 0) - { // the rest will be all zeroes - break; - } - } - - ++pc; - int count = MAX_SIZE - (int)(pc - chars); - Contract.Assert(count > 0 && count <= 11); - return count <= 0 ? String.Empty : new string(pc, 0, count); - } - } - - private static bool TryDecode62(char[] s, out ulong value) - { - if (s == null || s.Length == 0 || s.Length > 11) - { // fail: too small/too big - value = 0; - return false; - } - - // we know that the original value is exactly 64bits, and any missing digit is '0' - ulong factor = 1UL; - ulong acc = 0UL; - int p = s.Length - 1; - int[] bv = Base62Values; - while (p >= 0) - { - // read digit - int a = s[p]; - // decode base62 digit - a = a >= 32 && a < 128 ? bv[a - 32] : -1; - if (a == -1) - { // fail: invalid character - value = 0; - return false; - } - // accumulate, while checking for overflow - acc = checked(acc + ((ulong)a * factor)); - if (p-- > 0) factor *= 62; - } - value = acc; - return true; - } - - #endregion - - #region Fast I/O... - - internal static ulong Read(byte[] buffer, int offset) - { - Contract.Requires(buffer != null && offset >= 0 && offset + 7 < buffer.Length); - // buffer contains the bytes in Big Endian - ulong res = buffer[offset + 7]; - res |= ((ulong)buffer[offset + 6]) << 8; - res |= ((ulong)buffer[offset + 5]) << 16; - res |= ((ulong)buffer[offset + 4]) << 24; - res |= ((ulong)buffer[offset + 3]) << 32; - res |= ((ulong)buffer[offset + 2]) << 40; - res |= ((ulong)buffer[offset + 1]) << 48; - res |= ((ulong)buffer[offset + 0]) << 56; - return res; - } - - internal unsafe static ulong Read(byte* src) - { - ulong tmp; - - if (BitConverter.IsLittleEndian) - { // Intel ? 
- byte* ptr = (byte*)&tmp; - // big endian - ptr[0] = src[7]; - ptr[1] = src[6]; - ptr[2] = src[5]; - ptr[3] = src[4]; - ptr[4] = src[3]; - ptr[5] = src[2]; - ptr[6] = src[1]; - ptr[7] = src[0]; - } - else - { // ARM ? - tmp = *((ulong*)src); - } - - return tmp; - } - - internal unsafe static void Write(ulong value, byte* ptr) - { - if (BitConverter.IsLittleEndian) - { // Intel ? - byte* src = (byte*)&value; - ptr[0] = src[7]; - ptr[1] = src[6]; - ptr[2] = src[5]; - ptr[3] = src[4]; - ptr[4] = src[3]; - ptr[5] = src[2]; - ptr[6] = src[1]; - ptr[7] = src[0]; - } - else - { // ARM ? - *((ulong*)ptr) = value; - } - - } - - internal unsafe void WriteTo(byte* ptr) - { - Write(m_value, ptr); - } - - #endregion - - #region Operators... - - public static bool operator ==(Uuid64 left, Uuid64 right) - { - return left.m_value == right.m_value; - } - - public static bool operator !=(Uuid64 left, Uuid64 right) - { - return left.m_value != right.m_value; - } - - public static bool operator >(Uuid64 left, Uuid64 right) - { - return left.m_value > right.m_value; - } - - public static bool operator >=(Uuid64 left, Uuid64 right) - { - return left.m_value >= right.m_value; - } - - public static bool operator <(Uuid64 left, Uuid64 right) - { - return left.m_value < right.m_value; - } - - public static bool operator <=(Uuid64 left, Uuid64 right) - { - return left.m_value <= right.m_value; - } - - // Comparing an Uuid64 to a 64-bit integer can have sense for "if (id == 0)" or "if (id != 0)" ? 
- - public static bool operator ==(Uuid64 left, long right) - { - return left.m_value == (ulong)right; - } - - public static bool operator ==(Uuid64 left, ulong right) - { - return left.m_value == right; - } - - public static bool operator !=(Uuid64 left, long right) - { - return left.m_value != (ulong)right; - } - - public static bool operator !=(Uuid64 left, ulong right) - { - return left.m_value != right; - } - - /// Add a value from this instance - public static Uuid64 operator +(Uuid64 left, long right) - { - //TODO: how to handle overflow ? negative values ? - ulong v = (ulong)right; - return new Uuid64(checked(left.m_value + v)); - } - - /// Add a value from this instance - public static Uuid64 operator +(Uuid64 left, ulong right) - { - return new Uuid64(checked(left.m_value + right)); - } - - /// Subtract a value from this instance - public static Uuid64 operator -(Uuid64 left, long right) - { - //TODO: how to handle overflow ? negative values ? - ulong v = (ulong)right; - return new Uuid64(checked(left.m_value - v)); - } - - /// Subtract a value from this instance - public static Uuid64 operator -(Uuid64 left, ulong right) - { - return new Uuid64(checked(left.m_value - right)); - } - - /// Increments the value of this instance - public static Uuid64 operator ++(Uuid64 value) - { - return new Uuid64(checked(value.m_value + 1)); - } - - /// Decrements the value of this instance - public static Uuid64 operator --(Uuid64 value) - { - return new Uuid64(checked(value.m_value - 1)); - } - - #endregion - - } - - /// Helper class for generating 64-bit UUIDs from a secure random number generator - public sealed class Uuid64RandomGenerator - { - - /// Default instance of a random generator - /// Using this instance will introduce a global lock in your application. You can create specific instances for worker threads, if you require concurrency. 
- public static readonly Uuid64RandomGenerator Default = new Uuid64RandomGenerator(); - - private readonly System.Security.Cryptography.RandomNumberGenerator m_rng; - private readonly byte[] m_scratch = new byte[8]; - - /// Create a new instance of a random UUID generator - public Uuid64RandomGenerator() - : this(null) - { } - - /// Create a new instance of a random UUID generator, using a specific random number generator - public Uuid64RandomGenerator(System.Security.Cryptography.RandomNumberGenerator generator) - { - m_rng = generator ?? System.Security.Cryptography.RandomNumberGenerator.Create(); - } - - /// Return a new random 64-bit UUID - /// Uuid64 that contains 64 bits worth of randomness. - /// - ///

This methods needs to acquire a lock. If multiple threads needs to generate ids concurrently, you may need to create an instance of this class for each threads.

- ///

The uniqueness of the generated uuids depends on the quality of the random number generator. If you cannot tolerate collisions, you either have to check if a newly generated uid already exists, or use a different kind of generator.

- ///
- public Uuid64 NewUuid() - { - lock (m_rng) - { - // get 8 bytes of randomness (0 allowed) - m_rng.GetBytes(m_scratch); - return new Uuid64(m_scratch); - } - } - - } - -} diff --git a/FoundationDB.Client/VersionStamp.cs b/FoundationDB.Client/VersionStamp.cs new file mode 100644 index 000000000..beec01694 --- /dev/null +++ b/FoundationDB.Client/VersionStamp.cs @@ -0,0 +1,406 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace FoundationDB.Client +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Runtime.CompilerServices; + using Doxense; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using JetBrains.Annotations; + + /// VersionStamp + /// A versionstamp is unique, monotonically (but not sequentially) increasing value for each committed transaction. + /// Its size can either be 10 bytes (80-bits) or 12-bytes (96-bits). + /// The first 8 bytes are the committed version of the database. The next 2 bytes are monotonic in the serialization order for transactions. + /// The optional last 2 bytes can contain a user-provider version number used to allow multiple stamps inside the same transaction. + /// + [DebuggerDisplay("{ToString(),nq}")] + public readonly struct VersionStamp : IEquatable, IComparable + { + //REVIEW: they are called "Versionstamp" in the doc, but "VersionStamp" seems more .NETy (like 'TimeSpan'). + // => Should we keep the uppercase 'S' or not ? + + private const ulong PLACEHOLDER_VERSION = ulong.MaxValue; + private const ushort PLACEHOLDER_ORDER = ushort.MaxValue; + private const ushort NO_USER_VERSION = 0; + private const ulong HSB_VERSION = 0x8000000000000000UL; + + private const ushort FLAGS_NONE = 0x0; + private const ushort FLAGS_HAS_VERSION = 0x1; // unset: 80-bits, set: 96-bits + private const ushort FLAGS_IS_INCOMPLETE = 0x2; // unset: complete, set: incomplete + + /// Serialized bytes of the default incomplete stamp (composed of only 0xFF) + internal static readonly Slice IncompleteToken = Slice.Repeat(0xFF, 10); + + /// Commit version of the transaction + /// This value is determined by the database at commit time. + + public readonly ulong TransactionVersion; // Bytes 0..7 + + /// Transaction Batch Order + /// This value is determined by the database at commit time. 
+ public readonly ushort TransactionOrder; // Bytes 8..9 + + /// User-provided version (between 0 and 65535) + /// For 80-bits VersionStamps, this value will be 0 and will not be part of the serialized key. You can use to distinguish between both types of stamps. + public readonly ushort UserVersion; // Bytes 10..11 (if 'FLAGS_HAS_VERSION' is set) + + /// Internal flags (FLAGS_xxx constants) + private readonly ushort Flags; + //note: this flag is only present in memory, and is not serialized + + private VersionStamp(ulong version, ushort order, ushort user, ushort flags) + { + this.TransactionVersion = version; + this.TransactionOrder = order; + this.UserVersion = user; + this.Flags = flags; + } + + /// Creates an incomplete 80-bit with no user version. + /// Placeholder that will be serialized as FF FF FF FF FF FF FF FF FF FF (10 bytes). + /// + /// This stamp contains a temporary marker that will be later filled by the database with the actual VersioStamp by the database at transaction commit time. + /// If you need to create multiple distinct stamps within the same transaction, please use instead. + /// + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static VersionStamp Incomplete() + { + return new VersionStamp(PLACEHOLDER_VERSION, PLACEHOLDER_ORDER, NO_USER_VERSION, FLAGS_IS_INCOMPLETE); + } + + /// Creates an incomplete 96-bit with the given user version. + /// Value between 0 and 65535 that will be appended at the end of the Versionstamp, making it unique within the transaction. + /// Placeholder that will be serialized as FF FF FF FF FF FF FF FF FF FF vv vv (12 bytes) where 'vv vv' is the user version encoded in little-endian. + /// If is less than 0, or greater than 65534 (0xFFFE). 
+ [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static VersionStamp Incomplete(int userVersion) + { + Contract.Between(userVersion, 0, 0xFFFF, nameof(userVersion), "Local version must fit in 16-bits."); + return new VersionStamp(PLACEHOLDER_VERSION, PLACEHOLDER_ORDER, (ushort) userVersion, FLAGS_IS_INCOMPLETE | FLAGS_HAS_VERSION); + } + + /// Creates an incomplete 96-bit with the given user version. + /// Value between 0 and 65535 that will be appended at the end of the Versionstamp, making it unique within the transaction. + /// Placeholder that will be serialized as FF FF FF FF FF FF FF FF FF FF vv vv (12 bytes) where 'vv vv' is the user version encoded in little-endian. + /// If is less than 0, or greater than 65534 (0xFFFE). + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static VersionStamp Incomplete(ushort userVersion) + { + return new VersionStamp(PLACEHOLDER_VERSION, PLACEHOLDER_ORDER, userVersion, FLAGS_IS_INCOMPLETE | FLAGS_HAS_VERSION); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static VersionStamp Custom(ulong version, ushort order, bool incomplete) + { + return new VersionStamp(version, order, NO_USER_VERSION, incomplete ? FLAGS_IS_INCOMPLETE : FLAGS_NONE); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static VersionStamp Custom(ulong version, ushort order, int userVersion, bool incomplete) + { + Contract.Between(userVersion, 0, 0xFFFF, nameof(userVersion), "Local version must fit in 16-bits."); + return new VersionStamp(version, order, (ushort) userVersion, incomplete ? (ushort) (FLAGS_IS_INCOMPLETE | FLAGS_HAS_VERSION) : FLAGS_HAS_VERSION); + } + + /// Creates a 80-bit , obtained from the database. + /// Complete stamp, without user version. 
+ [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static VersionStamp Complete(ulong version, ushort order) + { + return new VersionStamp(version, order, NO_USER_VERSION, FLAGS_NONE); + } + + /// Creates a 96-bit , obtained from the database. + /// Complete stamp, with a user version. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static VersionStamp Complete(ulong version, ushort order, int userVersion) + { + Contract.Between(userVersion, 0, 0xFFFF, nameof(userVersion), "Local version must fit in 16-bits, and cannot be 0xFFFF."); + return new VersionStamp(version, order, (ushort) userVersion, FLAGS_HAS_VERSION); + } + + /// Creates a 96-bit , obtained from the database. + /// Complete stamp, with a user version. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static VersionStamp Complete(ulong version, ushort order, ushort userVersion) + { + return new VersionStamp(version, order, userVersion, FLAGS_HAS_VERSION); + } + + /// Test if the stamp has a user version (96-bits) or not (80-bits) + public bool HasUserVersion + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => (this.Flags & FLAGS_HAS_VERSION) != 0; + } + + /// Test if the stamp is marked as incomplete (true), or has already been resolved by the database (false) + public bool IsIncomplete + { + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + get => (this.Flags & FLAGS_IS_INCOMPLETE) != 0; + } + + /// Return the length (in bytes) of the versionstamp when serialized in binary format + /// Returns 12 bytes for stamps with a user version, and 10 bytes without. + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public int GetLength() => 10 + 2 * (this.Flags & FLAGS_HAS_VERSION); + + public override string ToString() + { + if (this.HasUserVersion) + { + return this.IsIncomplete + ? 
$"@?#{this.UserVersion}" + : $"@{this.TransactionVersion}-{this.TransactionOrder}#{this.UserVersion}"; + } + else + { + return this.IsIncomplete + ? "@?" + : $"@{this.TransactionVersion}-{this.TransactionOrder}"; + } + } + + public Slice ToSlice() + { + int len = GetLength(); // 10 or 12 + var tmp = Slice.Create(len); + unsafe + { + fixed (byte* ptr = &tmp.DangerousGetPinnableReference()) + { + WriteUnsafe(ptr, len, in this); + } + } + return tmp; + } + + public void WriteTo(in Slice buffer) + { + int len = GetLength(); // 10 or 12 + if (buffer.Count < len) throw new ArgumentException($"The target buffer must be at least {len} bytes long."); + unsafe + { + fixed (byte* ptr = &buffer.DangerousGetPinnableReference()) + { + WriteUnsafe(ptr, len, in this); + } + } + } + + public void WriteTo(ref SliceWriter writer) + { + var tmp = writer.Allocate(GetLength()); + unsafe + { + fixed (byte* ptr = &tmp.DangerousGetPinnableReference()) + { + WriteUnsafe(ptr, tmp.Count, in this); + } + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static unsafe void WriteUnsafe(byte* ptr, int len, in VersionStamp vs) + { + Contract.Debug.Assert(len == 10 || len == 12); + UnsafeHelpers.StoreUInt64BE(ptr, vs.TransactionVersion); + UnsafeHelpers.StoreUInt16BE(ptr + 8, vs.TransactionOrder); + if (len == 12) + { + UnsafeHelpers.StoreUInt16BE(ptr + 10, vs.UserVersion); + } + } + + /// Parse a VersionStamp from a sequence of 10 bytes + /// If the buffer length is not exactly 12 bytes + [Pure] + public static VersionStamp Parse(Slice data) + { + return TryParse(data, out var vs) ? 
vs : throw new FormatException("A VersionStamp is either 10 or 12 bytes."); + } + + /// Try parsing a VersionStamp from a sequence of bytes + public static bool TryParse(Slice data, out VersionStamp vs) + { + if (data.Count != 10 && data.Count != 12) + { + vs = default; + return false; + } + unsafe + { + fixed (byte* ptr = &data.DangerousGetPinnableReference()) + { + ReadUnsafe(ptr, data.Count, out vs); + return true; + } + } + } + + internal static unsafe void ReadUnsafe(byte* ptr, int len, out VersionStamp vs) + { + Contract.Debug.Assert(len == 10 || len == 12); + // reads a complete 12 bytes Versionstamp + ulong ver = UnsafeHelpers.LoadUInt64BE(ptr); + ushort order = UnsafeHelpers.LoadUInt16BE(ptr + 8); + ushort idx = len == 10 ? NO_USER_VERSION : UnsafeHelpers.LoadUInt16BE(ptr + 10); + ushort flags = FLAGS_NONE; + if (len == 12) flags |= FLAGS_HAS_VERSION; + if ((ver & HSB_VERSION) != 0) flags |= FLAGS_IS_INCOMPLETE; + vs = new VersionStamp(ver, order, idx, flags); + } + + #region Equality, Comparision, ... + + public override bool Equals(object obj) + { + return obj is VersionStamp vs && Equals(vs); + } + + public override int GetHashCode() + { + return HashCodes.Combine(this.TransactionVersion.GetHashCode(), this.TransactionOrder, this.UserVersion, this.Flags); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public bool Equals(VersionStamp other) + { + //PERF: could we use Unsafe and compare the next sizeof(VersionStamp) bytes at once? 
+ return (this.TransactionVersion == other.TransactionVersion) + & (this.TransactionOrder == other.TransactionOrder) + & (this.UserVersion == other.UserVersion) + & (this.Flags == other.Flags); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator ==(VersionStamp left, VersionStamp right) + { + return left.Equals(right); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator !=(VersionStamp left, VersionStamp right) + { + return !left.Equals(right); + } + + [Pure] + public int CompareTo(VersionStamp other) + { + //ordering rules: + // - incomplete stamps are stored AFTER resolved stamps (since if they commit they would have a value higher than any other stamp already in the database) + // - ordered by transaction number then transaction batch order + // - stamps with no user version are sorted before stamps with user version if they have the same first 10 bytes, so (XXXX) is before (XXXX, 0) + + if (this.IsIncomplete) + { // we ignore the transaction version/order! + if (!other.IsIncomplete) return +1; // we are after + } + else + { + if (other.IsIncomplete) return -1; // we are before + int cmp = this.TransactionVersion.CompareTo(other.TransactionVersion); + if (cmp != 0) return cmp; + } + + // both have same version+order, or both are incomplete + // => we need to decide on the (optional) user version + return this.HasUserVersion + ? (other.HasUserVersion ? this.UserVersion.CompareTo(other.UserVersion) : +1) + : (other.HasUserVersion ? 
-1 : 0); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator <(VersionStamp left, VersionStamp right) + { + return left.CompareTo(right) < 0; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator <=(VersionStamp left, VersionStamp right) + { + return left.CompareTo(right) <= 0; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator >(VersionStamp left, VersionStamp right) + { + return left.CompareTo(right) > 0; + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public static bool operator >=(VersionStamp left, VersionStamp right) + { + return left.CompareTo(right) >= 0; + } + + //REVIEW: does these make sense or not? + // VersionStamp - VersionStamp == ??? + // VersionStamp + 123 == ??? + // VersionStamp * 2 == ??? + + public sealed class Comparer : IEqualityComparer, IComparer + { + /// Default comparer for s + public static Comparer Default { get; } = new Comparer(); + + private Comparer() + { } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public bool Equals(VersionStamp x, VersionStamp y) + { + return x.Equals(y); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public int GetHashCode(VersionStamp obj) + { + return obj.GetHashCode(); + } + + [Pure, MethodImpl(MethodImplOptions.AggressiveInlining)] + public int Compare(VersionStamp x, VersionStamp y) + { + return x.CompareTo(y); + } + + } + + #endregion + + } + +} diff --git a/FoundationDB.Layers.Common/Blobs/FdbBlob.cs b/FoundationDB.Layers.Common/Blobs/FdbBlob.cs index 8e96dc6f6..53ad20f24 100644 --- a/FoundationDB.Layers.Common/Blobs/FdbBlob.cs +++ b/FoundationDB.Layers.Common/Blobs/FdbBlob.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,16 +28,16 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Blobs { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Diagnostics; using System.Globalization; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using FoundationDB.Client; + using JetBrains.Annotations; /// Represents a potentially large binary value in FoundationDB. - [DebuggerDisplay("Subspace={Subspace}")] + [DebuggerDisplay("Subspace={" + nameof(FdbBlob.Subspace) + "}")] public class FdbBlob { private const long CHUNK_LARGE = 10000; // all chunks will be not greater than this size @@ -52,15 +52,16 @@ public class FdbBlob /// Only keys within the subspace will be used by the object. /// Other clients of the database should refrain from modifying the subspace. /// Subspace to be used for storing the blob data and metadata - public FdbBlob([NotNull] IFdbSubspace subspace) + public FdbBlob([NotNull] IKeySubspace subspace) { - if (subspace == null) throw new ArgumentNullException("subspace"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); - this.Subspace = subspace.Using(TypeSystem.Tuples); + this.Subspace = subspace.AsDynamic(); } /// Subspace used as a prefix for all items in this table - public IFdbDynamicSubspace Subspace {[NotNull] get; private set; } + [NotNull] + public IDynamicKeySubspace Subspace { get; } /// Returns the key for data chunk at the specified offset /// @@ -90,7 +91,7 @@ protected virtual Slice AttributeKey(string name) #region Internal Helpers... 
- private struct Chunk + private readonly struct Chunk { public readonly Slice Key; public readonly Slice Data; @@ -108,7 +109,7 @@ private async Task GetChunkAtAsync([NotNull] IFdbTransaction trans, long { Contract.Requires(trans != null && offset >= 0); - var chunkKey = await trans.GetKeyAsync(FdbKeySelector.LastLessOrEqual(DataKey(offset))).ConfigureAwait(false); + var chunkKey = await trans.GetKeyAsync(KeySelector.LastLessOrEqual(DataKey(offset))).ConfigureAwait(false); if (chunkKey.IsNull) { // nothing before (sparse) return default(Chunk); @@ -200,7 +201,7 @@ private void SetSize([NotNull] IFdbTransaction trans, long size) /// public void Delete([NotNull] IFdbTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); trans.ClearRange(this.Subspace); } @@ -211,14 +212,14 @@ public void Delete([NotNull] IFdbTransaction trans) /// Return null if the blob does not exists, 0 if is empty, or the size in bytes public async Task GetSizeAsync([NotNull] IFdbReadOnlyTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); Slice value = await trans.GetAsync(SizeKey()).ConfigureAwait(false); if (value.IsNullOrEmpty) return default(long?); //note: python code stores the size as a string - long size = Int64.Parse(value.ToAscii()); + long size = Int64.Parse(value.ToString()); if (size < 0) throw new InvalidOperationException("The internal blob size cannot be negative"); return size; } @@ -228,8 +229,8 @@ public void Delete([NotNull] IFdbTransaction trans) /// public async Task ReadAsync([NotNull] IFdbReadOnlyTransaction trans, long offset, int n) { - if (trans == null) throw new ArgumentNullException("trans"); - if (offset < 0) throw new ArgumentNullException("offset", "Offset cannot be less than zero"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (offset < 
0) throw new ArgumentNullException(nameof(offset), "Offset cannot be less than zero"); long? size = await GetSizeAsync(trans).ConfigureAwait(false); if (size == null) return Slice.Nil; // not found @@ -242,8 +243,8 @@ public async Task ReadAsync([NotNull] IFdbReadOnlyTransaction trans, long await trans .GetRange( - FdbKeySelector.LastLessOrEqual(DataKey(offset)), - FdbKeySelector.FirstGreaterOrEqual(DataKey(offset + n)) + KeySelector.LastLessOrEqual(DataKey(offset)), + KeySelector.FirstGreaterOrEqual(DataKey(offset + n)) ) .ForEachAsync((chunk) => { @@ -273,7 +274,7 @@ await trans }) .ConfigureAwait(false); - return new Slice(buffer, 0, buffer.Length); + return buffer.AsSlice(0, buffer.Length); } /// @@ -281,8 +282,8 @@ await trans /// public async Task WriteAsync([NotNull] IFdbTransaction trans, long offset, Slice data) { - if (trans == null) throw new ArgumentNullException("trans"); - if (offset < 0) throw new ArgumentOutOfRangeException("offset", "Offset cannot be less than zero"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (offset < 0) throw new ArgumentOutOfRangeException(nameof(offset), "Offset cannot be less than zero"); if (data.IsNullOrEmpty) return; @@ -307,7 +308,7 @@ public async Task WriteAsync([NotNull] IFdbTransaction trans, long offset, Slice /// public async Task AppendAsync([NotNull] IFdbTransaction trans, Slice data) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); if (data.IsNullOrEmpty) return; @@ -322,8 +323,8 @@ public async Task AppendAsync([NotNull] IFdbTransaction trans, Slice data) /// public async Task TruncateAsync([NotNull] IFdbTransaction trans, long newLength) { - if (trans == null) throw new ArgumentNullException("trans"); - if (newLength < 0) throw new ArgumentOutOfRangeException("newLength", "Length cannot be less than zero"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (newLength < 0) 
throw new ArgumentOutOfRangeException(nameof(newLength), "Length cannot be less than zero"); long? length = await GetSizeAsync(trans).ConfigureAwait(false); if (length != null) diff --git a/FoundationDB.Layers.Common/Collections/FdbMap`2.cs b/FoundationDB.Layers.Common/Collections/FdbMap`2.cs index 12f61b461..3d214eece 100644 --- a/FoundationDB.Layers.Common/Collections/FdbMap`2.cs +++ b/FoundationDB.Layers.Common/Collections/FdbMap`2.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,35 +28,34 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections { - using FoundationDB.Async; - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Linq; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; + using System.Linq; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; + using JetBrains.Annotations; [DebuggerDisplay("Name={Name}, Subspace={Subspace}")] public class FdbMap { - public FdbMap([NotNull] string name, [NotNull] IFdbSubspace subspace, [NotNull] IValueEncoder valueEncoder) - : this(name, subspace, KeyValueEncoders.Tuples.Key(), valueEncoder) + public FdbMap([NotNull] string name, [NotNull] IKeySubspace subspace, [NotNull] IValueEncoder valueEncoder) + : this(name, subspace.AsTyped(), valueEncoder) { } - public FdbMap([NotNull] string name, [NotNull] IFdbSubspace subspace, [NotNull] IKeyEncoder keyEncoder, [NotNull] IValueEncoder valueEncoder) + public FdbMap([NotNull] string name, [NotNull] ITypedKeySubspace subspace, [NotNull] IValueEncoder valueEncoder) { - if (name == null) throw new ArgumentNullException("name"); - if (subspace == null) throw new 
ArgumentNullException("subspace"); - if (keyEncoder == null) throw new ArgumentNullException("keyEncoder"); - if (valueEncoder == null) throw new ArgumentNullException("valueEncoder"); + if (name == null) throw new ArgumentNullException(nameof(name)); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); + if (valueEncoder == null) throw new ArgumentNullException(nameof(valueEncoder)); this.Name = name; this.Subspace = subspace; - this.Location = subspace.UsingEncoder(keyEncoder); this.ValueEncoder = valueEncoder; } @@ -64,16 +63,16 @@ public FdbMap([NotNull] string name, [NotNull] IFdbSubspace subspace, [NotNull] /// Name of the map // REVIEW: do we really need this property? - public string Name { [NotNull] get; private set; } - - /// Subspace used as a prefix for all items in this map - public IFdbSubspace Subspace { [NotNull] get; private set; } + [NotNull] + public string Name { get; } /// Subspace used to encoded the keys for the items - protected IFdbEncoderSubspace Location { [NotNull] get; private set; } + [NotNull] + public ITypedKeySubspace Subspace { get; } /// Class that can serialize/deserialize values into/from slices - public IValueEncoder ValueEncoder { [NotNull] get; private set; } + [NotNull] + public IValueEncoder ValueEncoder { get; } #endregion @@ -87,10 +86,10 @@ public FdbMap([NotNull] string name, [NotNull] IFdbSubspace subspace, [NotNull] /// If the map does not contain an entry with this key. 
public async Task GetAsync([NotNull] IFdbReadOnlyTransaction trans, TKey id) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); - var data = await trans.GetAsync(this.Location.Keys.Encode(id)).ConfigureAwait(false); + var data = await trans.GetAsync(this.Subspace.Keys[id]).ConfigureAwait(false); if (data.IsNull) throw new KeyNotFoundException("The given id was not present in the map."); return this.ValueEncoder.DecodeValue(data); @@ -100,15 +99,15 @@ public async Task GetAsync([NotNull] IFdbReadOnlyTransaction trans, TKey /// Transaction used for the operation /// Key of the entry to read from the map /// Optional with the value of the entry it it exists, or an empty result if it is not present in the map. - public async Task> TryGetAsync([NotNull] IFdbReadOnlyTransaction trans, TKey id) + public async Task<(TValue Value, bool HasValue)> TryGetAsync([NotNull] IFdbReadOnlyTransaction trans, TKey id) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); - var data = await trans.GetAsync(this.Location.Keys.Encode(id)).ConfigureAwait(false); + var data = await trans.GetAsync(this.Subspace.Keys[id]).ConfigureAwait(false); - if (data.IsNull) return default(Optional); - return this.ValueEncoder.DecodeValue(data); + if (data.IsNull) return (default(TValue), false); + return (this.ValueEncoder.DecodeValue(data), true); } /// Add or update an entry in the map @@ -118,10 +117,10 @@ public async Task> TryGetAsync([NotNull] IFdbReadOnlyTransactio /// If the entry did not exist, it will be created. If not, its value will be replace with . 
public void Set([NotNull] IFdbTransaction trans, TKey id, TValue value) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); - trans.Set(this.Location.Keys.Encode(id), this.ValueEncoder.EncodeValue(value)); + trans.Set(this.Subspace.Keys[id], this.ValueEncoder.EncodeValue(value)); } /// Remove a single entry from the map @@ -130,10 +129,10 @@ public void Set([NotNull] IFdbTransaction trans, TKey id, TValue value) /// If the entry did not exist, the operation will not do anything. public void Remove([NotNull] IFdbTransaction trans, TKey id) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); - trans.Clear(this.Location.Keys.Encode(id)); + trans.Clear(this.Subspace.Keys[id]); } /// Create a query that will attempt to read all the entries in the map within a single transaction. @@ -141,12 +140,12 @@ public void Remove([NotNull] IFdbTransaction trans, TKey id) /// Async sequence of pairs of keys and values, ordered by keys ascending. /// CAUTION: This can be dangerous if the map contains a lot of entries! You should always use .Take() to limit the number of results returned. 
[NotNull] - public IFdbAsyncEnumerable> All([NotNull] IFdbReadOnlyTransaction trans, FdbRangeOptions options = null) + public IAsyncEnumerable> All([NotNull] IFdbReadOnlyTransaction trans, FdbRangeOptions options = null) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return trans - .GetRange(this.Location.ToRange(), options) + .GetRange(this.Subspace.ToRange(), options) .Select(this.DecodeItem); } @@ -154,14 +153,22 @@ public IFdbAsyncEnumerable> All([NotNull] IFdbReadOnl /// Transaction used for the operation /// List of the keys to read /// Array of results, in the same order as specified in . - public async Task[]> GetValuesAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IEnumerable ids) + public async Task GetValuesAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IEnumerable ids) { - if (trans == null) throw new ArgumentNullException("trans"); - if (ids == null) throw new ArgumentNullException("ids"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (ids == null) throw new ArgumentNullException(nameof(ids)); + + var kv = await trans.GetValuesAsync(ids.Select(id => this.Subspace.Keys[id])).ConfigureAwait(false); + if (kv.Length == 0) return Array.Empty(); - var results = await trans.GetValuesAsync(this.Location.Keys.Encode(ids)).ConfigureAwait(false); + var result = new TValue[kv.Length]; + var decoder = this.ValueEncoder; + for (int i = 0; i < kv.Length; i++) + { + result[i] = decoder.DecodeValue(kv[i]); + } - return Optional.DecodeRange(this.ValueEncoder, results); + return result; } #endregion @@ -171,7 +178,7 @@ public async Task[]> GetValuesAsync([NotNull] IFdbReadOnlyTrans private KeyValuePair DecodeItem(KeyValuePair item) { return new KeyValuePair( - this.Location.Keys.Decode(item.Key), + this.Subspace.Keys.Decode(item.Key), this.ValueEncoder.DecodeValue(item.Value) ); } @@ -181,7 +188,7 @@ private KeyValuePair[] 
DecodeItems(KeyValuePair[] ba { Contract.Requires(batch != null); - var keyEncoder = this.Location.Keys; + var keyEncoder = this.Subspace.Keys; var valueEncoder = this.ValueEncoder; var items = new KeyValuePair[batch.Length]; @@ -200,9 +207,9 @@ private KeyValuePair[] DecodeItems(KeyValuePair[] ba /// This will delete EVERYTHING in the map! public void Clear([NotNull] IFdbTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - trans.ClearRange(this.Location.ToRange()); + trans.ClearRange(this.Subspace.ToRange()); } #region Export... @@ -210,96 +217,96 @@ public void Clear([NotNull] IFdbTransaction trans) /// Exports the content of this map out of the database, by using as many transactions as necessary. /// Database used for the operation /// Handler called for each entry in the map. Calls to the handler are serialized, so it does not need to take locks. Any exception will abort the export and be thrown to the caller - /// Token used to cancel the operation. + /// Token used to cancel the operation. /// Task that completes once all the entries have been processed. /// This method does not guarantee that the export will be a complete and coherent snapshot of the map. Any change made to the map while the export is running may be partially exported. 
- public Task ExportAsync([NotNull] IFdbDatabase db, [NotNull] Action> handler, CancellationToken cancellationToken) + public Task ExportAsync([NotNull] IFdbDatabase db, [NotNull] Action> handler, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); return Fdb.Bulk.ExportAsync( db, - this.Location.ToRange(), - (batch, _, ct) => + this.Subspace.ToRange(), + (batch, _, __) => { foreach (var item in batch) { handler(DecodeItem(item)); } - return TaskHelpers.CompletedTask; + return Task.CompletedTask; }, - cancellationToken + ct ); } /// Exports the content of this map out of the database, by using as many transactions as necessary. /// Database used for the operation /// Handler called for each entry in the map. Calls to the handler are serialized, so it does not need to take locks. Any exception will abort the export and be thrown to the caller - /// Token used to cancel the operation. + /// Token used to cancel the operation. /// Task that completes once all the entries have been processed. /// This method does not guarantee that the export will be a complete and coherent snapshot of the map. Any change made to the map while the export is running may be partially exported. 
- public Task ExportAsync([NotNull] IFdbDatabase db, [NotNull] Func, CancellationToken, Task> handler, CancellationToken cancellationToken) + public Task ExportAsync([NotNull] IFdbDatabase db, [NotNull] Func, CancellationToken, Task> handler, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); return Fdb.Bulk.ExportAsync( db, - this.Location.ToRange(), - async (batch, _, ct) => + this.Subspace.ToRange(), + async (batch, _, __) => { foreach (var item in batch) { - await handler(DecodeItem(item), cancellationToken); + await handler(DecodeItem(item), ct); } }, - cancellationToken + ct ); } /// Exports the content of this map out of the database, by using as many transactions as necessary. /// Database used for the operation /// Handler called for each batch of items in the map. Calls to the handler are serialized, so it does not need to take locks. Any exception will abort the export and be thrown to the caller - /// Token used to cancel the operation. + /// Token used to cancel the operation. /// Task that completes once all the entries have been processed. /// This method does not guarantee that the export will be a complete and coherent snapshot of the map, except that all the items in a single batch are from the same snapshot. Any change made to the map while the export is running may be partially exported. 
- public Task ExportAsync([NotNull] IFdbDatabase db, [NotNull] Action[]> handler, CancellationToken cancellationToken) + public Task ExportAsync([NotNull] IFdbDatabase db, [NotNull] Action[]> handler, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); return Fdb.Bulk.ExportAsync( db, - this.Location.ToRange(), - (batch, _, ct) => + this.Subspace.ToRange(), + (batch, _, __) => { if (batch.Length > 0) { handler(DecodeItems(batch)); } - return TaskHelpers.CompletedTask; + return Task.CompletedTask; }, - cancellationToken + ct ); } /// Exports the content of this map out of the database, by using as many transactions as necessary. /// Database used for the operation /// Handler called for each batch of items in the map. Calls to the handler are serialized, so it does not need to take locks. Any exception will abort the export and be thrown to the caller - /// Token used to cancel the operation. + /// Token used to cancel the operation. /// Task that completes once all the entries have been processed. /// This method does not guarantee that the export will be a complete and coherent snapshot of the map, except that all the items in a single batch are from the same snapshot. Any change made to the map while the export is running may be partially exported. 
- public Task ExportAsync([NotNull] IFdbDatabase db, [NotNull] Func[], CancellationToken, Task> handler, CancellationToken cancellationToken) + public Task ExportAsync([NotNull] IFdbDatabase db, [NotNull] Func[], CancellationToken, Task> handler, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); return Fdb.Bulk.ExportAsync( db, - this.Location.ToRange(), - (batch, _, ct) => handler(DecodeItems(batch), ct), - cancellationToken + this.Subspace.ToRange(), + (batch, _, tok) => handler(DecodeItems(batch), tok), + ct ); } @@ -307,13 +314,13 @@ public Task ExportAsync([NotNull] IFdbDatabase db, [NotNull] FuncDatabase used for the operation /// Handler that is called once before the first batch, to produce the initial state. /// Handler called for each batch of items in the map. It is given the previous state, and should return the updated state. Calls to the handler are serialized, so it does not need to take locks. Any exception will abort the export and be thrown to the caller - /// Token used to cancel the operation. + /// Token used to cancel the operation. /// Task that completes once all the entries have been processed and return the result of the last call to if there was at least one batch, or the result of if the map was empty. /// This method does not guarantee that the export will be a complete and coherent snapshot of the map, except that all the items in a single batch are from the same snapshot. Any change made to the map while the export is running may be partially exported. 
- public async Task AggregateAsync([NotNull] IFdbDatabase db, Func init, [NotNull] Func[], TResult> handler, CancellationToken cancellationToken) + public async Task AggregateAsync([NotNull] IFdbDatabase db, Func init, [NotNull] Func[], TResult> handler, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); var state = default(TResult); if (init != null) @@ -323,13 +330,13 @@ public async Task AggregateAsync([NotNull] IFdbDatabase db, Fu await Fdb.Bulk.ExportAsync( db, - this.Location.ToRange(), - (batch, _, ct) => + this.Subspace.ToRange(), + (batch, _, __) => { state = handler(state, DecodeItems(batch)); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; }, - cancellationToken + ct ); return state; @@ -340,13 +347,13 @@ await Fdb.Bulk.ExportAsync( /// Handler that is called once before the first batch, to produce the initial state. /// Handler called for each batch of items in the map. It is given the previous state, and should return the updated state. Calls to the handler are serialized, so it does not need to take locks. Any exception will abort the export and be thrown to the caller /// Handler that is called one after the last batch, to produce the final result out of the last state. - /// Token used to cancel the operation. + /// Token used to cancel the operation. /// Task that completes once all the entries have been processed and return the result of calling with the state return by the last call to if there was at least one batch, or the result of if the map was empty. /// This method does not guarantee that the export will be a complete and coherent snapshot of the map, except that all the items in a single batch are from the same snapshot. 
Any change made to the map while the export is running may be partially exported. - public async Task AggregateAsync([NotNull] IFdbDatabase db, Func init, [NotNull] Func[], TState> handler, Func finish, CancellationToken cancellationToken) + public async Task AggregateAsync([NotNull] IFdbDatabase db, Func init, [NotNull] Func[], TState> handler, Func finish, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); var state = default(TState); if (init != null) @@ -356,16 +363,16 @@ public async Task AggregateAsync([NotNull] IFdbDatabas await Fdb.Bulk.ExportAsync( db, - this.Location.ToRange(), - (batch, _, ct) => + this.Subspace.ToRange(), + (batch, _, __) => { state = handler(state, DecodeItems(batch)); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; }, - cancellationToken + ct ); - cancellationToken.ThrowIfCancellationRequested(); + ct.ThrowIfCancellationRequested(); var result = default(TResult); if (finish != null) @@ -383,22 +390,22 @@ await Fdb.Bulk.ExportAsync( /// Imports a potentially large sequence of items into the map. /// Database used for the operation /// Sequence of items to import. If the item already exists in the map, its value will be overwritten. - /// Token used to cancel the operation + /// Token used to cancel the operation /// ///

Any previously existing items in the map will remain. If you want to get from the previous content, you need to clear the map before hand.

///

Other transactions may see a partial view of the map while the sequence is being imported. If this is a problem, you may need to import the map into a temporary subspace, and then 'publish' the final result using an indirection layer (like the Directory Layer)

///

If the import operation fails midway, all items that have already been successfully imported will be kept in the database.

///
- public Task ImportAsync([NotNull] IFdbDatabase db, [NotNull] IEnumerable> items, CancellationToken cancellationToken) + public Task ImportAsync([NotNull] IFdbDatabase db, [NotNull] IEnumerable> items, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (items == null) throw new ArgumentNullException("items"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (items == null) throw new ArgumentNullException(nameof(items)); return Fdb.Bulk.InsertAsync( db, items, (item, tr) => this.Set(tr, item.Key, item.Value), - cancellationToken + ct ); } @@ -406,23 +413,23 @@ public Task ImportAsync([NotNull] IFdbDatabase db, [NotNull] IEnumerableDatabase used for the operation /// Sequence of elements to import. If an item with the same key already exists in the map, its value will be overwritten. /// Lambda that will extract the key of an element - /// Token used to cancel the operation + /// Token used to cancel the operation /// ///

Any previously existing items in the map will remain. If you want to get from the previous content, you need to clear the map before hand.

///

Other transactions may see a partial view of the map while the sequence is being imported. If this is a problem, you may need to import the map into a temporary subspace, and then 'publish' the final result using an indirection layer (like the Directory Layer)

///

If the import operation fails midway, all items that have already been successfully imported will be kept in the database.

///
- public Task ImportAsync([NotNull] IFdbDatabase db, [NotNull] IEnumerable items, [NotNull] Func keySelector, CancellationToken cancellationToken) + public Task ImportAsync([NotNull] IFdbDatabase db, [NotNull] IEnumerable items, [NotNull] Func keySelector, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (items == null) throw new ArgumentNullException("items"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (items == null) throw new ArgumentNullException(nameof(items)); if (keySelector == null) throw new ArgumentException("keySelector"); return Fdb.Bulk.InsertAsync( db, items, (item, tr) => this.Set(tr, keySelector(item), item), - cancellationToken + ct ); } @@ -431,16 +438,16 @@ public Task ImportAsync([NotNull] IFdbDatabase db, [NotNull] IEnumerable /// Sequence of elements to import. If an item with the same key already exists in the map, its value will be overwritten. /// Lambda that will return the key of an element /// Lambda that will return the value of an element - /// Token used to cancel the operation + /// Token used to cancel the operation /// ///

Any previously existing items in the map will remain. If you want to get from the previous content, you need to clear the map before hand.

///

Other transactions may see a partial view of the map while the sequence is being imported. If this is a problem, you may need to import the map into a temporary subspace, and then 'publish' the final result using an indirection layer (like the Directory Layer)

///

If the import operation fails midway, all items that have already been successfully imported will be kept in the database.

///
- public Task ImportAsync([NotNull] IFdbDatabase db, [NotNull] IEnumerable items, [NotNull] Func keySelector, [NotNull] Func valueSelector, CancellationToken cancellationToken) + public Task ImportAsync([NotNull] IFdbDatabase db, [NotNull] IEnumerable items, [NotNull] Func keySelector, [NotNull] Func valueSelector, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (items == null) throw new ArgumentNullException("items"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (items == null) throw new ArgumentNullException(nameof(items)); if (keySelector == null) throw new ArgumentException("keySelector"); if (valueSelector == null) throw new ArgumentException("valueSelector"); @@ -448,7 +455,7 @@ public Task ImportAsync([NotNull] IFdbDatabase db, [NotNull] IEnumerab db, items, (item, tr) => this.Set(tr, keySelector(item), valueSelector(item)), - cancellationToken + ct ); } diff --git a/FoundationDB.Layers.Common/Collections/FdbMultimap`2.cs b/FoundationDB.Layers.Common/Collections/FdbMultimap`2.cs index 4739173a2..8ae8ed464 100644 --- a/FoundationDB.Layers.Common/Collections/FdbMultimap`2.cs +++ b/FoundationDB.Layers.Common/Collections/FdbMultimap`2.cs @@ -28,21 +28,21 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections { - using FoundationDB.Client; - using FoundationDB.Linq; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; using System.Threading.Tasks; + using Doxense.Linq; + using FoundationDB.Client; + using JetBrains.Annotations; /// Multimap that tracks the number of times a specific key/value pair has been inserted or removed. 
/// Type of the keys of the map /// Type of the values of the map - [DebuggerDisplay("Subspace={Subspace}")] + [DebuggerDisplay("Subspace={" + nameof(FdbMultiMap.Subspace) + "}")] public class FdbMultiMap { - // Inspired by https://foundationdb.com/recipes/developer/multimaps + // Inspired by https://apple.github.io/foundationdb/multimaps.html // It is the logical equivalent of a Map, long> where the value would be incremented each time a specific pair of (key, value) is added (and subtracted when removed) // The layer stores each key/value using the following format: @@ -54,34 +54,30 @@ public class FdbMultiMap /// Create a new multimap /// Location where the map will be stored in the database /// If true, allow negative or zero values to stay in the map. - public FdbMultiMap(IFdbSubspace subspace, bool allowNegativeValues) - : this(subspace, allowNegativeValues, KeyValueEncoders.Tuples.CompositeKey()) + public FdbMultiMap(IKeySubspace subspace, bool allowNegativeValues) + : this(subspace.AsTyped(), allowNegativeValues) { } /// Create a new multimap, using a specific key and value encoder /// Location where the map will be stored in the database /// If true, allow negative or zero values to stay in the map. /// Encoder for the key/value pairs - public FdbMultiMap(IFdbSubspace subspace, bool allowNegativeValues, ICompositeKeyEncoder encoder) + public FdbMultiMap(ITypedKeySubspace subspace, bool allowNegativeValues) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); - this.Subspace = subspace; this.AllowNegativeValues = allowNegativeValues; - this.Location = subspace.UsingEncoder(encoder); + this.Subspace = subspace; } #region Public Properties... 
- /// Subspace used as a prefix for all items in this map - public IFdbSubspace Subspace { [NotNull] get; private set; } + /// Subspace used to encoded the keys for the items + [NotNull] + public ITypedKeySubspace Subspace { get; } /// If true, allow negative or zero values to stay in the map. - public bool AllowNegativeValues { get; private set; } - - /// Subspace used to encoded the keys for the items - protected IFdbEncoderSubspace Location { [NotNull] get; private set; } + public bool AllowNegativeValues { get; } #endregion @@ -96,10 +92,10 @@ public FdbMultiMap(IFdbSubspace subspace, bool allowNegativeValues, ICompositeKe public Task AddAsync([NotNull] IFdbTransaction trans, TKey key, TValue value) { //note: this method does not need to be async, but subtract is, so it's better if both methods have the same shape. - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - trans.AtomicAdd(this.Location.Keys.Encode(key, value), PlusOne); - return FoundationDB.Async.TaskHelpers.CompletedTask; + trans.AtomicAdd(this.Subspace.Keys[key, value], PlusOne); + return Task.CompletedTask; } /// Decrements the count of an (index, value) pair in the multimap, and optionally removes it if the count reaches zero. @@ -109,9 +105,9 @@ public Task AddAsync([NotNull] IFdbTransaction trans, TKey key, TValue value) /// If the updated count reaches zero or less, and AllowNegativeValues is not set, the key will be cleared from the map. 
public async Task SubtractAsync([NotNull] IFdbTransaction trans, TKey key, TValue value) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - Slice k = this.Location.Keys.Encode(key, value); + Slice k = this.Subspace.Keys[key, value]; if (this.AllowNegativeValues) { trans.AtomicAdd(k, MinusOne); @@ -137,9 +133,9 @@ public async Task SubtractAsync([NotNull] IFdbTransaction trans, TKey key, TValu /// Checks if a (key, value) pair exists public async Task ContainsAsync([NotNull] IFdbReadOnlyTransaction trans, TKey key, TValue value) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - var v = await trans.GetAsync(this.Location.Keys.Encode(key, value)).ConfigureAwait(false); + var v = await trans.GetAsync(this.Subspace.Keys[key, value]).ConfigureAwait(false); return this.AllowNegativeValues ? v.IsPresent : v.ToInt64() > 0; } @@ -151,9 +147,9 @@ public async Task ContainsAsync([NotNull] IFdbReadOnlyTransaction trans, T /// The count can be zero or negative if AllowNegativeValues is enable. public async Task GetCountAsync([NotNull] IFdbReadOnlyTransaction trans, TKey key, TValue value) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - Slice v = await trans.GetAsync(this.Location.Keys.Encode(key, value)).ConfigureAwait(false); + Slice v = await trans.GetAsync(this.Subspace.Keys[key, value]).ConfigureAwait(false); if (v.IsNullOrEmpty) return null; long c = v.ToInt64(); return this.AllowNegativeValues || c > 0 ? 
c : default(long?); @@ -164,23 +160,23 @@ public async Task ContainsAsync([NotNull] IFdbReadOnlyTransaction trans, T /// /// [NotNull] - public IFdbAsyncEnumerable Get([NotNull] IFdbReadOnlyTransaction trans, TKey key) + public IAsyncEnumerable Get([NotNull] IFdbReadOnlyTransaction trans, TKey key) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - var range = FdbKeyRange.StartsWith(this.Location.Partial.Keys.Encode(key)); + var range = KeyRange.StartsWith(this.Subspace.Keys.EncodePartial(key)); if (this.AllowNegativeValues) { return trans .GetRange(range) - .Select(kvp => this.Location.Keys.Decode(kvp.Key).Item2); + .Select(kvp => this.Subspace.Keys.Decode(kvp.Key).Item2); } else { return trans .GetRange(range) .Where(kvp => kvp.Value.ToInt64() > 0) // we need to filter out zero or negative values (possible artefacts) - .Select(kvp => this.Location.Keys.Decode(kvp.Key).Item2); + .Select(kvp => this.Subspace.Keys.Decode(kvp.Key).Item2); } } @@ -198,22 +194,17 @@ public Task> GetAsync([NotNull] IFdbReadOnlyTransaction trans, TKey /// /// [NotNull] - public IFdbAsyncEnumerable> GetCounts([NotNull] IFdbReadOnlyTransaction trans, TKey key) + public IAsyncEnumerable<(TValue Value, long Count)> GetCounts([NotNull] IFdbReadOnlyTransaction trans, TKey key) { - var range = FdbKeyRange.StartsWith(this.Location.Partial.Keys.Encode(key)); + var range = KeyRange.StartsWith(this.Subspace.Keys.EncodePartial(key)); var query = trans .GetRange(range) - .Select(kvp => new KeyValuePair(this.Location.Keys.Decode(kvp.Key).Item2, kvp.Value.ToInt64())); + .Select(kvp => (Value: this.Subspace.Keys.Decode(kvp.Key).Item2, Count: kvp.Value.ToInt64())); - if (this.AllowNegativeValues) - { - return query; - } - else - { - return query.Where(kvp => kvp.Value > 0); - } + return this.AllowNegativeValues + ? 
query + : query.Where(x => x.Count > 0); } /// Returns a dictionary with of the counts of each value for a specific key @@ -223,7 +214,7 @@ public IFdbAsyncEnumerable> GetCounts([NotNull] IFdbR /// public Task> GetCountsAsync([NotNull] IFdbReadOnlyTransaction trans, TKey key, IEqualityComparer comparer = null) { - return GetCounts(trans, key).ToDictionaryAsync(comparer); + return GetCounts(trans, key).ToDictionaryAsync(x => x.Value, x => x.Count, comparer); } /// Remove all the values for a specific key @@ -232,9 +223,9 @@ public Task> GetCountsAsync([NotNull] IFdbReadOnlyTrans /// public void Remove([NotNull] IFdbTransaction trans, TKey key) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - trans.ClearRange(FdbKeyRange.StartsWith(this.Location.Partial.Keys.Encode(key))); + trans.ClearRange(KeyRange.StartsWith(this.Subspace.Keys.EncodePartial(key))); } /// Remove a value for a specific key @@ -244,9 +235,9 @@ public void Remove([NotNull] IFdbTransaction trans, TKey key) /// public void Remove([NotNull] IFdbTransaction trans, TKey key, TValue value) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - trans.Clear(this.Location.Keys.Encode(key, value)); + trans.Clear(this.Subspace.Keys[key, value]); } #endregion diff --git a/FoundationDB.Layers.Common/Collections/FdbQueue`1.cs b/FoundationDB.Layers.Common/Collections/FdbQueue`1.cs index 86b791899..b29cb08cd 100644 --- a/FoundationDB.Layers.Common/Collections/FdbQueue`1.cs +++ b/FoundationDB.Layers.Common/Collections/FdbQueue`1.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,79 +28,74 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections { + using System; + using System.Collections.Generic; + using System.Threading; + using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Serialization.Encoders; using FoundationDB.Client; #if DEBUG using FoundationDB.Filters.Logging; #endif using JetBrains.Annotations; - using System; - using System.Collections.Generic; - using System.Threading; - using System.Threading.Tasks; - using FoundationDB.Async; - /// - /// Provides a high-contention Queue class - /// + /// Provides a high-contention Queue class public class FdbQueue { - // from https://github.com/FoundationDB/python-layers/blob/master/lib/queue.py + // from https://apple.github.io/foundationdb/queues.html // TODO: should we use a PRNG ? If two counter instances are created at the same moment, they could share the same seed ? private readonly Random Rng = new Random(); - /// Create a new High Contention Queue - /// Subspace where the queue will be stored - /// Uses the default Tuple serializer - public FdbQueue([NotNull] FdbSubspace subspace) - : this(subspace, highContention: true, encoder: KeyValueEncoders.Tuples.Value()) - { } - /// Create a new queue using either High Contention mode or Simple mode /// Subspace where the queue will be stored /// If true, uses High Contention Mode (lots of popping clients). If true, uses the Simple Mode (a few popping clients). 
+ /// Encoder for the values stored in this queue /// Uses the default Tuple serializer - public FdbQueue([NotNull] FdbSubspace subspace, bool highContention) - : this(subspace, highContention: highContention, encoder: KeyValueEncoders.Tuples.Value()) + public FdbQueue([NotNull] IKeySubspace subspace, bool highContention = true, IValueEncoder encoder = null) + : this(subspace.AsDynamic(), highContention, encoder) { } /// Create a new queue using either High Contention mode or Simple mode /// Subspace where the queue will be stored /// If true, uses High Contention Mode (lots of popping clients). If true, uses the Simple Mode (a few popping clients). - public FdbQueue([NotNull] IFdbSubspace subspace, bool highContention, [NotNull] IValueEncoder encoder) + /// Encoder for the values stored in this queue + public FdbQueue([NotNull] IDynamicKeySubspace subspace, bool highContention = false, IValueEncoder encoder = null) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); - this.Subspace = subspace.Using(TypeSystem.Tuples); + this.Subspace = subspace; this.HighContention = highContention; - this.Encoder = encoder; + this.Encoder = encoder ?? TuPack.Encoding.GetValueEncoder(); //TODO: rewrite this, using FdbEncoderSubpsace<..> ! 
- this.ConflictedPop = this.Subspace.Partition.ByKey(Slice.FromAscii("pop")); - this.ConflictedItem = this.Subspace.Partition.ByKey(Slice.FromAscii("conflict")); - this.QueueItem = this.Subspace.Partition.ByKey(Slice.FromAscii("item")); + this.ConflictedPop = this.Subspace.Partition.ByKey(Slice.FromStringAscii("pop")); + this.ConflictedItem = this.Subspace.Partition.ByKey(Slice.FromStringAscii("conflict")); + this.QueueItem = this.Subspace.Partition.ByKey(Slice.FromStringAscii("item")); } /// Subspace used as a prefix for all items in this table - public IFdbDynamicSubspace Subspace { [NotNull] get; private set; } + [NotNull] + public IDynamicKeySubspace Subspace { get; } /// If true, the queue is operating in High Contention mode that will scale better with a lot of popping clients. - public bool HighContention { get; private set; } + public bool HighContention { get; } /// Serializer for the elements of the queue - public IValueEncoder Encoder { [NotNull] get; private set; } + [NotNull] + public IValueEncoder Encoder { get; } - internal IFdbDynamicSubspace ConflictedPop { get; private set; } + internal IDynamicKeySubspace ConflictedPop { get; } - internal IFdbDynamicSubspace ConflictedItem { get; private set; } + internal IDynamicKeySubspace ConflictedItem { get; } - internal IFdbDynamicSubspace QueueItem { get; private set; } + internal IDynamicKeySubspace QueueItem { get; } /// Remove all items from the queue. public void Clear([NotNull] IFdbTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); trans.ClearRange(this.Subspace); } @@ -108,7 +103,7 @@ public void Clear([NotNull] IFdbTransaction trans) /// Push a single item onto the queue. 
public async Task PushAsync([NotNull] IFdbTransaction trans, T value) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); #if DEBUG trans.Annotate("Push({0})", value); @@ -124,22 +119,22 @@ public async Task PushAsync([NotNull] IFdbTransaction trans, T value) } /// Pop the next item from the queue. Cannot be composed with other functions in a single transaction. - public Task> PopAsync([NotNull] IFdbDatabase db, CancellationToken cancellationToken) + public Task<(T Value, bool HasValue)> PopAsync([NotNull] IFdbDatabase db, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); + if (db == null) throw new ArgumentNullException(nameof(db)); - if (cancellationToken.IsCancellationRequested) + if (ct.IsCancellationRequested) { - return TaskHelpers.FromCancellation>(cancellationToken); + return Task.FromCanceled<(T, bool)>(ct); } if (this.HighContention) { - return PopHighContentionAsync(db, cancellationToken); + return PopHighContentionAsync(db, ct); } else { - return db.ReadWriteAsync((tr) => this.PopSimpleAsync(tr), cancellationToken); + return db.ReadWriteAsync((tr) => PopSimpleAsync(tr), ct); } } @@ -150,101 +145,99 @@ public async Task EmptyAsync([NotNull] IFdbReadOnlyTransaction tr) } /// Get the value of the next item in the queue without popping it. 
- public async Task> PeekAsync([NotNull] IFdbReadOnlyTransaction tr) + public async Task<(T Value, bool HasValue)> PeekAsync([NotNull] IFdbReadOnlyTransaction tr) { var firstItem = await GetFirstItemAsync(tr).ConfigureAwait(false); if (firstItem.Key.IsNull) { - return default(Optional); - } - else - { - return this.Encoder.DecodeValue(firstItem.Value); + return default; } + + return (this.Encoder.DecodeValue(firstItem.Value), true); } #region Bulk Operations - public Task ExportAsync(IFdbDatabase db, Action handler, CancellationToken cancellationToken) + public Task ExportAsync(IFdbDatabase db, Action handler, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); //REVIEW: is this approach correct ? return Fdb.Bulk.ExportAsync( db, this.QueueItem.Keys.ToRange(), - (kvs, offset, ct) => + (kvs, offset, _) => { foreach(var kv in kvs) { - if (cancellationToken.IsCancellationRequested) cancellationToken.ThrowIfCancellationRequested(); + if (ct.IsCancellationRequested) ct.ThrowIfCancellationRequested(); handler(this.Encoder.DecodeValue(kv.Value), offset); ++offset; } - return TaskHelpers.CompletedTask; + return Task.CompletedTask; }, - cancellationToken + ct ); } - public Task ExportAsync(IFdbDatabase db, Func handler, CancellationToken cancellationToken) + public Task ExportAsync(IFdbDatabase db, Func handler, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); //REVIEW: is this approach correct ? 
return Fdb.Bulk.ExportAsync( db, this.QueueItem.Keys.ToRange(), - async (kvs, offset, ct) => + async (kvs, offset, _) => { foreach (var kv in kvs) { - if (cancellationToken.IsCancellationRequested) cancellationToken.ThrowIfCancellationRequested(); + if (ct.IsCancellationRequested) ct.ThrowIfCancellationRequested(); await handler(this.Encoder.DecodeValue(kv.Value), offset); ++offset; } }, - cancellationToken + ct ); } - public Task ExportAsync(IFdbDatabase db, Action handler, CancellationToken cancellationToken) + public Task ExportAsync(IFdbDatabase db, Action handler, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); //REVIEW: is this approach correct ? return Fdb.Bulk.ExportAsync( db, this.QueueItem.Keys.ToRange(), - (kvs, offset, ct) => + (kvs, offset, _) => { handler(this.Encoder.DecodeValues(kvs), offset); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; }, - cancellationToken + ct ); } - public Task ExportAsync(IFdbDatabase db, Func handler, CancellationToken cancellationToken) + public Task ExportAsync(IFdbDatabase db, Func handler, CancellationToken ct) { - if (db == null) throw new ArgumentNullException("db"); - if (handler == null) throw new ArgumentNullException("handler"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (handler == null) throw new ArgumentNullException(nameof(handler)); //REVIEW: is this approach correct ? 
return Fdb.Bulk.ExportAsync( db, this.QueueItem.Keys.ToRange(), - (kvs, offset, ct) => handler(this.Encoder.DecodeValues(kvs), offset), - cancellationToken + (kvs, offset, _) => handler(this.Encoder.DecodeValues(kvs), offset), + ct ); } @@ -277,11 +270,11 @@ private async Task PushAtAsync([NotNull] IFdbTransaction tr, T value, long index tr.Set(key, this.Encoder.EncodeValue(value)); } - private async Task GetNextIndexAsync([NotNull] IFdbReadOnlyTransaction tr, IFdbDynamicSubspace subspace) + private async Task GetNextIndexAsync([NotNull] IFdbReadOnlyTransaction tr, IDynamicKeySubspace subspace) { var range = subspace.Keys.ToRange(); - var lastKey = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(range.End)).ConfigureAwait(false); + var lastKey = await tr.GetKeyAsync(KeySelector.LastLessThan(range.End)).ConfigureAwait(false); if (lastKey < range.Begin) { @@ -297,17 +290,17 @@ private Task> GetFirstItemAsync([NotNull] IFdbReadOnl return tr.GetRange(range).FirstOrDefaultAsync(); } - private async Task> PopSimpleAsync([NotNull] IFdbTransaction tr) + private async Task<(T Value, bool HasValue)> PopSimpleAsync([NotNull] IFdbTransaction tr) { #if DEBUG tr.Annotate("PopSimple()"); #endif var firstItem = await GetFirstItemAsync(tr).ConfigureAwait(false); - if (firstItem.Key.IsNull) return default(Optional); + if (firstItem.Key.IsNull) return default; tr.Clear(firstItem.Key); - return this.Encoder.DecodeValue(firstItem.Value); + return (this.Encoder.DecodeValue(firstItem.Value), true); } private Task AddConflictedPopAsync([NotNull] IFdbDatabase db, bool forced, CancellationToken ct) @@ -344,7 +337,7 @@ private Task>> GetItemsAsync([NotNull] IFdbReadO private async Task FulfillConflictedPops([NotNull] IFdbDatabase db, CancellationToken ct) { - const int numPops = 100; + const int NUM_POPS = 100; using (var tr = db.BeginTransaction(ct)) { @@ -353,8 +346,8 @@ private async Task FulfillConflictedPops([NotNull] IFdbDatabase db, Cancel #endif var ts = await Task.WhenAll( - 
GetWaitingPopsAsync(tr.Snapshot, numPops), - GetItemsAsync(tr.Snapshot, numPops) + GetWaitingPopsAsync(tr.Snapshot, NUM_POPS), + GetItemsAsync(tr.Snapshot, NUM_POPS) ).ConfigureAwait(false); var pops = ts[0]; @@ -402,11 +395,11 @@ private async Task FulfillConflictedPops([NotNull] IFdbDatabase db, Cancel // commit await tr.CommitAsync().ConfigureAwait(false); - return pops.Count < numPops; + return pops.Count < NUM_POPS; } } - private async Task> PopHighContentionAsync([NotNull] IFdbDatabase db, CancellationToken ct) + private async Task<(T Value, bool HasValue)> PopHighContentionAsync([NotNull] IFdbDatabase db, CancellationToken ct) { int backOff = 10; Slice waitKey = Slice.Empty; @@ -419,7 +412,6 @@ private async Task> PopHighContentionAsync([NotNull] IFdbDatabase db tr.Annotate("PopHighContention()"); #endif - FdbException error = null; try { // Check if there are other people waiting to be popped. If so, we cannot pop before them. @@ -435,13 +427,7 @@ private async Task> PopHighContentionAsync([NotNull] IFdbDatabase db await tr.CommitAsync().ConfigureAwait(false); } } - catch (FdbException e) - { - // note: cannot await inside a catch(..) block, so flag the error and process it below - error = e; - } - - if (error != null) + catch (FdbException) { // If we didn't succeed, then register our pop request waitKey = await AddConflictedPopAsync(db, forced: true, ct: ct).ConfigureAwait(false); } @@ -456,7 +442,6 @@ private async Task> PopHighContentionAsync([NotNull] IFdbDatabase db while (!ct.IsCancellationRequested) { - error = null; try { while (!(await FulfillConflictedPops(db, ct).ConfigureAwait(false))) @@ -464,24 +449,18 @@ private async Task> PopHighContentionAsync([NotNull] IFdbDatabase db //NOP ? } } - catch (FdbException e) - { - // cannot await in catch(..) 
block so process it below - error = e; - } - - if (error != null && error.Code != FdbError.NotCommitted) + catch (FdbException e) when (e.Code != FdbError.NotCommitted) { // If the error is 1020 (not_committed), then there is a good chance // that somebody else has managed to fulfill some outstanding pops. In // that case, we proceed to check whether our request has been fulfilled. // Otherwise, we handle the error in the usual fashion. - await tr.OnErrorAsync(error.Code).ConfigureAwait(false); + await tr.OnErrorAsync(e.Code).ConfigureAwait(false); continue; } - error = null; + try { tr.Reset(); @@ -509,22 +488,17 @@ private async Task> PopHighContentionAsync([NotNull] IFdbDatabase db if (result.IsNullOrEmpty) { - return default(Optional); + return default; } tr.Clear(resultKey); await tr.CommitAsync().ConfigureAwait(false); - return this.Encoder.DecodeValue(result); + return (this.Encoder.DecodeValue(result), true); } catch (FdbException e) { - error = e; - } - - if (error != null) - { - await tr.OnErrorAsync(error.Code).ConfigureAwait(false); + await tr.OnErrorAsync(e.Code).ConfigureAwait(false); } } diff --git a/FoundationDB.Layers.Common/Collections/FdbRankedSet.cs b/FoundationDB.Layers.Common/Collections/FdbRankedSet.cs index f72d5b84d..c4730edef 100644 --- a/FoundationDB.Layers.Common/Collections/FdbRankedSet.cs +++ b/FoundationDB.Layers.Common/Collections/FdbRankedSet.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,13 +28,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Linq; - using JetBrains.Annotations; using System; using System.Linq; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using FoundationDB.Client; + using JetBrains.Annotations; /// /// Provides a high-contention Queue class @@ -51,28 +51,28 @@ public class FdbRankedSet /// Initializes a new ranked set at a given location /// Subspace where the set will be stored - public FdbRankedSet([NotNull] IFdbSubspace subspace) + public FdbRankedSet([NotNull] IKeySubspace subspace) { - if (subspace == null) throw new ArgumentNullException("subspace"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); - this.Subspace = subspace.Using(TypeSystem.Tuples); + this.Subspace = subspace.AsDynamic(); } public Task OpenAsync([NotNull] IFdbTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return SetupLevelsAsync(trans); } /// Subspace used as a prefix for all items in this table - public IFdbDynamicSubspace Subspace { [NotNull] get; private set; } + public IDynamicKeySubspace Subspace { [NotNull] get; private set; } /// Returns the number of items in the set. 
/// /// public Task SizeAsync([NotNull] IFdbReadOnlyTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return trans .GetRange(this.Subspace.Partition.ByKey(MAX_LEVELS - 1).Keys.ToRange()) @@ -82,7 +82,7 @@ public Task SizeAsync([NotNull] IFdbReadOnlyTransaction trans) public async Task InsertAsync([NotNull] IFdbTransaction trans, Slice key) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); if (await ContainsAsync(trans, key).ConfigureAwait(false)) { @@ -98,7 +98,7 @@ public async Task InsertAsync([NotNull] IFdbTransaction trans, Slice key) if ((keyHash & ((1 << (level * LEVEL_FAN_POW)) - 1)) != 0) { //Console.WriteLine("> [" + level + "] Incrementing previous key: " + FdbKey.Dump(prevKey)); - trans.AtomicAdd(this.Subspace.Partition.ByKey(level, prevKey), EncodeCount(1)); + trans.AtomicAdd(this.Subspace.Keys.Encode(level, prevKey), EncodeCount(1)); } else { @@ -120,15 +120,15 @@ public async Task InsertAsync([NotNull] IFdbTransaction trans, Slice key) public async Task ContainsAsync([NotNull] IFdbReadOnlyTransaction trans, Slice key) { - if (trans == null) throw new ArgumentNullException("trans"); - if (key.IsNull) throw new ArgumentException("Empty key not allowed in set", "key"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (key.IsNull) throw new ArgumentException("Empty key not allowed in set", nameof(key)); return (await trans.GetAsync(this.Subspace.Keys.Encode(0, key)).ConfigureAwait(false)).HasValue; } public async Task EraseAsync([NotNull] IFdbTransaction trans, Slice key) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); if (!(await ContainsAsync(trans, key).ConfigureAwait(false))) { @@ -138,7 +138,7 @@ public async Task EraseAsync([NotNull] IFdbTransaction trans, 
Slice key) for (int level = 0; level < MAX_LEVELS; level++) { // This could be optimized with hash - var k = this.Subspace.Partition.ByKey(level, key); + var k = this.Subspace.Keys.Encode(level, key); var c = await trans.GetAsync(k).ConfigureAwait(false); if (c.HasValue) trans.Clear(k); if (level == 0) continue; @@ -154,8 +154,8 @@ public async Task EraseAsync([NotNull] IFdbTransaction trans, Slice key) public async Task Rank([NotNull] IFdbReadOnlyTransaction trans, Slice key) { - if (trans == null) throw new ArgumentNullException("trans"); - if (key.IsNull) throw new ArgumentException("Empty key not allowed in set", "key"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (key.IsNull) throw new ArgumentException("Empty key not allowed in set", nameof(key)); if (!(await ContainsAsync(trans, key).ConfigureAwait(false))) { @@ -169,8 +169,8 @@ public async Task EraseAsync([NotNull] IFdbTransaction trans, Slice key) var lss = this.Subspace.Partition.ByKey(level); long lastCount = 0; var kcs = await trans.GetRange( - FdbKeySelector.FirstGreaterOrEqual(lss.Keys.Encode(rankKey)), - FdbKeySelector.FirstGreaterThan(lss.Keys.Encode(key)) + KeySelector.FirstGreaterOrEqual(lss.Keys.Encode(rankKey)), + KeySelector.FirstGreaterThan(lss.Keys.Encode(key)) ).ToListAsync().ConfigureAwait(false); foreach (var kc in kcs) { @@ -278,12 +278,12 @@ private async Task GetPreviousNodeAsync(IFdbTransaction trans, int level, var k = this.Subspace.Keys.Encode(level, key); //Console.WriteLine(k); //Console.WriteLine("GetPreviousNode(" + level + ", " + key + ")"); - //Console.WriteLine(FdbKeySelector.LastLessThan(k) + " <= x < " + FdbKeySelector.FirstGreaterOrEqual(k)); + //Console.WriteLine(KeySelector.LastLessThan(k) + " <= x < " + KeySelector.FirstGreaterOrEqual(k)); var kv = await trans .Snapshot .GetRange( - FdbKeySelector.LastLessThan(k), - FdbKeySelector.FirstGreaterOrEqual(k) + KeySelector.LastLessThan(k), + KeySelector.FirstGreaterOrEqual(k) ) .FirstAsync() 
.ConfigureAwait(false); diff --git a/FoundationDB.Layers.Common/Collections/FdbVector`1.cs b/FoundationDB.Layers.Common/Collections/FdbVector`1.cs index 6ab0d44ed..8683600a1 100644 --- a/FoundationDB.Layers.Common/Collections/FdbVector`1.cs +++ b/FoundationDB.Layers.Common/Collections/FdbVector`1.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,18 +28,21 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Linq; - using JetBrains.Annotations; using System; using System.Linq; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; + using JetBrains.Annotations; /// Represents a potentially sparse array in FoundationDB. + [PublicAPI] public class FdbVector { - // from https://github.com/FoundationDB/python-layers/blob/master/lib/vector.py + // from https://apple.github.io/foundationdb/vector.html // Vector stores each of its values using its index as the key. // The size of a vector is equal to the index of its last key + 1. 
@@ -59,39 +62,40 @@ public class FdbVector /// Create a new sparse Vector /// Subspace where the vector will be stored - /// Sparse entries will be assigned the value Slice.Empty - public FdbVector([NotNull] FdbSubspace subspace) - : this(subspace, default(T)) + /// Default value for sparse entries + /// Encoder used for the values of this vector + public FdbVector([NotNull] IKeySubspace subspace, T defaultValue = default(T), IValueEncoder encoder = null) + : this(subspace.AsDynamic(), defaultValue, encoder) { } + /// Create a new sparse Vector /// Subspace where the vector will be stored /// Default value for sparse entries - public FdbVector([NotNull] IFdbSubspace subspace, T defaultValue) - : this(subspace, defaultValue, KeyValueEncoders.Tuples.Value()) - { } - public FdbVector([NotNull] IFdbSubspace subspace, T defaultValue, [NotNull] IValueEncoder encoder) + /// Encoder used for the values of this vector + public FdbVector([NotNull] IDynamicKeySubspace subspace, T defaultValue, IValueEncoder encoder = null) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); - this.Subspace = subspace.Using(TypeSystem.Tuples); + this.Subspace = subspace; this.DefaultValue = defaultValue; - this.Encoder = encoder; + this.Encoder = encoder ?? TuPack.Encoding.GetValueEncoder(); } /// Subspace used as a prefix for all items in this vector - public IFdbDynamicSubspace Subspace { [NotNull] get; private set; } + [NotNull] + public IDynamicKeySubspace Subspace { get; } /// Default value for sparse entries - public T DefaultValue { get; private set; } + public T DefaultValue { get; } - public IValueEncoder Encoder { [NotNull] get; private set; } + [NotNull] + public IValueEncoder Encoder { get; } /// Get the number of items in the Vector. This number includes the sparsely represented items. 
public Task SizeAsync([NotNull] IFdbReadOnlyTransaction tr) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); return ComputeSizeAsync(tr); } @@ -99,7 +103,7 @@ public Task SizeAsync([NotNull] IFdbReadOnlyTransaction tr) /// Push a single item onto the end of the Vector. public async Task PushAsync([NotNull] IFdbTransaction tr, T value) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); var size = await ComputeSizeAsync(tr).ConfigureAwait(false); @@ -109,7 +113,7 @@ public async Task PushAsync([NotNull] IFdbTransaction tr, T value) /// Get the value of the last item in the Vector. public Task BackAsync([NotNull] IFdbReadOnlyTransaction tr) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); return tr .GetRange(this.Subspace.Keys.ToRange()) @@ -124,9 +128,9 @@ public Task FrontAsync([NotNull] IFdbReadOnlyTransaction tr) } /// Get and pops the last item off the Vector. 
- public async Task> PopAsync([NotNull] IFdbTransaction tr) + public async Task<(T Value, bool HasValue)> PopAsync([NotNull] IFdbTransaction tr) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); var keyRange = this.Subspace.Keys.ToRange(); @@ -139,7 +143,7 @@ public async Task> PopAsync([NotNull] IFdbTransaction tr) .ConfigureAwait(false); // Vector was empty - if (lastTwo.Count == 0) return default(Optional); + if (lastTwo.Count == 0) return default; //note: keys are reversed so indices[0] = last, indices[1] = second to last var indices = lastTwo.Select(kvp => this.Subspace.Keys.DecodeFirst(kvp.Key)).ToList(); @@ -155,22 +159,22 @@ public async Task> PopAsync([NotNull] IFdbTransaction tr) tr.Clear(lastTwo[0].Key); - return this.Encoder.DecodeValue(lastTwo[0].Value); + return (this.Encoder.DecodeValue(lastTwo[0].Value), true); } /// Swap the items at positions i1 and i2. public async Task SwapAsync([NotNull] IFdbTransaction tr, long index1, long index2) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); - if (index1 < 0 || index2 < 0) throw new IndexOutOfRangeException(String.Format("Indices ({0}, {1}) must be positive", index1, index2)); + if (index1 < 0 || index2 < 0) throw new IndexOutOfRangeException($"Indices ({index1}, {index2}) must be positive"); var k1 = GetKeyAt(index1); var k2 = GetKeyAt(index2); long currentSize = await ComputeSizeAsync(tr).ConfigureAwait(false); - if (index1 >= currentSize || index2 >= currentSize) throw new IndexOutOfRangeException(String.Format("Indices ({0}, {1}) are out of range", index1, index2)); + if (index1 >= currentSize || index2 >= currentSize) throw new IndexOutOfRangeException($"Indices ({index1}, {index2}) are out of range"); var vs = await tr.GetValuesAsync(new[] { k1, k2 }).ConfigureAwait(false); var v1 = vs[0]; @@ -198,8 +202,8 @@ public async Task SwapAsync([NotNull] 
IFdbTransaction tr, long index1, long inde /// Get the item at the specified index. public async Task GetAsync([NotNull] IFdbReadOnlyTransaction tr, long index) { - if (tr == null) throw new ArgumentNullException("tr"); - if (index < 0) throw new IndexOutOfRangeException(String.Format("Index {0} must be positive", index)); + if (tr == null) throw new ArgumentNullException(nameof(tr)); + if (index < 0) throw new IndexOutOfRangeException($"Index {index} must be positive"); var start = GetKeyAt(index); var end = this.Subspace.Keys.ToRange().End; @@ -221,13 +225,13 @@ public async Task GetAsync([NotNull] IFdbReadOnlyTransaction tr, long index) } // We requested a value past the end of the vector - throw new IndexOutOfRangeException(String.Format("Index {0} out of range", index)); + throw new IndexOutOfRangeException($"Index {index} out of range"); } /// [NOT YET IMPLEMENTED] Get a range of items in the Vector, returned as an async sequence. - public IFdbAsyncEnumerable GetRangeAsync([NotNull] IFdbReadOnlyTransaction tr, long startIndex, long endIndex, long step) + public IAsyncEnumerable GetRangeAsync([NotNull] IFdbReadOnlyTransaction tr, long startIndex, long endIndex, long step) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); //BUGUBG: implement FdbVector.GetRangeAsync() ! @@ -237,7 +241,7 @@ public IFdbAsyncEnumerable GetRangeAsync([NotNull] IFdbReadOnlyTransaction tr /// Set the value at a particular index in the Vector. public void Set([NotNull] IFdbTransaction tr, long index, T value) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); tr.Set(GetKeyAt(index), this.Encoder.EncodeValue(value)); } @@ -245,7 +249,7 @@ public void Set([NotNull] IFdbTransaction tr, long index, T value) /// Test whether the Vector is empty. 
public async Task EmptyAsync([NotNull] IFdbReadOnlyTransaction tr) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); return (await ComputeSizeAsync(tr).ConfigureAwait(false)) == 0; } @@ -253,7 +257,7 @@ public async Task EmptyAsync([NotNull] IFdbReadOnlyTransaction tr) /// Grow or shrink the size of the Vector. public async Task ResizeAsync([NotNull] IFdbTransaction tr, long length) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); long currentSize = await ComputeSizeAsync(tr).ConfigureAwait(false); @@ -276,7 +280,7 @@ public async Task ResizeAsync([NotNull] IFdbTransaction tr, long length) /// Remove all items from the Vector. public void Clear([NotNull] IFdbTransaction tr) { - if (tr == null) throw new ArgumentNullException("tr"); + if (tr == null) throw new ArgumentNullException(nameof(tr)); tr.ClearRange(this.Subspace); } @@ -289,7 +293,7 @@ private async Task ComputeSizeAsync(IFdbReadOnlyTransaction tr) var keyRange = this.Subspace.Keys.ToRange(); - var lastKey = await tr.GetKeyAsync(FdbKeySelector.LastLessOrEqual(keyRange.End)).ConfigureAwait(false); + var lastKey = await tr.GetKeyAsync(KeySelector.LastLessOrEqual(keyRange.End)).ConfigureAwait(false); if (lastKey < keyRange.Begin) { diff --git a/FoundationDB.Layers.Common/Counters/FdbCounterMap.cs b/FoundationDB.Layers.Common/Counters/FdbCounterMap.cs index 5e774248a..50634f263 100644 --- a/FoundationDB.Layers.Common/Counters/FdbCounterMap.cs +++ b/FoundationDB.Layers.Common/Counters/FdbCounterMap.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -32,6 +32,7 @@ namespace FoundationDB.Layers.Counters using JetBrains.Annotations; using System; using System.Threading.Tasks; + using Doxense.Serialization.Encoders; /// Providers a dictionary of 64-bit counters that can be updated atomically /// Type of the key in the counter map @@ -41,28 +42,25 @@ public sealed class FdbCounterMap private static readonly Slice MinusOne = Slice.FromFixed64(-1); /// Create a new counter map. - public FdbCounterMap([NotNull] IFdbSubspace subspace) - : this(subspace, KeyValueEncoders.Tuples.Key()) + public FdbCounterMap([NotNull] IKeySubspace subspace) + : this(subspace.AsTyped()) { } /// Create a new counter map, using a specific key encoder. - public FdbCounterMap([NotNull] IFdbSubspace subspace, [NotNull] IKeyEncoder keyEncoder) + public FdbCounterMap([NotNull] ITypedKeySubspace subspace) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (keyEncoder == null) throw new ArgumentNullException("keyEncoder"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); this.Subspace = subspace; - this.KeyEncoder = keyEncoder; - this.Location = subspace.UsingEncoder(keyEncoder); + this.Location = subspace; } /// Subspace used as a prefix for all items in this counter list - public IFdbSubspace Subspace { [NotNull] get; private set; } + [NotNull] + public IKeySubspace Subspace { get; } - /// Encoder for the keys of the counter map - public IKeyEncoder KeyEncoder { [NotNull] get; private set; } - - internal IFdbEncoderSubspace Location { [NotNull] get; private set; } + [NotNull] + internal ITypedKeySubspace Location { get; } /// Add a value to a counter in one atomic operation /// @@ -71,12 +69,12 @@ public FdbCounterMap([NotNull] IFdbSubspace subspace, [NotNull] IKeyEncoderThis operation will not cause the current transaction to conflict. It may create conflicts for transactions that would read the value of the counter. 
public void Add([NotNull] IFdbTransaction transaction, [NotNull] TKey counterKey, long value) { - if (transaction == null) throw new ArgumentNullException("transaction"); - if (counterKey == null) throw new ArgumentNullException("counterKey"); + if (transaction == null) throw new ArgumentNullException(nameof(transaction)); + if (counterKey == null) throw new ArgumentNullException(nameof(counterKey)); //REVIEW: we could no-op if value == 0 but this may change conflict behaviour for other transactions... Slice param = value == 1 ? PlusOne : value == -1 ? MinusOne : Slice.FromFixed64(value); - transaction.AtomicAdd(this.Location.Keys.Encode(counterKey), param); + transaction.AtomicAdd(this.Location.Keys[counterKey], param); } /// Subtract a value from a counter in one atomic operation @@ -113,10 +111,10 @@ public void Decrement([NotNull] IFdbTransaction transaction, [NotNull] TKey coun /// public async Task ReadAsync([NotNull] IFdbReadOnlyTransaction transaction, [NotNull] TKey counterKey) { - if (transaction == null) throw new ArgumentNullException("transaction"); - if (counterKey == null) throw new ArgumentNullException("counterKey"); + if (transaction == null) throw new ArgumentNullException(nameof(transaction)); + if (counterKey == null) throw new ArgumentNullException(nameof(counterKey)); - var data = await transaction.GetAsync(this.Location.Keys.Encode(counterKey)).ConfigureAwait(false); + var data = await transaction.GetAsync(this.Location.Keys[counterKey]).ConfigureAwait(false); if (data.IsNullOrEmpty) return default(long?); return data.ToInt64(); } @@ -128,8 +126,8 @@ public void Decrement([NotNull] IFdbTransaction transaction, [NotNull] TKey coun /// This method WILL conflict with other transactions! 
public async Task AddThenReadAsync([NotNull] IFdbTransaction transaction, [NotNull] TKey counterKey, long value) { - if (transaction == null) throw new ArgumentNullException("transaction"); - if (counterKey == null) throw new ArgumentNullException("counterKey"); + if (transaction == null) throw new ArgumentNullException(nameof(transaction)); + if (counterKey == null) throw new ArgumentNullException(nameof(counterKey)); var key = this.Location.Keys.Encode(counterKey); var res = await transaction.GetAsync(key).ConfigureAwait(false); @@ -162,10 +160,10 @@ public Task DecrementThenReadAsync([NotNull] IFdbTransaction transaction, /// This method WILL conflict with other transactions! public async Task ReadThenAddAsync([NotNull] IFdbTransaction transaction, [NotNull] TKey counterKey, long value) { - if (transaction == null) throw new ArgumentNullException("transaction"); - if (counterKey == null) throw new ArgumentNullException("counterKey"); + if (transaction == null) throw new ArgumentNullException(nameof(transaction)); + if (counterKey == null) throw new ArgumentNullException(nameof(counterKey)); - var key = this.Location.Keys.Encode(counterKey); + var key = this.Location.Keys[counterKey]; var res = await transaction.GetAsync(key).ConfigureAwait(false); long previous = res.IsNullOrEmpty ? 0 : res.ToInt64(); diff --git a/FoundationDB.Layers.Common/Counters/FdbHighContentionCounter.cs b/FoundationDB.Layers.Common/Counters/FdbHighContentionCounter.cs index cb8e7c21b..e8a17ad02 100644 --- a/FoundationDB.Layers.Common/Counters/FdbHighContentionCounter.cs +++ b/FoundationDB.Layers.Common/Counters/FdbHighContentionCounter.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -33,6 +33,8 @@ namespace FoundationDB.Layers.Counters using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Serialization.Encoders; /// Represents an integer value which can be incremented without conflict. /// Uses a sharded representation (which scales with contention) along with background coalescing... @@ -54,33 +56,33 @@ public class FdbHighContentionCounter /// Create a new High Contention counter. /// Database used by this layer /// Subspace to be used for storing the counter - public FdbHighContentionCounter([NotNull] IFdbDatabase db, [NotNull] IFdbSubspace subspace) - : this(db, subspace, KeyValueEncoders.Tuples.Value()) + public FdbHighContentionCounter([NotNull] IFdbDatabase db, [NotNull] IKeySubspace subspace) + : this(db, subspace.AsDynamic(), TuPack.Encoding.GetValueEncoder()) { } /// Create a new High Contention counter, using a specific value encoder. 
/// Database used by this layer /// Subspace to be used for storing the counter /// Encoder for the counter values - public FdbHighContentionCounter([NotNull] IFdbDatabase db, [NotNull] IFdbSubspace subspace, [NotNull] IValueEncoder encoder) + public FdbHighContentionCounter([NotNull] IFdbDatabase db, [NotNull] IDynamicKeySubspace subspace, [NotNull] IValueEncoder encoder) { - if (db == null) throw new ArgumentNullException("db"); - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); + if (db == null) throw new ArgumentNullException(nameof(db)); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); + if (encoder == null) throw new ArgumentNullException(nameof(encoder)); this.Database = db; - this.Subspace = subspace.Using(TypeSystem.Tuples); + this.Subspace = subspace.AsDynamic(); this.Encoder = encoder; } /// Subspace used as a prefix for all items in this table - public IFdbDynamicSubspace Subspace {[NotNull] get; private set; } + public IDynamicKeySubspace Subspace {[NotNull] get; } /// Database instance that is used to perform background coalescing of the counter - public IFdbDatabase Database {[NotNull] get; private set; } + public IFdbDatabase Database {[NotNull] get; } /// Encoder for the integer values of the counter - public IValueEncoder Encoder {[NotNull] get; private set; } + public IValueEncoder Encoder {[NotNull] get; } /// Generate a new random slice protected virtual Slice RandomId() @@ -129,7 +131,7 @@ private async Task Coalesce(int N, CancellationToken ct) catch (FdbException x) { //TODO: logging ? - System.Diagnostics.Debug.WriteLine("Coalesce error: " + x.Message); + System.Diagnostics.Debug.WriteLine($"Coalesce error: {x.Message}"); return; } } @@ -155,7 +157,7 @@ private void BackgroundCoalesce(int n, CancellationToken ct) { var x = t.Exception; //TODO: logging ? 
- System.Diagnostics.Debug.WriteLine("Background Coalesce error: " + x.ToString()); + System.Diagnostics.Debug.WriteLine($"Background Coalesce error: {x}"); } }); } @@ -172,7 +174,7 @@ private void BackgroundCoalesce(int n, CancellationToken ct) /// public async Task GetTransactional([NotNull] IFdbReadOnlyTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); long total = 0; await trans @@ -186,7 +188,7 @@ await trans /// Get the value of the counter with snapshot isolation (no transaction conflicts). public Task GetSnapshot([NotNull] IFdbReadOnlyTransaction trans) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); return GetTransactional(trans.Snapshot); } @@ -194,7 +196,7 @@ public Task GetSnapshot([NotNull] IFdbReadOnlyTransaction trans) /// Add the value x to the counter. public void Add([NotNull] IFdbTransaction trans, long x) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); trans.Set(this.Subspace.Keys.Encode(RandomId()), this.Encoder.EncodeValue(x)); @@ -212,7 +214,7 @@ public void Add([NotNull] IFdbTransaction trans, long x) /// Set the counter to value x. public async Task SetTotal([NotNull] IFdbTransaction trans, long x) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); long value = await GetSnapshot(trans).ConfigureAwait(false); Add(trans, x - value); @@ -223,27 +225,27 @@ public async Task SetTotal([NotNull] IFdbTransaction trans, long x) /// Get the value of the counter. /// Not recommended for use with read/write transactions when the counter is being frequently updated (conflicts will be very likely). 
/// - public Task GetTransactionalAsync(CancellationToken cancellationToken) + public Task GetTransactionalAsync(CancellationToken ct) { - return this.Database.ReadAsync((tr) => this.GetTransactional(tr), cancellationToken); + return this.Database.ReadAsync((tr) => GetTransactional(tr), ct); } /// Get the value of the counter with snapshot isolation (no transaction conflicts). - public Task GetSnapshotAsync(CancellationToken cancellationToken) + public Task GetSnapshotAsync(CancellationToken ct) { - return this.Database.ReadAsync((tr) => this.GetSnapshot(tr), cancellationToken); + return this.Database.ReadAsync((tr) => GetSnapshot(tr), ct); } /// Add the value x to the counter. - public Task AddAsync(long x, CancellationToken cancellationToken) + public Task AddAsync(long x, CancellationToken ct) { - return this.Database.WriteAsync((tr) => this.Add(tr, x), cancellationToken); + return this.Database.WriteAsync((tr) => Add(tr, x), ct); } /// Set the counter to value x. - public Task SetTotalAsync(long x, CancellationToken cancellationToken) + public Task SetTotalAsync(long x, CancellationToken ct) { - return this.Database.ReadWriteAsync((tr) => this.SetTotal(tr, x), cancellationToken); + return this.Database.ReadWriteAsync((tr) => SetTotal(tr, x), ct); } #endregion diff --git a/FoundationDB.Layers.Common/FoundationDB.Layers.Common.csproj b/FoundationDB.Layers.Common/FoundationDB.Layers.Common.csproj index d0b6420c4..58c62feb4 100644 --- a/FoundationDB.Layers.Common/FoundationDB.Layers.Common.csproj +++ b/FoundationDB.Layers.Common/FoundationDB.Layers.Common.csproj @@ -1,82 +1,39 @@ - - - + + - Debug - AnyCPU - {7C7717D6-A1E7-4541-AF8B-1AC762B5ED0F} - Library - Properties + netstandard2.0 FoundationDB.Layers.Common FoundationDB.Layers.Common - v4.5 - 512 - + true + ..\Common\foundationdb-net-client.snk + 5.1.0-alpha1 + Doxense + http://opensource.org/licenses/BSD-3-Clause + http://github.com/Doxense/foundationdb-dotnet-client + 
http://doxense.github.io/foundationdb-dotnet-client/nuget/foundationdb.png + http://github.com/Doxense/foundationdb-dotnet-client + foundationdb fdb nosql + This is a pre-release of the .NET Binding, the public API is still subject to changes. + Common Layers for the FoundationDB .NET Binding + Copyright 2013-2018 Doxense SAS - - true + + + true + latest full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - false - 105,108,109,114,472,660,661,628,1066 - AnyCPU - - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false - bin\Release\FoundationDB.Layers.Common.XML - 105,108,109,114,472,660,661,628,1066 - AnyCPU - - - true + true - - ..\Common\foundationdb-net-client.snk + + + true + 105,108,109,114,472,660,661,628,1066,NU1605 + bin\Release\netstandard2.0\FoundationDB.Layers.Common.xml + latest + - - - - - - - Properties\VersionInfo.cs - - - - - - - - - - - - - - - - - {773166b7-de74-4fcc-845c-84080cc89533} - FoundationDB.Client - + - - - \ No newline at end of file + + diff --git a/FoundationDB.Layers.Common/Indexes/FdbIndex`2.cs b/FoundationDB.Layers.Common/Indexes/FdbIndex`2.cs index b6276d998..9035effb0 100644 --- a/FoundationDB.Layers.Common/Indexes/FdbIndex`2.cs +++ b/FoundationDB.Layers.Common/Indexes/FdbIndex`2.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,14 +28,12 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Indexing { - using FoundationDB.Client; - using FoundationDB.Layers.Tuples; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics; - using System.Globalization; using System.Threading.Tasks; + using FoundationDB.Client; + using JetBrains.Annotations; /// Simple index that maps values of type into lists of ids of type /// Type of the unique id of each document or entity @@ -44,34 +42,32 @@ namespace FoundationDB.Layers.Indexing public class FdbIndex { - public FdbIndex([NotNull] string name, [NotNull] IFdbSubspace subspace, IEqualityComparer valueComparer = null, bool indexNullValues = false) - : this(name, subspace, valueComparer, indexNullValues, KeyValueEncoders.Tuples.CompositeKey()) + public FdbIndex([NotNull] string name, [NotNull] IKeySubspace subspace, IEqualityComparer valueComparer = null, bool indexNullValues = false) + : this(name, subspace.AsTyped(), valueComparer, indexNullValues) { } - public FdbIndex([NotNull] string name, [NotNull] IFdbSubspace subspace, IEqualityComparer valueComparer, bool indexNullValues, [NotNull] ICompositeKeyEncoder encoder) + public FdbIndex([NotNull] string name, [NotNull] ITypedKeySubspace subspace, IEqualityComparer valueComparer, bool indexNullValues) { - if (name == null) throw new ArgumentNullException("name"); - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); + if (name == null) throw new ArgumentNullException(nameof(name)); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); this.Name = name; this.Subspace = subspace; this.ValueComparer = valueComparer ?? 
EqualityComparer.Default; this.IndexNullValues = indexNullValues; - this.Location = subspace.UsingEncoder(encoder); } - public string Name { [NotNull] get; private set; } + public string Name { [NotNull] get; } - public IFdbSubspace Subspace { [NotNull] get; private set; } - - protected IFdbEncoderSubspace Location { [NotNull] get; private set; } + [NotNull] + public ITypedKeySubspace Subspace { get; } - public IEqualityComparer ValueComparer { [NotNull] get; private set; } + [NotNull] + public IEqualityComparer ValueComparer { get; } /// If true, null values are inserted in the index. If false (default), they are ignored /// This has no effect if is not a reference type - public bool IndexNullValues { get; private set; } + public bool IndexNullValues { get; } /// Insert a newly created entity to the index /// Transaction to use @@ -82,7 +78,7 @@ public bool Add([NotNull] IFdbTransaction trans, TId id, TValue value) { if (this.IndexNullValues || value != null) { - trans.Set(this.Location.Keys.Encode(value, id), Slice.Empty); + trans.Set(this.Subspace.Keys[value, id], Slice.Empty); return true; } return false; @@ -102,13 +98,13 @@ public bool Update([NotNull] IFdbTransaction trans, TId id, TValue newValue, TVa // remove previous value if (this.IndexNullValues || previousValue != null) { - trans.Clear(this.Location.Keys.Encode(previousValue, id)); + trans.Clear(this.Subspace.Keys[previousValue, id]); } // add new value if (this.IndexNullValues || newValue != null) { - trans.Set(this.Location.Keys.Encode(newValue, id), Slice.Empty); + trans.Set(this.Subspace.Keys[newValue, id], Slice.Empty); } // cannot be both null, so we did at least something) @@ -123,9 +119,9 @@ public bool Update([NotNull] IFdbTransaction trans, TId id, TValue newValue, TVa /// Previous value of the entity in the index public void Remove([NotNull] IFdbTransaction trans, TId id, TValue value) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new 
ArgumentNullException(nameof(trans)); - trans.Clear(this.Location.Keys.Encode(value, id)); + trans.Clear(this.Subspace.Keys[value, id]); } /// Returns a list of ids matching a specific value @@ -148,43 +144,43 @@ public Task> LookupAsync([NotNull] IFdbReadOnlyTransaction trans, TVal [NotNull] public FdbRangeQuery Lookup(IFdbReadOnlyTransaction trans, TValue value, bool reverse = false) { - var prefix = this.Location.Partial.Keys.Encode(value); + var prefix = this.Subspace.Keys.EncodePartial(value); return trans - .GetRange(FdbKeyRange.StartsWith(prefix), new FdbRangeOptions { Reverse = reverse }) - .Select((kvp) => this.Location.Keys.Decode(kvp.Key).Item2); + .GetRange(KeyRange.StartsWith(prefix), new FdbRangeOptions { Reverse = reverse }) + .Select((kvp) => this.Subspace.Keys.Decode(kvp.Key).Item2); } [NotNull] public FdbRangeQuery LookupGreaterThan([NotNull] IFdbReadOnlyTransaction trans, TValue value, bool orEqual, bool reverse = false) { - var prefix = this.Location.Partial.Keys.Encode(value); + var prefix = this.Subspace.Keys.EncodePartial(value); if (!orEqual) prefix = FdbKey.Increment(prefix); - var space = new FdbKeySelectorPair( - FdbKeySelector.FirstGreaterThan(prefix), - FdbKeySelector.FirstGreaterOrEqual(this.Location.ToRange().End) + var space = new KeySelectorPair( + KeySelector.FirstGreaterThan(prefix), + KeySelector.FirstGreaterOrEqual(this.Subspace.ToRange().End) ); return trans .GetRange(space, new FdbRangeOptions { Reverse = reverse }) - .Select((kvp) => this.Location.Keys.Decode(kvp.Key).Item2); + .Select((kvp) => this.Subspace.Keys.Decode(kvp.Key).Item2); } [NotNull] public FdbRangeQuery LookupLessThan([NotNull] IFdbReadOnlyTransaction trans, TValue value, bool orEqual, bool reverse = false) { - var prefix = this.Location.Partial.Keys.Encode(value); + var prefix = this.Subspace.Keys.EncodePartial(value); if (orEqual) prefix = FdbKey.Increment(prefix); - var space = new FdbKeySelectorPair( - 
FdbKeySelector.FirstGreaterOrEqual(this.Location.ToRange().Begin), - FdbKeySelector.FirstGreaterThan(prefix) + var space = new KeySelectorPair( + KeySelector.FirstGreaterOrEqual(this.Subspace.ToRange().Begin), + KeySelector.FirstGreaterThan(prefix) ); return trans .GetRange(space, new FdbRangeOptions { Reverse = reverse }) - .Select((kvp) => this.Location.Keys.Decode(kvp.Key).Item2); + .Select((kvp) => this.Subspace.Keys.Decode(kvp.Key).Item2); } public override string ToString() diff --git a/FoundationDB.Layers.Common/Interning/FdbStringIntern.cs b/FoundationDB.Layers.Common/Interning/FdbStringIntern.cs index 0ff97aeed..f12879b9f 100644 --- a/FoundationDB.Layers.Common/Interning/FdbStringIntern.cs +++ b/FoundationDB.Layers.Common/Interning/FdbStringIntern.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,21 +26,20 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -#undef DEBUG_STRING_INTERNING +//#define DEBUG_STRING_INTERNING namespace FoundationDB.Layers.Interning { - using FoundationDB.Client; - using FoundationDB.Layers.Tuples; using System; using System.Collections.Generic; using System.Diagnostics; using System.Security.Cryptography; using System.Threading; using System.Threading.Tasks; + using FoundationDB.Client; /// Provides a class for interning (aka normalizing, aliasing) commonly-used long strings into shorter representations. - [DebuggerDisplay("Subspace={Subspace}")] + [DebuggerDisplay("Subspace={" + nameof(FdbStringIntern.Subspace) + "}")] [Obsolete("FIXME! 
This version of the layer has a MAJOR bug!")] public class FdbStringIntern : IDisposable { @@ -91,15 +90,15 @@ public override int GetHashCode() private readonly RandomNumberGenerator m_prng = RandomNumberGenerator.Create(); private readonly ReaderWriterLockSlim m_lock = new ReaderWriterLockSlim(); - public FdbStringIntern(IFdbSubspace subspace) + public FdbStringIntern(IKeySubspace subspace) { - if (subspace == null) throw new ArgumentNullException("subspace"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); - this.Subspace = subspace.Using(TypeSystem.Tuples); + this.Subspace = subspace.AsDynamic(); } - public IFdbDynamicSubspace Subspace { get; private set; } + public IDynamicKeySubspace Subspace { get; } #region Private Helpers... @@ -135,8 +134,7 @@ private void EvictCache() m_uidsInCache.RemoveAt(m_uidsInCache.Count - 1); // remove from caches, account for bytes - string value; - if (!m_uidStringCache.TryGetValue(uidKey, out value) || value == null) + if (!m_uidStringCache.TryGetValue(uidKey, out string value) || value == null) { throw new InvalidOperationException("Error in cache evication: string not found"); } @@ -221,7 +219,7 @@ private async Task FindUidAsync(IFdbTransaction trans) } //TODO: another way ? 
- throw new InvalidOperationException("Failed to find a free uid for interned string after " + MAX_TRIES + " attempts"); + throw new InvalidOperationException($"Failed to find a free uid for interned string after {MAX_TRIES} attempts"); } #endregion @@ -235,8 +233,8 @@ private async Task FindUidAsync(IFdbTransaction trans) /// The length of the string must not exceed the maximum FoundationDB value size public Task InternAsync(IFdbTransaction trans, string value) { - if (trans == null) throw new ArgumentNullException("trans"); - if (value == null) throw new ArgumentNullException("value"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (value == null) throw new ArgumentNullException(nameof(value)); if (value.Length == 0) return Task.FromResult(Slice.Empty); @@ -244,9 +242,7 @@ public Task InternAsync(IFdbTransaction trans, string value) Debug.WriteLine("Want to intern: " + value); #endif - Uid uidKey; - - if (m_stringUidCache.TryGetValue(value, out uidKey)) + if (m_stringUidCache.TryGetValue(value, out Uid uidKey)) { #if DEBUG_STRING_INTERNING Debug.WriteLine("> found in cache! 
" + uidKey); @@ -300,13 +296,12 @@ private async Task InternSlowAsync(IFdbTransaction trans, string value) /// Return the long string associated with the normalized representation public Task LookupAsync(IFdbReadOnlyTransaction trans, Slice uid) { - if (trans == null) throw new ArgumentNullException("trans"); - if (uid.IsNull) throw new ArgumentException("String uid cannot be nil", "uid"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (uid.IsNull) throw new ArgumentException("String uid cannot be nil", nameof(uid)); if (uid.IsEmpty) return Task.FromResult(String.Empty); - string value; - if (m_uidStringCache.TryGetValue(new Uid(uid), out value)) + if (m_uidStringCache.TryGetValue(new Uid(uid), out string value)) { return Task.FromResult(value); } diff --git a/FoundationDB.Layers.Common/Optional`1.cs b/FoundationDB.Layers.Common/Optional`1.cs deleted file mode 100644 index 8a734a334..000000000 --- a/FoundationDB.Layers.Common/Optional`1.cs +++ /dev/null @@ -1,385 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Layers -{ - using FoundationDB.Client; - using System; - using System.Collections.Generic; - using System.Linq; - using System.Runtime.InteropServices; - - /// Helper class for the value type - public static class Optional - { - #region Wrapping... 
- - /// Returns an with the specified value - public static Optional Return(T value) - { - return new Optional(value); - } - - /// Returns an empty - public static Optional Empty() - { - return default(Optional); - } - - /// Returns an array of from an array of values - public static Optional[] Wrap(T[] values) - { - if (values == null) return null; - var tmp = new Optional[values.Length]; - for (int i = 0; i < values.Length; i++) - { - tmp[i] = new Optional(values[i]); - } - return tmp; - } - - /// Converts a into an - /// Nullable value type - public static Optional Wrap(Nullable value) - where T : struct - { - if (!value.HasValue) - return default(Optional); - return new Optional(value.Value); - } - - /// Converts an array of into an array of - /// Nullable value type - public static Optional[] Wrap(Nullable[] values) - where T : struct - { - if (values == null) throw new ArgumentNullException("values"); - var tmp = new Optional[values.Length]; - for (int i = 0; i < values.Length; i++) - { - if (values[i].HasValue) tmp[i] = new Optional(values[i].Value); - } - return tmp; - } - - /// Transforms a sequence of into a sequence of - public static IEnumerable> AsOptional(IEnumerable> source) - where T : struct - { - if (source == null) throw new ArgumentNullException("source"); - - return source.Select(value => value.HasValue ? new Optional(value.Value) : default(Optional)); - } - - #endregion - - #region Single... - - /// Converts a into a - /// Nullable value type - public static Nullable ToNullable(this Optional value) - where T : struct - { - return !value.HasValue ? default(Nullable) : value.Value; - } - - #endregion - - #region Array... 
- - /// Extract the values from an array of - /// Nullable value type - /// Array of optional values - /// Default value for empty values - /// Array of values - public static T[] Unwrap(Optional[] values, T defaultValue) - { - if (values == null) throw new ArgumentNullException("values"); - - var tmp = new T[values.Length]; - for (int i = 0; i < values.Length; i++) - { - tmp[i] = values[i].GetValueOrDefault(defaultValue); - } - return tmp; - } - - /// Converts an array of into an array of - /// Nullable value type - public static Nullable[] ToNullable(Optional[] values) - where T : struct - { - if (values == null) throw new ArgumentNullException("values"); - - var tmp = new Nullable[values.Length]; - for (int i = 0; i < values.Length; i++) - { - if (values[i].HasValue) tmp[i] = values[i].Value; - } - return tmp; - } - - /// Converts an array of into an array of - /// Nullable value type - public static T[] Unwrap(Optional[] values) - where T : class - { - if (values == null) throw new ArgumentNullException("values"); - - var tmp = new T[values.Length]; - for (int i = 0; i < values.Length; i++) - { - if (values[i].HasValue) tmp[i] = values[i].Value; - } - return tmp; - } - - #endregion - - #region Enumerable... - - /// Transforms a sequence of into a sequence of values. 
- /// Type of the elements of - /// Sequence of optional values - /// Default value for empty entries - /// Sequence of values, using for empty entries - public static IEnumerable Unwrap(this IEnumerable> source, T defaultValue) - { - if (source == null) throw new ArgumentNullException("source"); - - return source.Select(value => value.GetValueOrDefault(defaultValue)); - } - - /// Transforms a sequence of into a sequence of - /// Type of the elements of - /// Source of optional values - /// Sequence of nullable values - public static IEnumerable> AsNullable(this IEnumerable> source) - where T : struct - { - if (source == null) throw new ArgumentNullException("source"); - - return source.Select(value => !value.HasValue ? default(Nullable) : value.Value); - } - - /// Transforms a squence of into a sequence of values - /// Type of the elements of - /// Source of optional values - /// Sequence of values, using the default of for empty entries - public static IEnumerable Unwrap(this IEnumerable> source) - where T : class - { - if (source == null) throw new ArgumentNullException("source"); - - return source.Select(value => value.GetValueOrDefault()); - } - - #endregion - - #region Decoding... - - /// Decode an array of slices into an array of - /// Type of the decoded values - /// Decoder used to produce the values - /// Array of slices to decode. Entries equal to will not be decoded and returned as an empty optional. - /// Array of decoded . 
- public static Optional[] DecodeRange(IValueEncoder decoder, Slice[] data) - { - if (decoder == null) throw new ArgumentNullException("decoder"); - if (data == null) throw new ArgumentNullException("data"); - - var values = new Optional[data.Length]; - for (int i = 0; i < data.Length; i++) - { - Slice item; - if ((item = data[i]).HasValue) - { - values[i] = new Optional(decoder.DecodeValue(item)); - } - } - return values; - } - - /// Decode a sequence of slices into a sequence of - /// Type of the decoded values - /// Sequence of slices to decode. Entries equal to will not be decoded and returned as an empty optional. - /// Decoder used to produce the values - /// Sequence of decoded . - public static IEnumerable> Decode(this IEnumerable source, IValueEncoder decoder) - { - if (decoder == null) throw new ArgumentNullException("decoder"); - if (source == null) throw new ArgumentNullException("source"); - - return source.Select(value => value.HasValue ? decoder.DecodeValue(value) : default(Optional)); - } - - #endregion - - } - - /// Container that is either empty (no value) or null (for reference types), or contains a value of type . - /// Type of the value - [Serializable, StructLayout(LayoutKind.Sequential)] - public struct Optional : IEquatable>, IEquatable - { - // This is the equivalent of Nullable that would accept reference types. - // The main difference is that, 'null' is a legal value for reference types, which is distinct from "no value" - // i.e.: new Optional(null).HasValue == true - - //REVIEW: this looks very similar to Maybe, except without the handling of errors. Maybe we could merge both? - - private readonly bool m_hasValue; - - private readonly T m_value; - - /// Initializes a new instance of the structure to the specified value. - public Optional(T value) - { - m_hasValue = true; - m_value = value; - } - - /// Gets the value of the current value. - /// This can return null for reference types! 
- public T Value - { - get - { - if (!m_hasValue) - { // we construct and throw the exception in a static helper, to help with inlining - NoValue(); - } - return m_value; - } - } - - /// Gets a value indicating whether the current object has a value. - public bool HasValue { get { return m_hasValue; } } - - /// Retrieves the value of the current object, or the object's default value. - public T GetValueOrDefault() - { - return m_value; - } - - /// Retrieves the value of the current object, or the specified default value. - public T GetValueOrDefault(T defaultValue) - { - return m_hasValue ? m_value : defaultValue; - } - - public override string ToString() - { - if (!m_hasValue || m_value == null) return String.Empty; - return m_value.ToString(); - } - - public bool Equals(Optional value) - { - return m_hasValue == value.m_hasValue && EqualityComparer.Default.Equals(m_value, value.m_value); - } - - public bool Equals(T value) - { - return m_hasValue && EqualityComparer.Default.Equals(m_value, value); - } - - public override int GetHashCode() - { - if (!m_hasValue || m_value == null) return 0; - return m_value.GetHashCode(); - } - - /// Indicates whether the current object is equal to a specified object. - public override bool Equals(object obj) - { - if (obj is T) return Equals((T)obj); - if (obj is Optional) return Equals((Optional)obj); - return m_hasValue ? 
object.Equals(m_value, obj) : object.ReferenceEquals(obj, null); - } - - public static bool operator ==(Optional a, Optional b) - { - return a.Equals(b); - } - - public static bool operator !=(Optional a, Optional b) - { - return !a.Equals(b); - } - - public static bool operator ==(Optional a, T b) - { - return a.Equals(b); - } - - public static bool operator !=(Optional a, T b) - { - return !a.Equals(b); - } - - public static bool operator ==(T a, Optional b) - { - return b.Equals(a); - } - - public static bool operator !=(T a, Optional b) - { - return !b.Equals(a); - } - - public static bool operator ==(Optional? a, Optional? b) - { - // Needed to be able to write stuff like "if (optional == null)", the compiler will automatically lift "foo == null" to nullables if foo is a struct that implements the '==' operator - return a.GetValueOrDefault().Equals(b.GetValueOrDefault()); - } - - public static bool operator !=(Optional? a, Optional? b) - { - // Needed to be able to write stuff like "if (optional != null)", the compiler will automatically lift "foo != null" to nullables if foo is a struct implements the '!=' operator - return !a.GetValueOrDefault().Equals(b.GetValueOrDefault()); - } - - public static explicit operator T(Optional value) - { - return value.Value; - } - - public static implicit operator Optional(T value) - { - return new Optional(value); - } - - private static void NoValue() - { - throw new InvalidOperationException("Nullable object must have a value."); - } - - } - -} diff --git a/FoundationDB.Layers.Common/Properties/AssemblyInfo.cs b/FoundationDB.Layers.Common/Properties/AssemblyInfo.cs index 4646390c0..c4485a375 100644 --- a/FoundationDB.Layers.Common/Properties/AssemblyInfo.cs +++ b/FoundationDB.Layers.Common/Properties/AssemblyInfo.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -29,10 +29,6 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY using System.Reflection; using System.Runtime.InteropServices; -[assembly: AssemblyTitle("FoundationDB.Layers.Common")] -[assembly: AssemblyDescription("Common Layers for the FoundationDB .NET Binding")] -[assembly: AssemblyConfiguration("")] - [assembly: ComVisible(false)] [assembly: Guid("0fce138d-cb61-49fd-bb0a-a0ecb37abe79")] diff --git a/FoundationDB.Layers.Experimental/Documents/FdbDocumentCollection.cs b/FoundationDB.Layers.Experimental/Documents/FdbDocumentCollection.cs index 76611e893..d85d118c4 100644 --- a/FoundationDB.Layers.Experimental/Documents/FdbDocumentCollection.cs +++ b/FoundationDB.Layers.Experimental/Documents/FdbDocumentCollection.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,16 +28,12 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Documents { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Directories; - using FoundationDB.Layers.Tuples; - using FoundationDB.Linq; using System; using System.Collections.Generic; using System.Linq; - using System.Threading; using System.Threading.Tasks; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; /// Represents a collection of dictionaries of fields. 
public class FdbDocumentCollection @@ -46,29 +42,27 @@ public class FdbDocumentCollection public const int DefaultChunkSize = 1 << 20; // 1 MB - public FdbDocumentCollection(FdbSubspace subspace, Func selector, IValueEncoder valueEncoder) - : this(subspace, selector, KeyValueEncoders.Tuples.CompositeKey(), valueEncoder) + public FdbDocumentCollection(IKeySubspace subspace, Func selector, IValueEncoder valueEncoder) + : this(subspace.AsTyped(), selector, valueEncoder) { } - public FdbDocumentCollection(FdbSubspace subspace, Func selector, ICompositeKeyEncoder keyEncoder, IValueEncoder valueEncoder) + public FdbDocumentCollection(ITypedKeySubspace subspace, Func selector, IValueEncoder valueEncoder) { - if (subspace == null) throw new ArgumentNullException("subspace"); - if (selector == null) throw new ArgumentNullException("selector"); - if (keyEncoder == null) throw new ArgumentNullException("keyEncoder"); - if (valueEncoder == null) throw new ArgumentNullException("valueEncoder"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); + if (selector == null) throw new ArgumentNullException(nameof(selector)); + if (valueEncoder == null) throw new ArgumentNullException(nameof(valueEncoder)); this.Subspace = subspace; this.IdSelector = selector; this.ValueEncoder = valueEncoder; - this.Location = subspace.UsingEncoder(keyEncoder); } protected virtual Task> LoadPartsAsync(IFdbReadOnlyTransaction trans, TId id) { - var key = this.Location.Partial.Keys.Encode(id); + var key = this.Subspace.Keys.EncodePartial(id); return trans - .GetRange(FdbKeyRange.StartsWith(key)) //TODO: options ? + .GetRange(KeyRange.StartsWith(key)) //TODO: options ? 
.Select(kvp => kvp.Value) .ToListAsync(); } @@ -80,15 +74,13 @@ protected virtual TDocument DecodeParts(List parts) } /// Subspace used as a prefix for all hashsets in this collection - public FdbSubspace Subspace { get; private set; } - - protected IFdbEncoderSubspace Location { get; private set; } + public ITypedKeySubspace Subspace { get; } /// Encoder that packs/unpacks the documents - public IValueEncoder ValueEncoder { get; private set; } + public IValueEncoder ValueEncoder { get; } /// Lambda function used to extract the ID from a document - public Func IdSelector { get; private set; } + public Func IdSelector { get; } /// Maximum size of a document chunk (1 MB by default) public int ChunkSize { get; private set; } @@ -96,8 +88,8 @@ protected virtual TDocument DecodeParts(List parts) /// Insert a new document in the collection public void Insert(IFdbTransaction trans, TDocument document) { - if (trans == null) throw new ArgumentNullException("trans"); - if (document == null) throw new ArgumentNullException("document"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (document == null) throw new ArgumentNullException(nameof(document)); var id = this.IdSelector(document); if (id == null) throw new InvalidOperationException("Cannot insert a document with a null identifier"); @@ -106,10 +98,10 @@ public void Insert(IFdbTransaction trans, TDocument document) var packed = this.ValueEncoder.EncodeValue(document); // Key Prefix = ...(id,) - var key = this.Location.Partial.Keys.Encode(id); + var key = this.Subspace.Keys.EncodePartial(id); // clear previous value - trans.ClearRange(FdbKeyRange.StartsWith(key)); + trans.ClearRange(KeyRange.StartsWith(key)); int remaining = packed.Count; if (remaining <= this.ChunkSize) @@ -127,7 +119,7 @@ public void Insert(IFdbTransaction trans, TDocument document) while (remaining > 0) { int sz = Math.Max(remaining, this.ChunkSize); - trans.Set(this.Location.Keys.Encode(id, index), packed.Substring(p, sz)); 
+ trans.Set(this.Subspace.Keys[id, index], packed.Substring(p, sz)); ++index; p += sz; remaining -= sz; @@ -141,8 +133,8 @@ public void Insert(IFdbTransaction trans, TDocument document) /// public async Task LoadAsync(IFdbReadOnlyTransaction trans, TId id) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); // only for ref types + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); // only for ref types var parts = await LoadPartsAsync(trans, id).ConfigureAwait(false); @@ -155,8 +147,8 @@ public async Task LoadAsync(IFdbReadOnlyTransaction trans, TId id) /// public async Task> LoadMultipleAsync(IFdbReadOnlyTransaction trans, IEnumerable ids) { - if (trans == null) throw new ArgumentNullException("trans"); - if (ids == null) throw new ArgumentNullException("ids"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (ids == null) throw new ArgumentNullException(nameof(ids)); var results = await Task.WhenAll(ids.Select(id => LoadPartsAsync(trans, id))); @@ -168,11 +160,11 @@ public async Task> LoadMultipleAsync(IFdbReadOnlyTransaction tra /// public void Delete(IFdbTransaction trans, TId id) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); - var key = this.Location.Partial.Keys.Encode(id); - trans.ClearRange(FdbKeyRange.StartsWith(key)); + var key = this.Subspace.Keys.EncodePartial(id); + trans.ClearRange(KeyRange.StartsWith(key)); } @@ -181,12 +173,13 @@ public void Delete(IFdbTransaction trans, TId id) /// public void DeleteMultiple(IFdbTransaction trans, IEnumerable ids) { - if (trans == null) throw new ArgumentNullException("trans"); - if (ids == null) throw new ArgumentNullException("ids"); + if 
(trans == null) throw new ArgumentNullException(nameof(trans)); + if (ids == null) throw new ArgumentNullException(nameof(ids)); - foreach (var key in this.Location.Partial.Keys.Encode(ids)) + foreach (var id in ids) { - trans.ClearRange(FdbKeyRange.StartsWith(key)); + var key = this.Subspace.Keys.EncodePartial(id); + trans.ClearRange(KeyRange.StartsWith(key)); } } @@ -195,8 +188,8 @@ public void DeleteMultiple(IFdbTransaction trans, IEnumerable ids) /// public void Delete(IFdbTransaction trans, TDocument document) { - if (trans == null) throw new ArgumentNullException("trans"); - if (document == null) throw new ArgumentNullException("document"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (document == null) throw new ArgumentNullException(nameof(document)); var id = this.IdSelector(document); if (id == null) throw new InvalidOperationException(); @@ -209,8 +202,8 @@ public void Delete(IFdbTransaction trans, TDocument document) /// public void DeleteMultiple(IFdbTransaction trans, IEnumerable documents) { - if (trans == null) throw new ArgumentNullException("trans"); - if (documents == null) throw new ArgumentNullException("documents"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (documents == null) throw new ArgumentNullException(nameof(documents)); DeleteMultiple(trans, documents.Select(document => this.IdSelector(document))); } diff --git a/FoundationDB.Layers.Experimental/Documents/FdbDocumentHandlers.cs b/FoundationDB.Layers.Experimental/Documents/FdbDocumentHandlers.cs index 35a301f3b..b34986d50 100644 --- a/FoundationDB.Layers.Experimental/Documents/FdbDocumentHandlers.cs +++ b/FoundationDB.Layers.Experimental/Documents/FdbDocumentHandlers.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,21 +28,16 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Documents { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using FoundationDB.Linq; using System; using System.Collections.Generic; using System.Linq; - using System.Threading; - using System.Threading.Tasks; + using Doxense.Collections.Tuples; /// Interface that defines a class that knows of to chop instances of into slices /// Type of documents public interface IDocumentSplitter { - KeyValuePair[] Split(TDocument document); + KeyValuePair[] Split(TDocument document); } /// Interface that defines a class that knows of to reconstruct instances of from slices @@ -50,7 +45,7 @@ public interface IDocumentSplitter public interface IDocumentBuilder { - TDocument Build(KeyValuePair[] parts); + TDocument Build(KeyValuePair[] parts); } /// Interface that defines a class that knows of to store and retrieve serialized versions of instances into a document collection @@ -83,7 +78,7 @@ public static class FdbDocumentHandlers /// Docuemnt handler that handle dictionarys of string to objects /// /// - public sealed class DictionaryHandler : IDocumentHandler>> + public sealed class DictionaryHandler : IDocumentHandler>> where TDictionary : IDictionary, new() { @@ -95,33 +90,33 @@ public DictionaryHandler(string idName = null, IEqualityComparer compare private readonly IEqualityComparer m_keyComparer; - public string IdName { get; private set; } + public string IdName { get; } - public KeyValuePair[] Split(List> document) + public KeyValuePair[] Split(List> document) { - if (document == null) throw new ArgumentNullException("document"); + if (document == null) throw new ArgumentNullException(nameof(document)); return document // don't include the id .Where(kvp => !m_keyComparer.Equals(kvp.Key, this.IdName)) // convert into tuples - .Select(kvp => new KeyValuePair( - FdbTuple.Create(kvp.Key), - FdbTuple.Create(kvp.Value).ToSlice() + .Select(kvp => new 
KeyValuePair( + STuple.Create(kvp.Key), + TuPack.Pack(kvp.Value) )) .ToArray(); } - public List> Build(KeyValuePair[] parts) + public List> Build(KeyValuePair[] parts) { - if (parts == null) throw new ArgumentNullException("parts"); + if (parts == null) throw new ArgumentNullException(nameof(parts)); - var list = new List>(parts.Length); + var list = new List>(parts.Length); foreach(var part in parts) { - list.Add(new KeyValuePair( + list.Add(new KeyValuePair( part.Key.Last(), - FdbTuple.Unpack(part.Value) + TuPack.Unpack(part.Value) )); } return list; @@ -132,28 +127,28 @@ public TId GetId(TDictionary document) return (TId)document[this.IdName]; } - public void SetId(Dictionary document, TId id) + public void SetId(Dictionary document, TId id) { - document[this.IdName] = FdbTuple.Create(id); + document[this.IdName] = STuple.Create(id); } - public List> Pack(TDictionary document) + public List> Pack(TDictionary document) { - var dic = new List>(document.Count); + var dic = new List>(document.Count); // convert everything, except the Id foreach(var kvp in document) { if (!m_keyComparer.Equals(kvp.Key, this.IdName)) { - dic.Add(new KeyValuePair(kvp.Key, FdbTuple.Create(kvp.Key))); + dic.Add(new KeyValuePair(kvp.Key, STuple.Create(kvp.Key))); } } return dic; } - public TDictionary Unpack(List> packed, TId id) + public TDictionary Unpack(List> packed, TId id) { var dic = new TDictionary(); dic.Add(this.IdName, id); diff --git a/FoundationDB.Layers.Experimental/Documents/FdbHashSetCollection.cs b/FoundationDB.Layers.Experimental/Documents/FdbHashSetCollection.cs index bd41e125f..d30df3854 100644 --- a/FoundationDB.Layers.Experimental/Documents/FdbHashSetCollection.cs +++ b/FoundationDB.Layers.Experimental/Documents/FdbHashSetCollection.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,18 +28,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Blobs { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using FoundationDB.Linq; - using JetBrains.Annotations; using System; using System.Collections.Generic; - using System.Globalization; - using System.IO; - using System.Linq; - using System.Threading; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Diagnostics.Contracts; + using FoundationDB.Client; + using JetBrains.Annotations; // THIS IS NOT AN OFFICIAL LAYER, JUST A PROTOTYPE TO TEST A FEW THINGS ! @@ -47,20 +42,20 @@ namespace FoundationDB.Layers.Blobs public class FdbHashSetCollection { - public FdbHashSetCollection(IFdbSubspace subspace) + public FdbHashSetCollection(IKeySubspace subspace) { - if (subspace == null) throw new ArgumentNullException("subspace"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); - this.Subspace = subspace.Using(TypeSystem.Tuples); + this.Subspace = subspace.AsDynamic(); } /// Subspace used as a prefix for all hashsets in this collection - public IFdbDynamicSubspace Subspace { get; private set; } + public IDynamicKeySubspace Subspace { get; } /// Returns the key prefix of an HashSet: (subspace, id, ) /// /// - protected virtual Slice GetKey(IFdbTuple id) + protected virtual Slice GetKey(ITuple id) { //REVIEW: should the id be encoded as a an embedded tuple or not? return this.Subspace.Keys.Pack(id); @@ -70,13 +65,13 @@ protected virtual Slice GetKey(IFdbTuple id) /// /// /// - protected virtual Slice GetFieldKey(IFdbTuple id, string field) + protected virtual Slice GetFieldKey(ITuple id, string field) { //REVIEW: should the id be encoded as a an embedded tuple or not? 
return this.Subspace.Keys.Pack(id.Append(field)); } - protected virtual string ParseFieldKey(IFdbTuple key) + protected virtual string ParseFieldKey(ITuple key) { return key.Last(); } @@ -88,11 +83,11 @@ protected virtual string ParseFieldKey(IFdbTuple key) /// Unique identifier of the hashset /// Name of the field to read /// Value of the corresponding field, or Slice.Nil if it the hashset does not exist, or doesn't have a field with this name - public Task GetValueAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IFdbTuple id, string field) + public Task GetValueAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] ITuple id, string field) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); - if (string.IsNullOrEmpty(field)) throw new ArgumentNullException("field"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); + if (string.IsNullOrEmpty(field)) throw new ArgumentNullException(nameof(field)); return trans.GetAsync(GetFieldKey(id, field)); } @@ -101,16 +96,16 @@ public Task GetValueAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNu /// Transaction that will be used for this request /// Unique identifier of the hashset /// Dictionary containing, for all fields, their associated values - public async Task> GetAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IFdbTuple id) + public async Task> GetAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] ITuple id) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); var prefix = GetKey(id); var results = new Dictionary(StringComparer.OrdinalIgnoreCase); await trans - .GetRange(FdbKeyRange.StartsWith(prefix)) + .GetRange(KeyRange.StartsWith(prefix)) 
.ForEachAsync((kvp) => { string field = this.Subspace.Keys.DecodeLast(kvp.Key); @@ -126,13 +121,13 @@ await trans /// Unique identifier of the hashset /// List of the fields to read /// Dictionary containing the values of the selected fields, or Slice.Empty if that particular field does not exist. - public async Task> GetAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] IFdbTuple id, [NotNull] params string[] fields) + public async Task> GetAsync([NotNull] IFdbReadOnlyTransaction trans, [NotNull] ITuple id, [NotNull] params string[] fields) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); - if (fields == null) throw new ArgumentNullException("fields"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); + if (fields == null) throw new ArgumentNullException(nameof(fields)); - var keys = FdbTuple.EncodePrefixedKeys(GetKey(id), fields); + var keys = TuPack.EncodePrefixedKeys(GetKey(id), fields); var values = await trans.GetValuesAsync(keys).ConfigureAwait(false); Contract.Assert(values != null && values.Length == fields.Length); @@ -149,24 +144,24 @@ public async Task> GetAsync([NotNull] IFdbReadOnlyTra #region Set - public void SetValue(IFdbTransaction trans, IFdbTuple id, string field, Slice value) + public void SetValue(IFdbTransaction trans, ITuple id, string field, Slice value) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); - if (string.IsNullOrEmpty(field)) throw new ArgumentNullException("field"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); + if (string.IsNullOrEmpty(field)) throw new ArgumentNullException(nameof(field)); trans.Set(GetFieldKey(id, field), value); } - public void Set(IFdbTransaction trans, IFdbTuple id, IEnumerable> fields) + 
public void Set(IFdbTransaction trans, ITuple id, IEnumerable> fields) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); - if (fields == null) throw new ArgumentNullException("fields"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); + if (fields == null) throw new ArgumentNullException(nameof(fields)); foreach (var field in fields) { - if (string.IsNullOrEmpty(field.Key)) throw new ArgumentException("Field cannot have an empty name", "fields"); + if (string.IsNullOrEmpty(field.Key)) throw new ArgumentException("Field cannot have an empty name", nameof(fields)); trans.Set(GetFieldKey(id, field.Key), field.Value); } } @@ -179,39 +174,39 @@ public void Set(IFdbTransaction trans, IFdbTuple id, IEnumerable /// /// - public void DeleteValue(IFdbTransaction trans, IFdbTuple id, string field) + public void DeleteValue(IFdbTransaction trans, ITuple id, string field) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); - if (string.IsNullOrEmpty(field)) throw new ArgumentNullException("field"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); + if (string.IsNullOrEmpty(field)) throw new ArgumentNullException(nameof(field)); trans.Clear(GetFieldKey(id, field)); } /// Remove all fields of an hashset /// - public void Delete(IFdbTransaction trans, IFdbTuple id) + public void Delete(IFdbTransaction trans, ITuple id) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); // remove all fields of the hash - trans.ClearRange(FdbKeyRange.StartsWith(GetKey(id))); + 
trans.ClearRange(KeyRange.StartsWith(GetKey(id))); } /// Remove one or more fields of an hashset /// /// /// - public void Delete(IFdbTransaction trans, IFdbTuple id, params string[] fields) + public void Delete(IFdbTransaction trans, ITuple id, params string[] fields) { - if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); - if (fields == null) throw new ArgumentNullException("fields"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); + if (fields == null) throw new ArgumentNullException(nameof(fields)); foreach (var field in fields) { - if (string.IsNullOrEmpty(field)) throw new ArgumentException("Field cannot have an empty name", "fields"); + if (string.IsNullOrEmpty(field)) throw new ArgumentException("Field cannot have an empty name", nameof(fields)); trans.Clear(GetFieldKey(id, field)); } } @@ -224,21 +219,21 @@ public void Delete(IFdbTransaction trans, IFdbTuple id, params string[] fields) /// Transaction that will be used for this request /// Unique identifier of the hashset /// List of all fields. If the list is empty, the hashset does not exist - public Task> GetKeys(IFdbReadOnlyTransaction trans, IFdbTuple id, CancellationToken cancellationToken = default(CancellationToken)) + public Task> GetKeys(IFdbReadOnlyTransaction trans, ITuple id) { //note: As of Beta2, FDB does not have a fdb_get_range that only return the keys. That means that we will have to also read the values from the db, in order to just get the names of the fields :( //TODO: find a way to optimize this ? 
- if (trans == null) throw new ArgumentNullException("trans"); - if (id == null) throw new ArgumentNullException("id"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); + if (id == null) throw new ArgumentNullException(nameof(id)); var prefix = GetKey(id); var results = new Dictionary(StringComparer.OrdinalIgnoreCase); return trans - .GetRange(FdbKeyRange.StartsWith(prefix)) - .Select((kvp) => ParseFieldKey(FdbTuple.Unpack(kvp.Key))) - .ToListAsync(cancellationToken); + .GetRange(KeyRange.StartsWith(prefix)) + .Select((kvp) => ParseFieldKey(TuPack.Unpack(kvp.Key))) + .ToListAsync(); } #endregion diff --git a/FoundationDB.Layers.Experimental/FoundationDB.Layers.Experimental.csproj b/FoundationDB.Layers.Experimental/FoundationDB.Layers.Experimental.csproj index f5c6c3fb2..d48d35b80 100644 --- a/FoundationDB.Layers.Experimental/FoundationDB.Layers.Experimental.csproj +++ b/FoundationDB.Layers.Experimental/FoundationDB.Layers.Experimental.csproj @@ -1,88 +1,40 @@ - - - + + - Debug - AnyCPU - {E631BCD4-386C-4EB1-AD4D-CABCE77BB4C8} - Library - Properties + netstandard2.0 FoundationDB.Layers FoundationDB.Layers.Experimental - v4.5 - 512 + true + ..\Common\foundationdb-net-client.snk + 5.1.0-alpha1 + Doxense + http://opensource.org/licenses/BSD-3-Clause + http://github.com/Doxense/foundationdb-dotnet-client + http://doxense.github.io/foundationdb-dotnet-client/nuget/foundationdb.png + http://github.com/Doxense/foundationdb-dotnet-client + foundationdb fdb nosql + This is a pre-release of the .NET Binding, the public API is still subject to changes. 
+ Experimental Layers for the FoundationDB .NET Binding + Copyright 2013-2018 Doxense SAS - - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - 105,108,109,114,472,660,661,628,1066 - AnyCPU + + true + latest + full + true - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - bin\Release\FoundationDB.Layers.Experimental.xml - 105,108,109,114,472,660,661,628,1066 - AnyCPU + + true + 105,108,109,114,472,660,661,628,1066,NU1605 + bin\Release\netstandard2.0\FoundationDB.Layers.Common.xml + latest - - true - - - ..\Common\foundationdb-net-client.snk - - - - - - - - - Properties\VersionInfo.cs - - - - - - - - - - - - - - - - - + - - {773166b7-de74-4fcc-845c-84080cc89533} - FoundationDB.Client - - - {7c7717d6-a1e7-4541-af8b-1ac762b5ed0f} - FoundationDB.Layers.Common - + + - - - \ No newline at end of file + + diff --git a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/BitRange.cs b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/BitRange.cs index 54bf20d80..5d3df5b4d 100644 --- a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/BitRange.cs +++ b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/BitRange.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,30 +28,25 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Experimental.Indexing { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; using System; - using System.Collections.Generic; using System.Diagnostics; - using System.Globalization; - using System.Text; /// Bounds of a Compressed Bitmaps, from the Lowest Set Bit to the Highest Set Bit [DebuggerDisplay("[{Lowest}, {Highest}]")] - public struct BitRange : IEquatable + public readonly struct BitRange : IEquatable { private const int LOWEST_UNDEFINED = 0; private const int HIGHEST_UNDEFINED = -1; - public static BitRange Empty { get { return new BitRange(LOWEST_UNDEFINED, HIGHEST_UNDEFINED); } } + public static BitRange Empty => new BitRange(LOWEST_UNDEFINED, HIGHEST_UNDEFINED); /// Index of the lowest bit that is set to 1 in the source Bitmap public readonly int Lowest; + /// Index of the highest bit that is set to 1 in the source Bitmap public readonly int Highest; - public bool IsEmpty { get { return this.Highest < this.Lowest; } } + public bool IsEmpty => this.Highest < this.Lowest; public BitRange(int lowest, int highest) { diff --git a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmap.cs b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmap.cs index 622701aa5..71b0346da 100644 --- a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmap.cs +++ b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmap.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -43,14 +43,14 @@ public sealed class CompressedBitmap : IEnumerable /// Returns a new instance of an empty bitmap public static readonly CompressedBitmap Empty = new CompressedBitmap(Slice.Empty, BitRange.Empty); - private /*readonly*/ Slice m_data; - private /*readonly*/ BitRange m_bounds; + private readonly Slice m_data; + private readonly BitRange m_bounds; public CompressedBitmap(Slice data) { - if (data.IsNull) throw new ArgumentNullException("data"); - if (data.Count > 0 && data.Count < 8) throw new ArgumentException("A compressed bitmap must either be empty, or at least 8 bytes long", "data"); - if ((data.Count & 3) != 0) throw new ArgumentException("A compressed bitmap size must be a multiple of 4 bytes", "data"); + if (data.IsNull) throw new ArgumentNullException(nameof(data)); + if (data.Count > 0 && data.Count < 8) throw new ArgumentException("A compressed bitmap must either be empty, or at least 8 bytes long", nameof(data)); + if ((data.Count & 3) != 0) throw new ArgumentException("A compressed bitmap size must be a multiple of 4 bytes", nameof(data)); if (data.Count == 0) { @@ -66,7 +66,7 @@ public CompressedBitmap(Slice data) internal CompressedBitmap(Slice data, BitRange bounds) { - if (data.IsNull) throw new ArgumentNullException("data"); + if (data.IsNull) throw new ArgumentNullException(nameof(data)); if (data.Count == 0) { @@ -75,8 +75,8 @@ internal CompressedBitmap(Slice data, BitRange bounds) } else { - if ((data.Count & 3) != 0) throw new ArgumentException("A compressed bitmap size must be a multiple of 4 bytes", "data"); - if (data.Count < 4) throw new ArgumentException("A compressed bitmap must be at least 4 bytes long", "data"); + if ((data.Count & 3) != 0) throw new ArgumentException("A compressed bitmap size must be a multiple of 4 bytes", nameof(data)); + if (data.Count < 4) throw new ArgumentException("A compressed bitmap must be at least 4 bytes long", nameof(data)); 
m_data = data; m_bounds = bounds; } @@ -93,16 +93,13 @@ public CompressedBitmapBuilder ToBuilder() /// Gets the underlying buffer of the compressed bitmap /// The content of the buffer MUST NOT be modified directly - internal Slice Data { get { return m_data; } } + internal Slice Data => m_data; /// Gets the bounds of the compressed bitmap - public BitRange Bounds { get { return m_bounds; } } + public BitRange Bounds => m_bounds; /// Number of Data Words in the compressed bitmap - public int Count - { - get { return m_data.IsNullOrEmpty ? 0 : (m_data.Count >> 2) - 1; } - } + public int Count => m_data.IsNullOrEmpty ? 0 : (m_data.Count >> 2) - 1; /// Test if the specified bit is set /// Offset of the bit to test @@ -149,8 +146,8 @@ public int CountBits() internal static BitRange ComputeBounds(Slice data, int words = -1) { int count = data.Count; - if (count > 0 && count < 8) throw new ArgumentException("Bitmap buffer size is too small", "data"); - if ((count & 3) != 0) throw new ArgumentException("Bitmap buffer size must be a multiple of 4 bytes", "data"); + if (count > 0 && count < 8) throw new ArgumentException("Bitmap buffer size is too small", nameof(data)); + if ((count & 3) != 0) throw new ArgumentException("Bitmap buffer size must be a multiple of 4 bytes", nameof(data)); // if the bitmap is empty, return 0..0 if (count == 0) return BitRange.Empty; diff --git a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapBitView.cs b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapBitView.cs index 06ebad93c..c9d48465d 100644 --- a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapBitView.cs +++ b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapBitView.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,11 +26,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion + namespace FoundationDB.Layers.Experimental.Indexing { - using JetBrains.Annotations; using System; using System.Collections.Generic; + using Doxense.Diagnostics.Contracts; + using JetBrains.Annotations; /// View that reads the indexes of all the set bits in a bitmap public class CompressedBitmapBitView : IEnumerable @@ -39,7 +41,7 @@ public class CompressedBitmapBitView : IEnumerable public CompressedBitmapBitView(CompressedBitmap bitmap) { - if (bitmap == null) throw new ArgumentNullException("bitmap"); + Contract.NotNull(bitmap, nameof(bitmap)); m_bitmap = bitmap; } diff --git a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapBuilder.cs b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapBuilder.cs index cd9f5c322..325467032 100644 --- a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapBuilder.cs +++ b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapBuilder.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,40 +28,40 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Experimental.Indexing { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using FoundationDB.Client; + using JetBrains.Annotations; /// Builder of compressed bitmaps that can set or clear bits in a random order, in memory public sealed class CompressedBitmapBuilder { - private static readonly CompressedWord[] s_emptyArray = new CompressedWord[0]; /// Returns a new instance of an empty bitmap builder - public static CompressedBitmapBuilder Empty - { - get { return new CompressedBitmapBuilder(s_emptyArray, 0, BitRange.Empty); } - } + public static CompressedBitmapBuilder Empty => new CompressedBitmapBuilder(Array.Empty(), 0, BitRange.Empty); /// Buffer of compressed words private CompressedWord[] m_words; + /// Number of words used in the buffer private int m_size; + /// Index of the lowest bit that is set (or int.MaxValue) private int m_lowest; + /// Index of the highest bit that is set (or -1) private int m_highest; public CompressedBitmapBuilder(CompressedBitmap bitmap) { - if (bitmap == null) throw new ArgumentNullException("bitmap"); - if ((bitmap.Data.Count & 3) != 0) throw new ArgumentException("Bitmap's underlying buffer size should be a multiple of 4 bytes", "bitmap"); + if (bitmap == null) throw new ArgumentNullException(nameof(bitmap)); + if ((bitmap.Data.Count & 3) != 0) throw new ArgumentException("Bitmap's underlying buffer size should be a multiple of 4 bytes", nameof(bitmap)); if (bitmap.Count == 0) { - m_words = s_emptyArray; + m_words = Array.Empty(); var range = BitRange.Empty; m_lowest = range.Lowest; m_highest = range.Highest; @@ -95,7 +95,7 @@ internal static CompressedWord[] DecodeWords(Slice data, int size, BitRange boun { Contract.Requires(size >= 0 && data.Count >= 4 && (data.Count & 3) == 0); - int capacity = 
SliceHelpers.NextPowerOfTwo(size); + int capacity = BitHelpers.NextPowerOfTwo(size); if (capacity < 0) capacity = size; var words = new CompressedWord[capacity]; @@ -110,10 +110,7 @@ internal static CompressedWord[] DecodeWords(Slice data, int size, BitRange boun } /// Returns the number of compressed words in the builder - public int Count - { - get { return m_size; } - } + public int Count => m_size; /// Compute the word index, and mask of a bit offset /// Bit offset (0-based) @@ -158,16 +155,11 @@ public void EnsureCapacity(int minSize) { if (minSize > m_size) { - int newSize = SliceHelpers.NextPowerOfTwo(minSize); + int newSize = BitHelpers.NextPowerOfTwo(minSize); if (newSize < 0) newSize = minSize; if (newSize < 8) newSize = 8; - //Console.WriteLine("> resize buffer to {0} words", newSize); Array.Resize(ref m_words, newSize); } - //else - //{ - // Console.WriteLine("> buffer has enough capacity {0} for min size {1}", m_words.Length, minSize); - //} } /// Gets or sets the value of a bit in the bitmap. @@ -203,7 +195,7 @@ internal void Shift(int position, int count) /// True if the bit was changed from 0 to 1; or false if it was already set. public bool Set(int index) { - if (index < 0) throw new ArgumentException("Bit index cannot be less than zero.", "index"); + if (index < 0) throw new ArgumentException("Bit index cannot be less than zero.", nameof(index)); //Console.WriteLine("Set({0}) on {1}-words bitmap", index, m_size); @@ -288,7 +280,7 @@ public bool Set(int index) /// True if the bit was changed from 1 to 0; or false if it was already unset. 
public bool Clear(int index) { - if (index < 0) throw new ArgumentException("Bit index cannot be less than zero.", "index"); + if (index < 0) throw new ArgumentException("Bit index cannot be less than zero.", nameof(index)); uint mask; int wordIndex = GetWordIndex(index, out mask); diff --git a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapWordIterator.cs b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapWordIterator.cs index c70fe17a7..5a77a644c 100644 --- a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapWordIterator.cs +++ b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapWordIterator.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,10 +28,10 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Experimental.Indexing { - using FoundationDB.Client; - using FoundationDB.Client.Utils; using System; using System.Collections.Generic; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; /// Iterator that reads 32-bit compressed words from a compressed bitmap public struct CompressedBitmapWordIterator : IEnumerator @@ -65,15 +65,9 @@ public bool MoveNext() return true; } - public CompressedWord Current - { - get { return new CompressedWord(m_current); } - } + public CompressedWord Current => new CompressedWord(m_current); - object System.Collections.IEnumerator.Current - { - get { return this.Current; } - } + object System.Collections.IEnumerator.Current => this.Current; public void Reset() { diff --git a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapWriter.cs b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapWriter.cs index 8f7a0ae63..75406e89e 100644 --- a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapWriter.cs +++ 
b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedBitmapWriter.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,10 +28,11 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Experimental.Indexing { + using System; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; using FoundationDB.Client; - using FoundationDB.Client.Utils; using JetBrains.Annotations; - using System; /// Writer that compresses a stream of bits into a , in memory public sealed class CompressedBitmapWriter @@ -64,7 +65,7 @@ public sealed class CompressedBitmapWriter /// Create a new compressed bitmap writer public CompressedBitmapWriter() - : this(SliceWriter.Empty, true) + : this(default(SliceWriter), true) { } /// Create a new compressed bitmap writer, with a hint for the initial capacity @@ -73,7 +74,7 @@ public CompressedBitmapWriter() public CompressedBitmapWriter(int capacity) : this(new SliceWriter(Math.Max(4 + capacity * 4, 20)), true) { - if (capacity < 0) throw new ArgumentOutOfRangeException("capacity"); + if (capacity < 0) throw new ArgumentOutOfRangeException(nameof(capacity)); } /// Create a new compressed bitmap writer, with a specific underlying buffer diff --git a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedWord.cs b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedWord.cs index c2fdf1871..93deadbf8 100644 --- a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedWord.cs +++ b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/CompressedWord.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,14 +28,14 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Experimental.Indexing { - using FoundationDB.Client.Utils; using System; using System.Diagnostics; using System.Globalization; + using Doxense.Diagnostics.Contracts; /// Represent a 32-bit word in a Compressed Bitmap [DebuggerDisplay("Literal={IsLiteral}, {WordCount} x {WordValue}")] - public struct CompressedWord + public readonly struct CompressedWord { internal const uint ALL_ZEROES = 0x0; internal const uint ALL_ONES = 0x7FFFFFFF; @@ -52,10 +52,7 @@ public CompressedWord(uint raw) /// Checks if this word is a literal /// Literal words have their MSB unset (0) - public bool IsLiteral - { - get { return (this.RawValue & WordAlignHybridEncoder.TYPE_MASK) == WordAlignHybridEncoder.BIT_TYPE_LITERAL; } - } + public bool IsLiteral => (this.RawValue & WordAlignHybridEncoder.TYPE_MASK) == WordAlignHybridEncoder.BIT_TYPE_LITERAL; /// Value of the 31-bit uncompressed word /// This word is repeated times in the in the uncompressed bitmap. @@ -88,31 +85,19 @@ public int WordValue /// Number of times the value is repeated in the uncompressed bitmap /// This value is 1 for literal words, and for filler words - public int WordCount - { - get { return this.IsLiteral ? 1 : this.FillCount; } - } + public int WordCount => this.IsLiteral ? 
1 : this.FillCount; /// Value of a literal word /// Only valid if is true - public int Literal - { - get { return (int)(this.RawValue & WordAlignHybridEncoder.LITERAL_MASK); } - } + public int Literal => (int)(this.RawValue & WordAlignHybridEncoder.LITERAL_MASK); /// Value of the fill bit (either 0 or 1) /// Only valid if is false - public int FillBit - { - get { return (int)((this.RawValue & WordAlignHybridEncoder.FILL_MASK) >> WordAlignHybridEncoder.FILL_SHIFT); } - } + public int FillBit => (int)((this.RawValue & WordAlignHybridEncoder.FILL_MASK) >> WordAlignHybridEncoder.FILL_SHIFT); - /// Number of 31-bit words that are filled by + /// Number of 31-bit words that are filled by /// Only valid if is false - public int FillCount - { - get { return 1 + (int)(this.RawValue & WordAlignHybridEncoder.LENGTH_MASK); } - } + public int FillCount => 1 + (int)(this.RawValue & WordAlignHybridEncoder.LENGTH_MASK); /// Return the position of the lowest set bit, or -1 /// Index from 0 to 30 of the lowest set bit, or -1 if the word is empty diff --git a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/WordAlignHybridCoding.cs b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/WordAlignHybridCoding.cs index dfe077c56..59d676d34 100644 --- a/FoundationDB.Layers.Experimental/Indexes/Bitmaps/WordAlignHybridCoding.cs +++ b/FoundationDB.Layers.Experimental/Indexes/Bitmaps/WordAlignHybridCoding.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,15 +28,12 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Experimental.Indexing { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using JetBrains.Annotations; using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Globalization; using System.Text; + using Doxense.Diagnostics.Contracts; + using Doxense.Memory; + using FoundationDB.Client; + using JetBrains.Annotations; public static class WordAlignHybridEncoder { @@ -111,7 +108,7 @@ public static class WordAlignHybridEncoder #endregion /// Helper class to read 31-bit words from an uncompressed source - private unsafe sealed class UncompressedWordReader + private sealed unsafe class UncompressedWordReader { /// Value returned by or when the input have less than 31 bits remaining public const uint NotEnough = 0xFFFFFFFF; @@ -245,10 +242,10 @@ public bool ReadIf(uint expected) } /// Returns the number of bits left in the register (0 if emtpy) - public int Bits { get { return m_bits; } } + public int Bits => m_bits; /// Returns the last word, padded with 0s - /// If there is at least one full word remaning + /// If there is at least one full word remaning public uint ReadLast() { if (m_bits >= 31) throw new InvalidOperationException("There are still words left to read in the source"); @@ -383,7 +380,7 @@ internal enum LogicalOperation [NotNull] public static CompressedBitmap Not([NotNull] this CompressedBitmap bitmap, int size) { - if (bitmap == null) throw new ArgumentNullException("bitmap"); + if (bitmap == null) throw new ArgumentNullException(nameof(bitmap)); // there is a high change that the final bitmap will have the same size, with an optional extra filler word at the end var writer = new CompressedBitmapWriter(bitmap.Count + 1); @@ -421,8 +418,8 @@ public static CompressedBitmap Not([NotNull] this CompressedBitmap bitmap, int s [NotNull] public static CompressedBitmap And([NotNull] this CompressedBitmap left, 
[NotNull] CompressedBitmap right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + if (left == null) throw new ArgumentNullException(nameof(left)); + if (right == null) throw new ArgumentNullException(nameof(right)); if (left.Count == 0 || right.Count == 0) return CompressedBitmap.Empty; return CompressedBinaryExpression(left, right, LogicalOperation.And); @@ -435,8 +432,8 @@ public static CompressedBitmap And([NotNull] this CompressedBitmap left, [NotNul [NotNull] public static CompressedBitmap Or([NotNull] this CompressedBitmap left, [NotNull] CompressedBitmap right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + if (left == null) throw new ArgumentNullException(nameof(left)); + if (right == null) throw new ArgumentNullException(nameof(right)); if (left.Count == 0) return right.Count == 0 ? CompressedBitmap.Empty : right; if (right.Count == 0) return left; @@ -450,8 +447,8 @@ public static CompressedBitmap Or([NotNull] this CompressedBitmap left, [NotNull [NotNull] public static CompressedBitmap Xor([NotNull] this CompressedBitmap left, [NotNull] CompressedBitmap right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + if (left == null) throw new ArgumentNullException(nameof(left)); + if (right == null) throw new ArgumentNullException(nameof(right)); if (left.Count == 0) return right.Count == 0 ? 
CompressedBitmap.Empty : right; if (right.Count == 0) return left; @@ -465,8 +462,8 @@ public static CompressedBitmap Xor([NotNull] this CompressedBitmap left, [NotNul [NotNull] public static CompressedBitmap AndNot([NotNull] this CompressedBitmap left, [NotNull] CompressedBitmap right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + if (left == null) throw new ArgumentNullException(nameof(left)); + if (right == null) throw new ArgumentNullException(nameof(right)); if (left.Count == 0 || right.Count == 0) return CompressedBitmap.Empty; return CompressedBinaryExpression(left, right, LogicalOperation.AndNot); @@ -479,8 +476,8 @@ public static CompressedBitmap AndNot([NotNull] this CompressedBitmap left, [Not [NotNull] public static CompressedBitmap OrNot([NotNull] this CompressedBitmap left, [NotNull] CompressedBitmap right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + if (left == null) throw new ArgumentNullException(nameof(left)); + if (right == null) throw new ArgumentNullException(nameof(right)); if (left.Count == 0) return right.Count == 0 ? CompressedBitmap.Empty : right; if (right.Count == 0) return left; @@ -494,8 +491,8 @@ public static CompressedBitmap OrNot([NotNull] this CompressedBitmap left, [NotN [NotNull] public static CompressedBitmap XorNot([NotNull] this CompressedBitmap left, [NotNull] CompressedBitmap right) { - if (left == null) throw new ArgumentNullException("left"); - if (right == null) throw new ArgumentNullException("right"); + if (left == null) throw new ArgumentNullException(nameof(left)); + if (right == null) throw new ArgumentNullException(nameof(right)); if (left.Count == 0) return right.Count == 0 ? 
CompressedBitmap.Empty : right; if (right.Count == 0) return left; diff --git a/FoundationDB.Layers.Experimental/Indexes/FdbCompressedBitmapIndex.cs b/FoundationDB.Layers.Experimental/Indexes/FdbCompressedBitmapIndex.cs index cafdf3d70..7c9c66455 100644 --- a/FoundationDB.Layers.Experimental/Indexes/FdbCompressedBitmapIndex.cs +++ b/FoundationDB.Layers.Experimental/Indexes/FdbCompressedBitmapIndex.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,6 +26,8 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion +using Doxense.Diagnostics.Contracts; + namespace FoundationDB.Layers.Experimental.Indexing { using FoundationDB.Client; @@ -36,6 +38,7 @@ namespace FoundationDB.Layers.Experimental.Indexing using System.Globalization; using System.Linq; using System.Threading.Tasks; + using Doxense.Serialization.Encoders; /// Simple index that maps values of type into lists of ids of type /// Type of the unique id of each document or entity @@ -44,34 +47,33 @@ namespace FoundationDB.Layers.Experimental.Indexing public class FdbCompressedBitmapIndex { - public FdbCompressedBitmapIndex([NotNull] string name, [NotNull] FdbSubspace subspace, IEqualityComparer valueComparer = null, bool indexNullValues = false) - : this(name, subspace, valueComparer, indexNullValues, KeyValueEncoders.Tuples.Key()) + public FdbCompressedBitmapIndex([NotNull] string name, [NotNull] IKeySubspace subspace, IEqualityComparer valueComparer = null, bool indexNullValues = false) + : this(name, subspace.AsTyped(), valueComparer, indexNullValues) { } - public FdbCompressedBitmapIndex([NotNull] string name, [NotNull] FdbSubspace subspace, IEqualityComparer valueComparer, bool indexNullValues, [NotNull] IKeyEncoder encoder) + public FdbCompressedBitmapIndex([NotNull] string name, [NotNull] ITypedKeySubspace subspace, IEqualityComparer 
valueComparer = null, bool indexNullValues = false) { - if (name == null) throw new ArgumentNullException("name"); - if (subspace == null) throw new ArgumentNullException("subspace"); - if (encoder == null) throw new ArgumentNullException("encoder"); + Contract.NotNull(name, nameof(name)); + Contract.NotNull(subspace, nameof(subspace)); this.Name = name; this.Subspace = subspace; this.ValueComparer = valueComparer ?? EqualityComparer.Default; this.IndexNullValues = indexNullValues; - this.Location = subspace.UsingEncoder(encoder); } - public string Name { [NotNull] get; private set; } - - public FdbSubspace Subspace { [NotNull] get; private set; } + [NotNull] + public string Name { get; } - protected IFdbEncoderSubspace Location { [NotNull] get; private set; } + [NotNull] + public ITypedKeySubspace Subspace { get; } - public IEqualityComparer ValueComparer { [NotNull] get; private set; } + [NotNull] + public IEqualityComparer ValueComparer { get; } /// If true, null values are inserted in the index. If false (default), they are ignored /// This has no effect if is not a reference type - public bool IndexNullValues { get; private set; } + public bool IndexNullValues { get; } /// Insert a newly created entity to the index /// Transaction to use @@ -80,11 +82,11 @@ public FdbCompressedBitmapIndex([NotNull] string name, [NotNull] FdbSubspace sub /// True if a value was inserted into the index; or false if is null and is false, or if this was already indexed at this . public async Task AddAsync([NotNull] IFdbTransaction trans, long id, TValue value) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); if (this.IndexNullValues || value != null) { - var key = this.Location.Keys.Encode(value); + var key = this.Subspace.Keys[value]; var data = await trans.GetAsync(key).ConfigureAwait(false); var builder = data.HasValue ? 
new CompressedBitmapBuilder(data) : CompressedBitmapBuilder.Empty; @@ -107,14 +109,14 @@ public async Task AddAsync([NotNull] IFdbTransaction trans, long id, TValu /// If and are identical, then nothing will be done. Otherwise, the old index value will be deleted and the new value will be added public async Task UpdateAsync([NotNull] IFdbTransaction trans, long id, TValue newValue, TValue previousValue) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); if (!this.ValueComparer.Equals(newValue, previousValue)) { // remove previous value if (this.IndexNullValues || previousValue != null) { - var key = this.Location.Keys.Encode(previousValue); + var key = this.Subspace.Keys[previousValue]; var data = await trans.GetAsync(key).ConfigureAwait(false); if (data.HasValue) { @@ -127,7 +129,7 @@ public async Task UpdateAsync([NotNull] IFdbTransaction trans, long id, TV // add new value if (this.IndexNullValues || newValue != null) { - var key = this.Location.Keys.Encode(newValue); + var key = this.Subspace.Keys[newValue]; var data = await trans.GetAsync(key).ConfigureAwait(false); var builder = data.HasValue ? new CompressedBitmapBuilder(data) : CompressedBitmapBuilder.Empty; builder.Set((int)id); //BUGBUG: 64 bit id! 
@@ -146,9 +148,9 @@ public async Task UpdateAsync([NotNull] IFdbTransaction trans, long id, TV /// Previous value of the entity in the index public async Task RemoveAsync([NotNull] IFdbTransaction trans, long id, TValue value) { - if (trans == null) throw new ArgumentNullException("trans"); + if (trans == null) throw new ArgumentNullException(nameof(trans)); - var key = this.Location.Keys.Encode(value); + var key = this.Subspace.Keys[value]; var data = await trans.GetAsync(key).ConfigureAwait(false); if (data.HasValue) { @@ -167,7 +169,7 @@ public async Task RemoveAsync([NotNull] IFdbTransaction trans, long id, TV /// List of document ids matching this value for this particular index (can be empty if no document matches) public async Task> LookupAsync([NotNull] IFdbReadOnlyTransaction trans, TValue value, bool reverse = false) { - var key = this.Location.Keys.Encode(value); + var key = this.Subspace.Keys[value]; var data = await trans.GetAsync(key).ConfigureAwait(false); if (data.IsNull) return null; if (data.IsEmpty) return Enumerable.Empty(); diff --git a/FoundationDB.Layers.Experimental/Messaging/FdbWorkerPool.cs b/FoundationDB.Layers.Experimental/Messaging/FdbWorkerPool.cs index a9c44da08..9b946b15d 100644 --- a/FoundationDB.Layers.Experimental/Messaging/FdbWorkerPool.cs +++ b/FoundationDB.Layers.Experimental/Messaging/FdbWorkerPool.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,19 +26,17 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -using FoundationDB.Client; -using FoundationDB.Layers.Tuples; -using FoundationDB.Filters.Logging; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Security.Cryptography; -using System.Threading; -using System.Threading.Tasks; -using FoundationDB.Layers.Counters; - namespace FoundationDB.Layers.Messaging { + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Security.Cryptography; + using System.Threading; + using System.Threading.Tasks; + using FoundationDB.Client; + using FoundationDB.Filters.Logging; + using FoundationDB.Layers.Counters; public class FdbWorkerMessage { @@ -64,17 +62,17 @@ public class FdbWorkerPool private readonly RandomNumberGenerator m_rng = RandomNumberGenerator.Create(); - public IFdbDynamicSubspace Subspace { get; private set; } + public IDynamicKeySubspace Subspace { get; } - internal IFdbDynamicSubspace TaskStore { get; private set; } + internal IDynamicKeySubspace TaskStore { get; } - internal IFdbDynamicSubspace IdleRing { get; private set; } + internal IDynamicKeySubspace IdleRing { get; } - internal IFdbDynamicSubspace BusyRing { get; private set; } + internal IDynamicKeySubspace BusyRing { get; } - internal IFdbDynamicSubspace UnassignedTaskRing { get; private set; } + internal IDynamicKeySubspace UnassignedTaskRing { get; } - internal FdbCounterMap Counters { get; private set; } + internal FdbCounterMap Counters { get; } #region Profiling... 
@@ -108,11 +106,11 @@ public class FdbWorkerPool #endregion - public FdbWorkerPool(IFdbSubspace subspace) + public FdbWorkerPool(IKeySubspace subspace) { - if (subspace == null) throw new ArgumentNullException("subspace"); + if (subspace == null) throw new ArgumentNullException(nameof(subspace)); - this.Subspace = subspace.Using(TypeSystem.Tuples); + this.Subspace = subspace.AsDynamic(); this.TaskStore = this.Subspace.Partition.ByKey(Slice.FromChar('T')); this.IdleRing = this.Subspace.Partition.ByKey(Slice.FromChar('I')); @@ -122,7 +120,7 @@ public FdbWorkerPool(IFdbSubspace subspace) this.Counters = new FdbCounterMap(this.Subspace.Partition.ByKey(Slice.FromChar('C'))); } - private async Task> FindRandomItem(IFdbTransaction tr, IFdbDynamicSubspace ring) + private async Task> FindRandomItem(IFdbTransaction tr, IDynamicKeySubspace ring) { var range = ring.Keys.ToRange(); @@ -154,7 +152,7 @@ private Slice GetRandomId() } } - private async Task PushQueueAsync(IFdbTransaction tr, IFdbDynamicSubspace queue, Slice taskId) + private async Task PushQueueAsync(IFdbTransaction tr, IDynamicKeySubspace queue, Slice taskId) { //TODO: use a high contention algo ? // - must support Push and Pop @@ -162,7 +160,7 @@ private async Task PushQueueAsync(IFdbTransaction tr, IFdbDynamicSubspace queue, // get the current size of the queue var range = queue.Keys.ToRange(); - var lastKey = await tr.Snapshot.GetKeyAsync(FdbKeySelector.LastLessThan(range.End)).ConfigureAwait(false); + var lastKey = await tr.Snapshot.GetKeyAsync(KeySelector.LastLessThan(range.End)).ConfigureAwait(false); int count = lastKey < range.Begin ? 
0 : queue.Keys.DecodeFirst(lastKey) + 1; // set the value @@ -171,12 +169,12 @@ private async Task PushQueueAsync(IFdbTransaction tr, IFdbDynamicSubspace queue, private void StoreTask(IFdbTransaction tr, Slice taskId, DateTime scheduledUtc, Slice taskBody) { - tr.Annotate("Writing task {0}", taskId.ToAsciiOrHexaString()); + tr.Annotate("Writing task {0:P}", taskId); var prefix = this.TaskStore.Partition.ByKey(taskId); // store task body and timestamp - tr.Set(prefix.Key, taskBody); + tr.Set(prefix.GetPrefix(), taskBody); tr.Set(prefix.Keys.Encode(TASK_META_SCHEDULED), Slice.FromInt64(scheduledUtc.Ticks)); // increment total and pending number of tasks this.Counters.Increment(tr, COUNTER_TOTAL_TASKS); @@ -185,10 +183,10 @@ private void StoreTask(IFdbTransaction tr, Slice taskId, DateTime scheduledUtc, private void ClearTask(IFdbTransaction tr, Slice taskId) { - tr.Annotate("Deleting task {0}", taskId.ToAsciiOrHexaString()); + tr.Annotate("Deleting task {0:P}", taskId); // clear all metadata about the task - tr.ClearRange(FdbKeyRange.StartsWith(this.TaskStore.Keys.Encode(taskId))); + tr.ClearRange(KeyRange.StartsWith(this.TaskStore.Keys.Encode(taskId))); // decrement pending number of tasks this.Counters.Decrement(tr, COUNTER_PENDING_TASKS); } @@ -201,16 +199,16 @@ private void ClearTask(IFdbTransaction tr, Slice taskId) /// public async Task ScheduleTaskAsync(IFdbRetryable db, Slice taskId, Slice taskBody, CancellationToken ct = default(CancellationToken)) { - if (db == null) throw new ArgumentNullException("db"); + if (db == null) throw new ArgumentNullException(nameof(db)); var now = DateTime.UtcNow; await db.ReadWriteAsync(async (tr) => { Interlocked.Increment(ref m_schedulingAttempts); #if DEBUG - if (tr.Context.Retries > 0) Console.WriteLine("# retry n°" + tr.Context.Retries + " for task " + taskId.ToAsciiOrHexaString()); + if (tr.Context.Retries > 0) Console.WriteLine($"# retry n°{tr.Context.Retries} for task {taskId:P}"); #endif - tr.Annotate("I want to 
schedule {0}", taskId.ToAsciiOrHexaString()); + tr.Annotate("I want to schedule {0:P}", taskId); // find a random worker from the idle ring var randomWorkerKey = await FindRandomItem(tr, this.IdleRing).ConfigureAwait(false); @@ -219,7 +217,7 @@ await db.ReadWriteAsync(async (tr) => { Slice workerId = this.IdleRing.Keys.Decode(randomWorkerKey.Key); - tr.Annotate("Assigning {0} to {1}", taskId.ToAsciiOrHexaString(), workerId.ToAsciiOrHexaString()); + tr.Annotate("Assigning {0:P} to {1:P}", taskId, workerId); // remove worker from the idle ring tr.Clear(this.IdleRing.Keys.Encode(workerId)); @@ -231,7 +229,7 @@ await db.ReadWriteAsync(async (tr) => } else { - tr.Annotate("Queueing {0}", taskId.ToAsciiOrHexaString()); + tr.Annotate("Queueing {0:P}", taskId); await PushQueueAsync(tr, this.UnassignedTaskRing, taskId).ConfigureAwait(false); } @@ -243,7 +241,7 @@ await db.ReadWriteAsync(async (tr) => { Interlocked.Increment(ref m_schedulingMessages); }, - cancellationToken: ct).ConfigureAwait(false); + ct: ct).ConfigureAwait(false); } static int counter = 0; @@ -270,7 +268,7 @@ public async Task RunWorkerAsync(IFdbDatabase db, Func { - tr.Annotate("I'm worker #{0} with id {1}", num, workerId.ToAsciiOrHexaString()); + tr.Annotate("I'm worker #{0} with id {1:P}", num, workerId); myId = workerId; watch = default(FdbWatch); @@ -304,13 +302,13 @@ await db.ReadWriteAsync( { // mark this worker as busy // note: we need a random id so generate one if it is the first time... 
if (!myId.IsPresent) myId = GetRandomId(); - tr.Annotate("Found {0}, switch to busy with id {1}", msg.Id.ToAsciiOrHexaString(), myId.ToAsciiOrHexaString()); + tr.Annotate("Found {0:P}, switch to busy with id {1:P}", msg.Id, myId); tr.Set(this.BusyRing.Keys.Encode(myId), msg.Id); this.Counters.Increment(tr, COUNTER_BUSY); } else if (myId.IsPresent) { // remove ourselves from the busy ring - tr.Annotate("Found nothing, switch to idle with id {0}", myId.ToAsciiOrHexaString()); + tr.Annotate("Found nothing, switch to idle with id {0:P}", myId); //tr.Clear(this.BusyRing.Pack(myId)); } } @@ -318,11 +316,11 @@ await db.ReadWriteAsync( if (msg.Id.IsPresent) { // get the task body - tr.Annotate("Fetching body for task {0}", msg.Id.ToAsciiOrHexaString()); + tr.Annotate("Fetching body for task {0:P}", msg.Id); var prefix = this.TaskStore.Partition.ByKey(msg.Id); //TODO: replace this with a get_range ? var data = await tr.GetValuesAsync(new [] { - prefix.ToFoundationDbKey(), + prefix.GetPrefix(), prefix.Keys.Encode(TASK_META_SCHEDULED) }).ConfigureAwait(false); @@ -345,7 +343,7 @@ await db.ReadWriteAsync( // the idle key will also be used as the watch key to wake us up var watchKey = this.IdleRing.Keys.Encode(myId); - tr.Annotate("Will start watching on key {0} with id {1}", watchKey.ToAsciiOrHexaString(), myId.ToAsciiOrHexaString()); + tr.Annotate("Will start watching on key {0:P} with id {1:P}", watchKey, myId); tr.Set(watchKey, Slice.Empty); this.Counters.Increment(tr, COUNTER_IDLE); @@ -358,7 +356,7 @@ await db.ReadWriteAsync( previousTaskId = Slice.Nil; workerId = myId; }, - cancellationToken: ct + ct: ct ).ConfigureAwait(false); if (msg.Id.IsNullOrEmpty) @@ -383,7 +381,7 @@ await db.ReadWriteAsync( { // the task has been dropped? // TODO: loggin? 
#if DEBUG - Console.WriteLine("[####] Task[" + msg.Id.ToAsciiOrHexaString() + "] has vanished?"); + Console.WriteLine($"[####] Task[{msg.Id:P}] has vanished?"); #endif } else @@ -396,7 +394,7 @@ await db.ReadWriteAsync( { //TODO: logging? #if DEBUG - Console.Error.WriteLine("Task[" + msg.Id.ToAsciiOrHexaString() + "] failed: " + e.ToString()); + Console.Error.WriteLine($"Task[{msg.Id:P}] failed: {e}"); #endif } } diff --git a/FoundationDB.Layers.Experimental/Messaging/WorkerPoolTest.cs b/FoundationDB.Layers.Experimental/Messaging/WorkerPoolTest.cs index 41ea9f191..63d8ec3d6 100644 --- a/FoundationDB.Layers.Experimental/Messaging/WorkerPoolTest.cs +++ b/FoundationDB.Layers.Experimental/Messaging/WorkerPoolTest.cs @@ -1,16 +1,15 @@ -using FoundationDB.Async; -using FoundationDB.Client; -using FoundationDB.Filters.Logging; -using System; -using System.Diagnostics; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; - + namespace FoundationDB.Layers.Messaging { - + using System; + using System.Diagnostics; + using System.Linq; + using System.Text; + using System.Threading; + using System.Threading.Tasks; + using Doxense.Async; + using FoundationDB.Client; + using FoundationDB.Filters.Logging; public class WorkerPoolTest { @@ -58,9 +57,9 @@ public void Main() } } - private async Task RunAsync(IFdbDatabase db, IFdbDynamicSubspace location, CancellationToken ct, Action done, int N, int K, int W) + private async Task RunAsync(IFdbDatabase db, IDynamicKeySubspace location, CancellationToken ct, Action done, int N, int K, int W) { - if (db == null) throw new ArgumentNullException("db"); + if (db == null) throw new ArgumentNullException(nameof(db)); StringBuilder sb = new StringBuilder(); @@ -73,7 +72,7 @@ private async Task RunAsync(IFdbDatabase db, IFdbDynamicSubspace location, Cance { var workerPool = new FdbWorkerPool(location); - Console.WriteLine("workerPool at " + location.Key.ToAsciiOrHexaString()); + 
Console.WriteLine($"workerPool at {location.GetPrefix():P}"); var workerSignal = new AsyncCancelableMutex(ct); var clientSignal = new AsyncCancelableMutex(ct); @@ -134,14 +133,14 @@ private async Task RunAsync(IFdbDatabase db, IFdbDynamicSubspace location, Cance Func dump = async (label) => { - Console.WriteLine(""); + Console.WriteLine($""); using (var tr = db.BeginTransaction(ct)) { await tr.Snapshot - .GetRange(FdbKeyRange.StartsWith(location.Key)) + .GetRange(KeyRange.StartsWith(location.GetPrefix())) .ForEachAsync((kvp) => { - Console.WriteLine(" - " + location.Keys.Unpack(kvp.Key) + " = " + kvp.Value.ToAsciiOrHexaString()); + Console.WriteLine($" - {location.Keys.Unpack(kvp.Key)} = {kvp.Value:V}"); }).ConfigureAwait(false); } Console.WriteLine(""); diff --git a/FoundationDB.Layers.Experimental/Properties/AssemblyInfo.cs b/FoundationDB.Layers.Experimental/Properties/AssemblyInfo.cs index c4f33a5ad..8bbe8a266 100644 --- a/FoundationDB.Layers.Experimental/Properties/AssemblyInfo.cs +++ b/FoundationDB.Layers.Experimental/Properties/AssemblyInfo.cs @@ -1,11 +1,6 @@ using System.Reflection; -using System.Runtime.CompilerServices; using System.Runtime.InteropServices; -[assembly: AssemblyTitle("FoundationDB.Layers.Messaging")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] - [assembly: ComVisible(false)] [assembly: Guid("6f0b5dad-33c3-43c3-9b4f-6617b0ca965a")] diff --git a/FoundationDB.Linq.Providers/Expressions/FdbDebugStatementWriter.cs b/FoundationDB.Linq.Providers/Expressions/FdbDebugStatementWriter.cs index 1fd4d46ae..ce8f535ec 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbDebugStatementWriter.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbDebugStatementWriter.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Linq.Providers/Expressions/FdbExpressionHelpers.cs b/FoundationDB.Linq.Providers/Expressions/FdbExpressionHelpers.cs index 0f4ee241b..65a412cb5 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbExpressionHelpers.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbExpressionHelpers.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,19 +26,19 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion +using Doxense.Async; +using Doxense.Linq; + namespace FoundationDB.Linq.Expressions { - using FoundationDB.Async; - using FoundationDB.Client; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Diagnostics.Contracts; - using System.Linq; using System.Linq.Expressions; - using System.Reflection; using System.Threading; using System.Threading.Tasks; + using FoundationDB.Client; + using JetBrains.Annotations; /// Helper class for working with extension expressions public static class FdbExpressionHelpers @@ -97,12 +97,12 @@ private static Task Inline([NotNull] Func func { try { - if (ct.IsCancellationRequested) return TaskHelpers.FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); return Task.FromResult(func(trans)); } catch(Exception e) { - return TaskHelpers.FromException(e); + return Task.FromException(e); } } @@ -129,19 +129,19 @@ public static Expression ExecuteEnumerable([NotNull] Func> generator, [NotNull] Func, CancellationToken, Task> lambda, [NotNull] IFdbReadOnlyTransaction trans, CancellationToken ct) + internal static Task ExecuteEnumerable([NotNull] Func> generator, [NotNull] Func, CancellationToken, Task> lambda, [NotNull] IFdbReadOnlyTransaction trans, CancellationToken ct) { Contract.Requires(generator != null && 
lambda != null && trans != null); try { - if (ct.IsCancellationRequested) return TaskHelpers.FromCancellation(ct); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); var enumerable = generator(trans); - if (enumerable == null) return TaskHelpers.FromException(new InvalidOperationException("Source query returned null")); + if (enumerable == null) return Task.FromException(new InvalidOperationException("Source query returned null")); return lambda(enumerable, ct); } catch (Exception e) { - return TaskHelpers.FromException(e); + return Task.FromException(e); } } diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQueryAsyncEnumerableExpression.cs b/FoundationDB.Linq.Providers/Expressions/FdbQueryAsyncEnumerableExpression.cs index 03600624a..39840089c 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQueryAsyncEnumerableExpression.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbQueryAsyncEnumerableExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,19 +28,20 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Linq.Expressions { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Linq.Expressions; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using FoundationDB.Client; + using JetBrains.Annotations; /// Expression that uses an async sequence as the source of elements public sealed class FdbQueryAsyncEnumerableExpression : FdbQuerySequenceExpression { - internal FdbQueryAsyncEnumerableExpression(IFdbAsyncEnumerable source) + internal FdbQueryAsyncEnumerableExpression(IAsyncEnumerable source) { Contract.Requires(source != null); this.Source = source; @@ -53,7 +54,7 @@ public override FdbQueryShape Shape } /// Source sequence of this expression - public IFdbAsyncEnumerable Source + public IAsyncEnumerable Source { [NotNull] get; private set; @@ -72,18 +73,18 @@ public override void WriteTo(FdbQueryExpressionStringBuilder builder) } /// Returns a new expression that will execute this query on a transaction and return a single result - public override Expression>>> CompileSingle() + public override Expression>>> CompileSingle() { return FdbExpressionHelpers.ToTask(CompileSequence()); } /// Returns a new expression that creates an async sequence that will execute this query on a transaction [NotNull] - public override Expression>> CompileSequence() + public override Expression>> CompileSequence() { var prmTrans = Expression.Parameter(typeof(IFdbReadOnlyTransaction), "trans"); - return Expression.Lambda>>( + return Expression.Lambda>>( Expression.Constant(this.Source), prmTrans ); diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQueryExpression.cs b/FoundationDB.Linq.Providers/Expressions/FdbQueryExpression.cs index 330661957..d9519f26c 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQueryExpression.cs +++ 
b/FoundationDB.Linq.Providers/Expressions/FdbQueryExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,18 +28,17 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Linq.Expressions { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Linq.Expressions; - using System.Reflection; using System.Threading; using System.Threading.Tasks; + using Doxense.Diagnostics.Contracts; + using FoundationDB.Client; + using JetBrains.Annotations; /// Base class of all query expression extensions - public abstract class FdbQueryExpression : Expression - { + public abstract class FdbQueryExpression : Expression + { private readonly Type m_type; /// Base ctor @@ -51,17 +50,10 @@ protected FdbQueryExpression(Type type) } /// Type of the results of the query - public override Type Type - { - [NotNull] - get { return m_type; } - } + public override Type Type => m_type; /// Always return - public override ExpressionType NodeType - { - get { return ExpressionType.Extension; } - } + public override ExpressionType NodeType => ExpressionType.Extension; /// Shape of the query public abstract FdbQueryShape Shape { get; } diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQueryExpressions.cs b/FoundationDB.Linq.Providers/Expressions/FdbQueryExpressions.cs index a2bec47b3..9c2f77bc7 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQueryExpressions.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbQueryExpressions.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,15 +28,14 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Linq.Expressions { - using FoundationDB.Client; - using FoundationDB.Layers.Indexing; - using FoundationDB.Layers.Tuples; - using JetBrains.Annotations; using System; using System.Linq.Expressions; - using System.Reflection; using System.Threading; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Linq; + using FoundationDB.Client; + using JetBrains.Annotations; /// Helper class to construct Query Expressions public static class FdbQueryExpressions @@ -44,10 +43,10 @@ public static class FdbQueryExpressions /// Return a single result from the query [NotNull] - public static FdbQuerySingleExpression Single([NotNull] FdbQuerySequenceExpression source, string name, [NotNull] Expression, CancellationToken, Task>> lambda) + public static FdbQuerySingleExpression Single([NotNull] FdbQuerySequenceExpression source, string name, [NotNull] Expression, CancellationToken, Task>> lambda) { - if (source == null) throw new ArgumentNullException("source"); - if (lambda == null) throw new ArgumentNullException("lambda"); + if (source == null) throw new ArgumentNullException(nameof(source)); + if (lambda == null) throw new ArgumentNullException(nameof(lambda)); if (name == null) name = lambda.Name ?? 
"Lambda"; @@ -56,25 +55,25 @@ public static FdbQuerySingleExpression Single([NotNull] FdbQuerySequ /// Return a sequence of results from the query [NotNull] - public static FdbQueryAsyncEnumerableExpression Sequence([NotNull] IFdbAsyncEnumerable source) + public static FdbQueryAsyncEnumerableExpression Sequence([NotNull] IAsyncEnumerable source) { - if (source == null) throw new ArgumentNullException("source"); + if (source == null) throw new ArgumentNullException(nameof(source)); return new FdbQueryAsyncEnumerableExpression(source); } /// Execute a Range read from the database, and return all the keys and values [NotNull] - public static FdbQueryRangeExpression Range(FdbKeySelectorPair range, FdbRangeOptions options = null) + public static FdbQueryRangeExpression Range(KeySelectorPair range, FdbRangeOptions options = null) { return new FdbQueryRangeExpression(range, options); } /// Execute a Range read from the database, and return all the keys and values [NotNull] - public static FdbQueryRangeExpression Range(FdbKeySelector start, FdbKeySelector stop, FdbRangeOptions options = null) + public static FdbQueryRangeExpression Range(KeySelector start, KeySelector stop, FdbRangeOptions options = null) { - return Range(new FdbKeySelectorPair(start, stop), options); + return Range(new KeySelectorPair(start, stop), options); } /// Execute a Range read from the database, and return all the keys and values @@ -82,21 +81,22 @@ public static FdbQueryRangeExpression Range(FdbKeySelector start, FdbKeySelector public static FdbQueryRangeExpression RangeStartsWith(Slice prefix, FdbRangeOptions options = null) { // starts_with('A') means ['A', B') - return Range(FdbKeySelectorPair.StartsWith(prefix), options); + return Range(KeySelectorPair.StartsWith(prefix), options); } /// Execute a Range read from the database, and return all the keys and values [NotNull] - public static FdbQueryRangeExpression RangeStartsWith(IFdbTuple tuple, FdbRangeOptions options = null) + [Obsolete] + 
public static FdbQueryRangeExpression RangeStartsWith(ITuple tuple, FdbRangeOptions options = null) { - return Range(tuple.ToSelectorPair(), options); + return RangeStartsWith(TuPack.Pack(tuple), options); } /// Return the intersection between one of more sequences of results [NotNull] public static FdbQueryIntersectExpression Intersect(params FdbQuerySequenceExpression[] expressions) { - if (expressions == null) throw new ArgumentNullException("expressions"); + if (expressions == null) throw new ArgumentNullException(nameof(expressions)); if (expressions.Length <= 1) throw new ArgumentException("There must be at least two sequences to perform an intersection"); var type = expressions[0].Type; @@ -109,7 +109,7 @@ public static FdbQueryIntersectExpression Intersect(params FdbQuerySequenc [NotNull] public static FdbQueryUnionExpression Union(params FdbQuerySequenceExpression[] expressions) { - if (expressions == null) throw new ArgumentNullException("expressions"); + if (expressions == null) throw new ArgumentNullException(nameof(expressions)); if (expressions.Length <= 1) throw new ArgumentException("There must be at least two sequences to perform an intersection"); var type = expressions[0].Type; @@ -122,10 +122,10 @@ public static FdbQueryUnionExpression Union(params FdbQuerySequenceExpress [NotNull] public static FdbQueryTransformExpression Transform([NotNull] FdbQuerySequenceExpression source, [NotNull] Expression> transform) { - if (source == null) throw new ArgumentNullException("source"); - if (transform == null) throw new ArgumentNullException("transform"); + if (source == null) throw new ArgumentNullException(nameof(source)); + if (transform == null) throw new ArgumentNullException(nameof(transform)); - if (source.ElementType != typeof(T)) throw new ArgumentException(String.Format("Source sequence has type {0} that is not compatible with transform input type {1}", source.ElementType.Name, typeof(T).Name), "source"); + if (source.ElementType != typeof(T)) 
throw new ArgumentException(String.Format("Source sequence has type {0} that is not compatible with transform input type {1}", source.ElementType.Name, typeof(T).Name), nameof(source)); return new FdbQueryTransformExpression(source, transform); } @@ -134,10 +134,10 @@ public static FdbQueryTransformExpression Transform([NotNull] FdbQue [NotNull] public static FdbQueryFilterExpression Filter([NotNull] FdbQuerySequenceExpression source, [NotNull] Expression> filter) { - if (source == null) throw new ArgumentNullException("source"); - if (filter == null) throw new ArgumentNullException("filter"); + if (source == null) throw new ArgumentNullException(nameof(source)); + if (filter == null) throw new ArgumentNullException(nameof(filter)); - if (source.ElementType != typeof(T)) throw new ArgumentException(String.Format("Source sequence has type {0} that is not compatible with filter input type {1}", source.ElementType.Name, typeof(T).Name), "source"); + if (source.ElementType != typeof(T)) throw new ArgumentException(String.Format("Source sequence has type {0} that is not compatible with filter input type {1}", source.ElementType.Name, typeof(T).Name), nameof(source)); return new FdbQueryFilterExpression(source, filter); } @@ -146,7 +146,7 @@ public static FdbQueryFilterExpression Filter([NotNull] FdbQuerySequenceEx [NotNull] public static string ExplainSingle([NotNull] FdbQueryExpression expression, CancellationToken ct) { - if (expression == null) throw new ArgumentNullException("expression"); + if (expression == null) throw new ArgumentNullException(nameof(expression)); if (expression.Shape != FdbQueryShape.Single) throw new InvalidOperationException("Invalid shape (single expected)"); var expr = expression.CompileSingle(); @@ -158,7 +158,7 @@ public static string ExplainSingle([NotNull] FdbQueryExpression expression [NotNull] public static string ExplainSequence([NotNull] FdbQuerySequenceExpression expression) { - if (expression == null) throw new 
ArgumentNullException("expression"); + if (expression == null) throw new ArgumentNullException(nameof(expression)); if (expression.Shape != FdbQueryShape.Sequence) throw new InvalidOperationException("Invalid shape (sequence expected)"); var expr = expression.CompileSequence(); diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQueryFilterExpression.cs b/FoundationDB.Linq.Providers/Expressions/FdbQueryFilterExpression.cs index 5f68c4c81..c2b7c55ad 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQueryFilterExpression.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbQueryFilterExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,12 +28,12 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Linq.Expressions { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Linq.Expressions; - using System.Threading; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using FoundationDB.Client; + using JetBrains.Annotations; /// Expression that represent a filter on a source sequence /// Type of elements in the source sequence @@ -79,7 +79,7 @@ public override void WriteTo(FdbQueryExpressionStringBuilder builder) /// Returns a new expression that creates an async sequence that will execute this query on a transaction [NotNull] - public override Expression>> CompileSequence() + public override Expression>> CompileSequence() { var lambda = this.Filter.Compile(); @@ -89,13 +89,13 @@ public override Expression> // (tr) => sourceEnumerable(tr).Where(lambda); - var body = FdbExpressionHelpers.RewriteCall, Func, IFdbAsyncEnumerable>>( + var body = FdbExpressionHelpers.RewriteCall, Func, IAsyncEnumerable>>( (sequence, predicate) => sequence.Where(predicate), FdbExpressionHelpers.RewriteCall(enumerable, 
prmTrans), Expression.Constant(lambda) ); - return Expression.Lambda>>(body, prmTrans); + return Expression.Lambda>>(body, prmTrans); } } diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQueryIndexLookupExpression.cs b/FoundationDB.Linq.Providers/Expressions/FdbQueryIndexLookupExpression.cs index 658b41764..db179f004 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQueryIndexLookupExpression.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbQueryIndexLookupExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,14 +28,14 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Linq.Expressions { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Indexing; - using JetBrains.Annotations; using System; using System.Globalization; using System.Linq.Expressions; - using System.Threading; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using FoundationDB.Client; + using FoundationDB.Layers.Indexing; + using JetBrains.Annotations; /// Expression that represents a lookup on an FdbIndex /// Type of the Id of the enties being indexed @@ -119,7 +119,7 @@ public FdbIndex Index /// Returns a new expression that creates an async sequence that will execute this query on a transaction [NotNull] - public override Expression>> CompileSequence() + public override Expression>> CompileSequence() { var prmTrans = Expression.Parameter(typeof(IFdbReadOnlyTransaction), "trans"); Expression body; @@ -128,7 +128,7 @@ public override Expression> { case ExpressionType.Equal: { - body = FdbExpressionHelpers.RewriteCall, IFdbReadOnlyTransaction, V, bool, IFdbAsyncEnumerable>>( + body = FdbExpressionHelpers.RewriteCall, IFdbReadOnlyTransaction, V, bool, IAsyncEnumerable>>( (index, trans, value, reverse) => index.Lookup(trans, value, 
reverse), Expression.Constant(this.Index, typeof(FdbIndex)), prmTrans, @@ -141,7 +141,7 @@ public override Expression> case ExpressionType.GreaterThan: case ExpressionType.GreaterThanOrEqual: { - body = FdbExpressionHelpers.RewriteCall, IFdbReadOnlyTransaction, V, bool, IFdbAsyncEnumerable>>( + body = FdbExpressionHelpers.RewriteCall, IFdbReadOnlyTransaction, V, bool, IAsyncEnumerable>>( (index, trans, value, reverse) => index.LookupGreaterThan(trans, value, this.Operator == ExpressionType.GreaterThanOrEqual, reverse), Expression.Constant(this.Index, typeof(FdbIndex)), prmTrans, @@ -154,7 +154,7 @@ public override Expression> case ExpressionType.LessThan: case ExpressionType.LessThanOrEqual: { - body = FdbExpressionHelpers.RewriteCall, IFdbReadOnlyTransaction, V, bool, IFdbAsyncEnumerable>>( + body = FdbExpressionHelpers.RewriteCall, IFdbReadOnlyTransaction, V, bool, IAsyncEnumerable>>( (index, trans, value, reverse) => index.LookupLessThan(trans, value, this.Operator == ExpressionType.LessThanOrEqual, reverse), Expression.Constant(this.Index, typeof(FdbIndex)), prmTrans, @@ -170,7 +170,7 @@ public override Expression> } } - return Expression.Lambda>>(body, prmTrans); + return Expression.Lambda>>(body, prmTrans); } /// Returns a textual representation of expression diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQueryIntersectExpression.cs b/FoundationDB.Linq.Providers/Expressions/FdbQueryIntersectExpression.cs index 08c3659fc..1b6e9819c 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQueryIntersectExpression.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbQueryIntersectExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,15 +28,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Linq.Expressions { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Collections.Generic; - using System.Linq; using System.Linq.Expressions; - using System.Reflection; - using System.Threading; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using FoundationDB.Client; + using JetBrains.Annotations; /// Mode of execution of a merge operation public enum FdbQueryMergeType @@ -108,7 +106,7 @@ public override void WriteTo(FdbQueryExpressionStringBuilder builder) /// Returns a new expression that creates an async sequence that will execute this query on a transaction [NotNull] - public override Expression>> CompileSequence() + public override Expression>> CompileSequence() { // compile the key selector var prmTrans = Expression.Parameter(typeof(IFdbReadOnlyTransaction), "trans"); @@ -120,13 +118,13 @@ public override Expression> enumerables[i] = FdbExpressionHelpers.RewriteCall(this.Expressions[i].CompileSequence(), prmTrans); } - var array = Expression.NewArrayInit(typeof(IFdbAsyncEnumerable), enumerables); + var array = Expression.NewArrayInit(typeof(IAsyncEnumerable), enumerables); Expression body; switch (this.MergeType) { case FdbQueryMergeType.Intersect: { - body = FdbExpressionHelpers.RewriteCall[], IComparer, IFdbAsyncEnumerable>>( + body = FdbExpressionHelpers.RewriteCall[], IComparer, IAsyncEnumerable>>( (sources, comparer) => FdbMergeQueryExtensions.Intersect(sources, comparer), array, Expression.Constant(this.KeyComparer, typeof(IComparer)) @@ -135,7 +133,7 @@ public override Expression> } case FdbQueryMergeType.Union: { - body = FdbExpressionHelpers.RewriteCall[], IComparer, IFdbAsyncEnumerable>>( + body = FdbExpressionHelpers.RewriteCall[], IComparer, IAsyncEnumerable>>( (sources, comparer) => FdbMergeQueryExtensions.Union(sources, comparer), array, Expression.Constant(this.KeyComparer, typeof(IComparer)) @@ 
-148,7 +146,7 @@ public override Expression> } } - return Expression.Lambda>>( + return Expression.Lambda>>( body, prmTrans ); diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQueryNodeType.cs b/FoundationDB.Linq.Providers/Expressions/FdbQueryNodeType.cs index 681c8a53b..914923670 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQueryNodeType.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbQueryNodeType.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQueryRangeExpression.cs b/FoundationDB.Linq.Providers/Expressions/FdbQueryRangeExpression.cs index 221dbacaa..c8d393d94 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQueryRangeExpression.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbQueryRangeExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,28 +26,29 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion + namespace FoundationDB.Linq.Expressions { - using FoundationDB.Client; - using JetBrains.Annotations; using System; using System.Collections.Generic; using System.Globalization; using System.Linq.Expressions; - using System.Threading; + using Doxense.Linq; + using FoundationDB.Client; + using JetBrains.Annotations; /// Expression that represents a GetRange query using a pair of key selectors public class FdbQueryRangeExpression : FdbQuerySequenceExpression> { - internal FdbQueryRangeExpression(FdbKeySelectorPair range, FdbRangeOptions options) + internal FdbQueryRangeExpression(KeySelectorPair range, FdbRangeOptions options) { this.Range = range; this.Options = options; } /// Returns the pair of key selectors for this range query - public FdbKeySelectorPair Range { get; private set; } + public KeySelectorPair Range { get; private set; } /// Returns the options for this range query public FdbRangeOptions Options { get; private set; } @@ -69,18 +70,18 @@ public override void WriteTo(FdbQueryExpressionStringBuilder builder) /// Returns a new expression that creates an async sequence that will execute this query on a transaction [NotNull] - public override Expression>>> CompileSequence() + public override Expression>>> CompileSequence() { var prmTrans = Expression.Parameter(typeof(IFdbReadOnlyTransaction), "trans"); - var body = FdbExpressionHelpers.RewriteCall>>>( + var body = FdbExpressionHelpers.RewriteCall>>>( (trans, range, options) => trans.GetRange(range, options), prmTrans, - Expression.Constant(this.Range, typeof(FdbKeySelectorPair)), + Expression.Constant(this.Range, typeof(KeySelectorPair)), Expression.Constant(this.Options, typeof(FdbRangeOptions)) ); - return Expression.Lambda>>>( + return Expression.Lambda>>>( body, prmTrans ); @@ -95,4 +96,4 @@ public override string ToString() } -} \ No newline at end of file +} diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQuerySequenceExpression.cs 
b/FoundationDB.Linq.Providers/Expressions/FdbQuerySequenceExpression.cs index 04c38dd89..ddbf13a96 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQuerySequenceExpression.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbQuerySequenceExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -25,19 +25,20 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #endregion - + namespace FoundationDB.Linq.Expressions { - using FoundationDB.Client; using JetBrains.Annotations; using System; using System.Linq.Expressions; using System.Threading; using System.Threading.Tasks; + using Doxense.Linq; + using FoundationDB.Client; /// Base class of all queries that return a sequence of elements (Ranges, Index lookups, ...) /// Type of items returned - public abstract class FdbQuerySequenceExpression : FdbQueryExpression> + public abstract class FdbQuerySequenceExpression : FdbQueryExpression> { /// Type of elements returned by the sequence public Type ElementType @@ -53,10 +54,10 @@ public override FdbQueryShape Shape } /// Returns a new expression that creates an async sequence that will execute this query on a transaction - public abstract Expression>> CompileSequence(); + public abstract Expression>> CompileSequence(); /// Returns a new expression that creates an async sequence that will execute this query on a transaction - public override Expression>>> CompileSingle() + public override Expression>>> CompileSingle() { //REVIEW: why is it called CompileSingle ?? 
return FdbExpressionHelpers.ToTask(CompileSequence()); diff --git a/FoundationDB.Linq.Providers/Expressions/FdbQuerySingleExpression.cs b/FoundationDB.Linq.Providers/Expressions/FdbQuerySingleExpression.cs index 812f0e8b7..a9102abcf 100644 --- a/FoundationDB.Linq.Providers/Expressions/FdbQuerySingleExpression.cs +++ b/FoundationDB.Linq.Providers/Expressions/FdbQuerySingleExpression.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,9 +28,6 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Linq.Expressions { - using FoundationDB.Async; - using FoundationDB.Client; - using JetBrains.Annotations; using System; using System.Diagnostics.Contracts; using System.Globalization; @@ -38,6 +35,9 @@ namespace FoundationDB.Linq.Expressions using System.Reflection; using System.Threading; using System.Threading.Tasks; + using Doxense.Linq; + using FoundationDB.Client; + using JetBrains.Annotations; /// Base class of all queries that return a single element /// Type of the elements of the source sequence @@ -45,40 +45,26 @@ namespace FoundationDB.Linq.Expressions public class FdbQuerySingleExpression : FdbQueryExpression { /// Create a new expression that returns a single result from a source sequence - public FdbQuerySingleExpression(FdbQuerySequenceExpression sequence, string name, Expression, CancellationToken, Task>> lambda) + public FdbQuerySingleExpression(FdbQuerySequenceExpression sequence, string name, Expression, CancellationToken, Task>> lambda) { Contract.Requires(sequence != null && lambda != null); this.Sequence = sequence; this.Name = name; - this.Lambda = lambda; + this.Handler = lambda; } /// Always returns - public override FdbQueryShape Shape - { - get { return FdbQueryShape.Single; } - } + public override FdbQueryShape Shape => FdbQueryShape.Single; /// Source sequence - 
public FdbQuerySequenceExpression Sequence - { - [NotNull] get; - private set; - } + [NotNull] + public FdbQuerySequenceExpression Sequence { get; } /// Name of this query - public string Name - { - get; - private set; - } + public string Name { get; } - /// Opeartion that is applied to and that returns a single result - public new Expression, CancellationToken, Task>> Lambda - { - [NotNull] get; - private set; - } + [NotNull] + public Expression, CancellationToken, Task>> Handler { get; } /// Apply a custom visitor to this expression public override Expression Accept(FdbQueryExpressionVisitor visitor) @@ -110,7 +96,7 @@ public override Expression BE LIABLE FOR ANY namespace FoundationDB.Linq.Expressions { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using JetBrains.Annotations; using System; using System.Linq.Expressions; - using System.Reflection; - using System.Threading; + using Doxense.Diagnostics.Contracts; + using Doxense.Linq; + using FoundationDB.Client; + using JetBrains.Annotations; /// Expression that represent a projection from one type into another /// Type of elements in the inner sequence @@ -81,7 +80,7 @@ public override void WriteTo(FdbQueryExpressionStringBuilder builder) /// Returns a new expression that creates an async sequence that will execute this query on a transaction [NotNull] - public override Expression>> CompileSequence() + public override Expression>> CompileSequence() { var lambda = this.Transform.Compile(); @@ -91,13 +90,13 @@ public override Expression> // (tr) => sourceEnumerable(tr).Select(lambda); - var body = FdbExpressionHelpers.RewriteCall, Func, IFdbAsyncEnumerable>>( + var body = FdbExpressionHelpers.RewriteCall, Func, IAsyncEnumerable>>( (sequence, selector) => sequence.Select(selector), FdbExpressionHelpers.RewriteCall(enumerable, prmTrans), Expression.Constant(lambda) ); - return Expression.Lambda>>(body, prmTrans); + return Expression.Lambda>>(body, prmTrans); } } diff --git 
a/FoundationDB.Linq.Providers/FdbAsyncQueryable.cs b/FoundationDB.Linq.Providers/FdbAsyncQueryable.cs index 6bc03a809..97716da8e 100644 --- a/FoundationDB.Linq.Providers/FdbAsyncQueryable.cs +++ b/FoundationDB.Linq.Providers/FdbAsyncQueryable.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,16 +28,17 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Linq { - using FoundationDB.Async; - using FoundationDB.Client; - using FoundationDB.Layers.Indexing; - using FoundationDB.Linq.Expressions; - using FoundationDB.Linq.Providers; using System; using System.Collections.Generic; using System.Linq.Expressions; using System.Threading; using System.Threading.Tasks; + using Doxense.Async; + using Doxense.Linq; + using FoundationDB.Client; + using FoundationDB.Layers.Indexing; + using FoundationDB.Linq.Expressions; + using FoundationDB.Linq.Providers; /// Extensions methods that help create a query expression tree public static class FdbAsyncQueryable @@ -60,7 +61,7 @@ public static class FdbAsyncQueryable /// Query that will use this database as a source public static IFdbDatabaseQueryable Query(this IFdbDatabase db) { - if (db == null) throw new ArgumentNullException("db"); + if (db == null) throw new ArgumentNullException(nameof(db)); return new FdbDatabaseQuery(db); } @@ -69,9 +70,9 @@ public static IFdbDatabaseQueryable Query(this IFdbDatabase db) /// Source database query /// Pair of key selectors /// Query that will return the keys from the specified - public static IFdbAsyncSequenceQueryable> Range(this IFdbDatabaseQueryable query, FdbKeySelectorPair range) + public static IFdbAsyncSequenceQueryable> Range(this IFdbDatabaseQueryable query, KeySelectorPair range) { - if (query == null) throw new ArgumentNullException("query"); + if (query == null) throw new 
ArgumentNullException(nameof(query)); var expr = FdbQueryExpressions.Range(range); @@ -84,7 +85,7 @@ public static IFdbAsyncSequenceQueryable> Range(this /// Query that will return the keys that share the specified public static IFdbAsyncSequenceQueryable> RangeStartsWith(this IFdbDatabaseQueryable query, Slice prefix) { - if (query == null) throw new ArgumentNullException("query"); + if (query == null) throw new ArgumentNullException(nameof(query)); var expr = FdbQueryExpressions.RangeStartsWith(prefix); @@ -98,8 +99,8 @@ public static IFdbAsyncSequenceQueryable> RangeStarts /// Creates a new query on this index public static IFdbIndexQueryable Query(this FdbIndex index, IFdbDatabase db) { - if (index == null) throw new ArgumentNullException("index"); - if (db == null) throw new ArgumentNullException("db"); + if (index == null) throw new ArgumentNullException(nameof(index)); + if (db == null) throw new ArgumentNullException(nameof(db)); return new FdbIndexQuery(db, index); } @@ -117,8 +118,8 @@ public static IFdbAsyncSequenceQueryable Lookup(this IFdbIndex /// Projects each element of a sequence query into a new form. public static IFdbAsyncSequenceQueryable Select(this IFdbAsyncSequenceQueryable query, Expression> selector) { - if (query == null) throw new ArgumentNullException("query"); - if (selector == null) throw new ArgumentNullException("selector"); + if (query == null) throw new ArgumentNullException(nameof(query)); + if (selector == null) throw new ArgumentNullException(nameof(selector)); var sourceExpr = query.Expression as FdbQuerySequenceExpression; if (sourceExpr == null) throw new ArgumentException("query"); @@ -131,8 +132,8 @@ public static IFdbAsyncSequenceQueryable Select(this IFdbAsyncSequenceQ /// Filters a sequence query of values based on a predicate. 
public static IFdbAsyncSequenceQueryable Where(this IFdbAsyncSequenceQueryable query, Expression> predicate) { - if (query == null) throw new ArgumentNullException("query"); - if (predicate == null) throw new ArgumentNullException("predicate"); + if (query == null) throw new ArgumentNullException(nameof(query)); + if (predicate == null) throw new ArgumentNullException(nameof(predicate)); var sourceExpr = query.Expression as FdbQuerySequenceExpression; if (sourceExpr == null) throw new ArgumentException("query"); @@ -143,12 +144,12 @@ public static IFdbAsyncSequenceQueryable Where(this IFdbAsyncSequenceQuery } /// Returns an async sequence that would return the results of this query as they arrive. - public static IFdbAsyncEnumerable ToAsyncEnumerable(this IFdbAsyncSequenceQueryable query) + public static IAsyncEnumerable ToAsyncEnumerable(this IFdbAsyncSequenceQueryable query) { - if (query == null) throw new ArgumentNullException("query"); + if (query == null) throw new ArgumentNullException(nameof(query)); var sequenceQuery = query as FdbAsyncSequenceQuery; - if (sequenceQuery == null) throw new ArgumentException("Source query type not supported", "query"); + if (sequenceQuery == null) throw new ArgumentException("Source query type not supported", nameof(query)); return sequenceQuery.ToEnumerable(); } @@ -156,7 +157,7 @@ public static IFdbAsyncEnumerable ToAsyncEnumerable(this IFdbAsyncSequence /// Returns the first element of a sequence query public static Task CountAsync(this IFdbAsyncSequenceQueryable query, CancellationToken ct = default(CancellationToken)) { - if (query == null) throw new ArgumentNullException("query"); + if (query == null) throw new ArgumentNullException(nameof(query)); var expr = FdbQueryExpressions.Single( (FdbQuerySequenceExpression)query.Expression, @@ -170,8 +171,8 @@ public static IFdbAsyncEnumerable ToAsyncEnumerable(this IFdbAsyncSequence /// Returns the first element of a sequence query public static Task FirstAsync(this 
IFdbAsyncSequenceQueryable query, CancellationToken ct = default(CancellationToken)) { - if (query == null) throw new ArgumentNullException("query"); - if (ct.IsCancellationRequested) return TaskHelpers.FromCancellation(ct); + if (query == null) throw new ArgumentNullException(nameof(query)); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); var expr = FdbQueryExpressions.Single( (FdbQuerySequenceExpression)query.Expression, @@ -185,8 +186,8 @@ public static IFdbAsyncEnumerable ToAsyncEnumerable(this IFdbAsyncSequence /// Returns the first element of a sequence query public static Task FirstOrDefaultAsync(this IFdbAsyncSequenceQueryable query, CancellationToken ct = default(CancellationToken)) { - if (query == null) throw new ArgumentNullException("query"); - if (ct.IsCancellationRequested) return TaskHelpers.FromCancellation(ct); + if (query == null) throw new ArgumentNullException(nameof(query)); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); var expr = FdbQueryExpressions.Single( (FdbQuerySequenceExpression)query.Expression, @@ -200,8 +201,8 @@ public static IFdbAsyncEnumerable ToAsyncEnumerable(this IFdbAsyncSequence /// Immediately executes a sequence query and return a list of all the results once it has completed. public static Task> ToListAsync(this IFdbAsyncSequenceQueryable query, CancellationToken ct = default(CancellationToken)) { - if (query == null) throw new ArgumentNullException("query"); - if (ct.IsCancellationRequested) return TaskHelpers.FromCancellation>(ct); + if (query == null) throw new ArgumentNullException(nameof(query)); + if (ct.IsCancellationRequested) return Task.FromCanceled>(ct); return query.Provider.ExecuteAsync>(query.Expression, ct); @@ -210,8 +211,8 @@ public static IFdbAsyncEnumerable ToAsyncEnumerable(this IFdbAsyncSequence /// Immediately executes a sequence query and return an array of all the results once it has completed. 
public static Task ToArrayAsync(this IFdbAsyncSequenceQueryable query, CancellationToken ct = default(CancellationToken)) { - if (query == null) throw new ArgumentNullException("query"); - if (ct.IsCancellationRequested) return TaskHelpers.FromCancellation(ct); + if (query == null) throw new ArgumentNullException(nameof(query)); + if (ct.IsCancellationRequested) return Task.FromCanceled(ct); return query.Provider.ExecuteAsync(query.Expression, ct); } diff --git a/FoundationDB.Linq.Providers/FoundationDB.Linq.Providers.csproj b/FoundationDB.Linq.Providers/FoundationDB.Linq.Providers.csproj index dc4787cfc..044e1dce9 100644 --- a/FoundationDB.Linq.Providers/FoundationDB.Linq.Providers.csproj +++ b/FoundationDB.Linq.Providers/FoundationDB.Linq.Providers.csproj @@ -1,95 +1,40 @@ - - - + + - Debug - AnyCPU - {FAF14E3F-6662-4084-8B92-E6697F6B9D5A} - Library - Properties - FoundationDB.Linq + netstandard2.0 + FoundationDB.Linq.Providers FoundationDB.Linq.Providers - v4.5 - 512 + true + ..\Common\foundationdb-net-client.snk + 5.1.0-alpha1 + Doxense + http://opensource.org/licenses/BSD-3-Clause + http://github.com/Doxense/foundationdb-dotnet-client + http://doxense.github.io/foundationdb-dotnet-client/nuget/foundationdb.png + http://github.com/Doxense/foundationdb-dotnet-client + foundationdb fdb nosql + This is a pre-release of the .NET Binding, the public API is still subject to changes. 
+ Experimental LINQ providers for the FoundationDB .NET Binding + Copyright 2013-2018 Doxense SAS - - true + + + true + latest full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - 105,108,109,114,472,660,661,628,1066 - AnyCPU - - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - bin\Release\FoundationDB.Linq.Providers.xml - 105,108,109,114,472,660,661,628,1066 - AnyCPU - - - true + true - - ..\Common\foundationdb-net-client.snk + + + true + 105,108,109,114,472,660,661,628,1066,NU1605 + bin\Release\netstandard2.0\FoundationDB.Layers.Common.xml + latest + - - - - - - - Properties\VersionInfo.cs - - - - - - - - - - - - - - - - - - - - - - - - - - - - {773166b7-de74-4fcc-845c-84080cc89533} - FoundationDB.Client - - - {7c7717d6-a1e7-4541-af8b-1ac762b5ed0f} - FoundationDB.Layers.Common - + + - - - - \ No newline at end of file + + diff --git a/FoundationDB.Linq.Providers/Interfaces.cs b/FoundationDB.Linq.Providers/Interfaces.cs index b5c38f41a..4224cb89b 100644 --- a/FoundationDB.Linq.Providers/Interfaces.cs +++ b/FoundationDB.Linq.Providers/Interfaces.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Linq.Providers/Properties/AssemblyInfo.cs b/FoundationDB.Linq.Providers/Properties/AssemblyInfo.cs index 9a9a66e87..b01bdea0e 100644 --- a/FoundationDB.Linq.Providers/Properties/AssemblyInfo.cs +++ b/FoundationDB.Linq.Providers/Properties/AssemblyInfo.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -30,12 +30,8 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY using System.Runtime.CompilerServices; using System.Runtime.InteropServices; -[assembly: AssemblyTitle("FoundationDB.Linq")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] - [assembly: ComVisible(false)] [assembly: Guid("dfd43b61-0d9b-42d6-bbec-a74eafed2631")] -[assembly: InternalsVisibleTo("FoundationDB.Tests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100a9e653303024d91e3e98cdb33228897aebc9aeb0dd5e0890a2362ff08231643525d86e955d52a9be450a9602eedbc1c0eb463d227320a6b6ad1c7129f21353b2b28242d712a0e7b3aaff55c0ab1019c92bea6806b9cf64e93d976143dc57e0a8e73a65c03422ab2624c1220d84f7e88c5a5c3c9edefcf4a76969d458348403ce")] \ No newline at end of file +[assembly: InternalsVisibleTo("FoundationDB.Tests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100a9e653303024d91e3e98cdb33228897aebc9aeb0dd5e0890a2362ff08231643525d86e955d52a9be450a9602eedbc1c0eb463d227320a6b6ad1c7129f21353b2b28242d712a0e7b3aaff55c0ab1019c92bea6806b9cf64e93d976143dc57e0a8e73a65c03422ab2624c1220d84f7e88c5a5c3c9edefcf4a76969d458348403ce")] diff --git a/FoundationDB.Linq.Providers/Providers/FdbAsyncQuery.cs b/FoundationDB.Linq.Providers/Providers/FdbAsyncQuery.cs index 31c7094bf..63929e05d 100644 --- a/FoundationDB.Linq.Providers/Providers/FdbAsyncQuery.cs +++ b/FoundationDB.Linq.Providers/Providers/FdbAsyncQuery.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,6 +26,8 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion +using Doxense.Linq; + namespace FoundationDB.Linq.Providers { using FoundationDB.Client; @@ -57,21 +59,18 @@ protected FdbAsyncQuery([NotNull] IFdbReadOnlyTransaction trans, FdbQueryExpress } /// Query expression - public FdbQueryExpression Expression { get; private set; } + public FdbQueryExpression Expression { get; } /// Database used by the query (or null) - public IFdbDatabase Database { [CanBeNull] get; private set; } + public IFdbDatabase Database { [CanBeNull] get; } /// Transaction used by the query (or null) - public IFdbReadOnlyTransaction Transaction { [CanBeNull] get; private set; } + public IFdbReadOnlyTransaction Transaction { [CanBeNull] get; } /// Type of the elements returned by the query - public virtual Type Type { get { return this.Expression.Type; } } + public virtual Type Type => this.Expression.Type; - IFdbAsyncQueryProvider IFdbAsyncQueryable.Provider - { - get { return this; } - } + IFdbAsyncQueryProvider IFdbAsyncQueryable.Provider => this; /// Create a new query from a query expression public virtual IFdbAsyncQueryable CreateQuery(FdbQueryExpression expression) @@ -83,7 +82,7 @@ public virtual IFdbAsyncQueryable CreateQuery(FdbQueryExpression expression) /// Create a new typed query from a query expression public virtual IFdbAsyncQueryable CreateQuery([NotNull] FdbQueryExpression expression) { - if (expression == null) throw new ArgumentNullException("expression"); + if (expression == null) throw new ArgumentNullException(nameof(expression)); if (this.Transaction != null) return new FdbAsyncSingleQuery(this.Transaction, expression); @@ -94,7 +93,7 @@ public virtual IFdbAsyncQueryable CreateQuery([NotNull] FdbQueryExpression /// Create a new sequence query from a sequence expression public virtual IFdbAsyncSequenceQueryable CreateSequenceQuery([NotNull] FdbQuerySequenceExpression expression) { - if (expression == null) throw new ArgumentNullException("expression"); + if (expression == 
null) throw new ArgumentNullException(nameof(expression)); if (this.Transaction != null) return new FdbAsyncSequenceQuery(this.Transaction, expression); @@ -106,7 +105,7 @@ public virtual IFdbAsyncSequenceQueryable CreateSequenceQuery([NotNull] Fd /// Type of the expected result. Can be a for singleton queries or a for sequence queries public async Task ExecuteAsync([NotNull] FdbQueryExpression expression, CancellationToken ct) { - if (expression == null) throw new ArgumentNullException("ct"); + if (expression == null) throw new ArgumentNullException(nameof(ct)); ct.ThrowIfCancellationRequested(); var result = await ExecuteInternal(expression, typeof(R), ct).ConfigureAwait(false); @@ -180,7 +179,7 @@ protected virtual async Task ExecuteSingleInternal([NotNull] FdbQueryExp #region Sequence... [NotNull] - private Func> CompileSequence([NotNull] FdbQueryExpression expression) + private Func> CompileSequence([NotNull] FdbQueryExpression expression) { #if false //TODO: caching ! @@ -197,25 +196,25 @@ private Func> CompileSequence([N } [NotNull] - internal static IFdbAsyncEnumerator GetEnumerator([NotNull] FdbAsyncSequenceQuery sequence, FdbAsyncMode mode) + internal static IAsyncEnumerator GetEnumerator([NotNull] FdbAsyncSequenceQuery sequence, AsyncIterationHint mode) { var generator = sequence.CompileSequence(sequence.Expression); if (sequence.Transaction != null) { - return generator(sequence.Transaction).GetEnumerator(mode); + return generator(sequence.Transaction).GetEnumerator(sequence.Transaction.Cancellation, mode); } //BUGBUG: how do we get a CancellationToken without a transaction? 
var ct = CancellationToken.None; IFdbTransaction trans = null; - IFdbAsyncEnumerator iterator = null; + IAsyncEnumerator iterator = null; bool success = true; try { trans = sequence.Database.BeginTransaction(ct); - iterator = generator(trans).GetEnumerator(); + iterator = generator(trans).GetEnumerator(ct, mode); return new TransactionIterator(trans, iterator); } @@ -228,32 +227,29 @@ internal static IFdbAsyncEnumerator GetEnumerator([NotNull] FdbAsyncSequenceQ { if (!success) { - if (iterator != null) iterator.Dispose(); - if (trans != null) trans.Dispose(); + iterator?.Dispose(); + trans?.Dispose(); } } } - private sealed class TransactionIterator : IFdbAsyncEnumerator + private sealed class TransactionIterator : IAsyncEnumerator { - private readonly IFdbAsyncEnumerator m_iterator; + private readonly IAsyncEnumerator m_iterator; private readonly IFdbTransaction m_transaction; - public TransactionIterator(IFdbTransaction transaction, IFdbAsyncEnumerator iterator) + public TransactionIterator(IFdbTransaction transaction, IAsyncEnumerator iterator) { m_transaction = transaction; m_iterator = iterator; } - public Task MoveNext(CancellationToken cancellationToken) + public Task MoveNextAsync() { - return m_iterator.MoveNext(cancellationToken); + return m_iterator.MoveNextAsync(); } - public T Current - { - get { return m_iterator.Current; } - } + public T Current => m_iterator.Current; public void Dispose() { @@ -297,14 +293,14 @@ protected virtual async Task ExecuteSequenceInternal(FdbQueryExpression } else { - throw new InvalidOperationException(String.Format("Sequence result type {0} is not supported", resultType.Name)); + throw new InvalidOperationException($"Sequence result type {resultType.Name} is not supported"); } return result; } finally { - if (owned && trans != null) trans.Dispose(); + if (owned) trans?.Dispose(); } } diff --git a/FoundationDB.Linq.Providers/Providers/FdbAsyncSequenceQuery.cs b/FoundationDB.Linq.Providers/Providers/FdbAsyncSequenceQuery.cs 
index 2e04c4fdc..7cdb45bc5 100644 --- a/FoundationDB.Linq.Providers/Providers/FdbAsyncSequenceQuery.cs +++ b/FoundationDB.Linq.Providers/Providers/FdbAsyncSequenceQuery.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,11 +26,13 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion + namespace FoundationDB.Linq.Providers { using FoundationDB.Client; using FoundationDB.Linq.Expressions; using System; + using Doxense.Linq; /// Async LINQ query that returns an async sequence of items /// Type of the items in the sequence @@ -51,9 +53,9 @@ public FdbAsyncSequenceQuery(IFdbReadOnlyTransaction trans, FdbQueryExpression e public Type ElementType { get { return typeof(T); } } /// Return an async sequence that will return the results of this query - public IFdbAsyncEnumerable ToEnumerable(FdbAsyncMode mode = FdbAsyncMode.Default) + public IAsyncEnumerable ToEnumerable(AsyncIterationHint mode = AsyncIterationHint.Default) { - return FdbAsyncEnumerable.Create((_) => GetEnumerator(this, mode)); + return AsyncEnumerable.Create((_, __) => GetEnumerator(this, mode)); } } diff --git a/FoundationDB.Linq.Providers/Providers/FdbAsyncSingleQuery.cs b/FoundationDB.Linq.Providers/Providers/FdbAsyncSingleQuery.cs index 91a598185..4860cb927 100644 --- a/FoundationDB.Linq.Providers/Providers/FdbAsyncSingleQuery.cs +++ b/FoundationDB.Linq.Providers/Providers/FdbAsyncSingleQuery.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Linq.Providers/Providers/FdbDatabaseQuery.cs b/FoundationDB.Linq.Providers/Providers/FdbDatabaseQuery.cs index 97395d358..280839bd9 100644 --- a/FoundationDB.Linq.Providers/Providers/FdbDatabaseQuery.cs +++ b/FoundationDB.Linq.Providers/Providers/FdbDatabaseQuery.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Linq.Providers/Providers/FdbIndexQuery`2.cs b/FoundationDB.Linq.Providers/Providers/FdbIndexQuery`2.cs index ec508f1d2..d4f54fc1d 100644 --- a/FoundationDB.Linq.Providers/Providers/FdbIndexQuery`2.cs +++ b/FoundationDB.Linq.Providers/Providers/FdbIndexQuery`2.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Linq.Providers/Visitors/FdbQueryExpressionStringBuilder.cs b/FoundationDB.Linq.Providers/Visitors/FdbQueryExpressionStringBuilder.cs index fc73b17a8..25e828c5f 100644 --- a/FoundationDB.Linq.Providers/Visitors/FdbQueryExpressionStringBuilder.cs +++ b/FoundationDB.Linq.Providers/Visitors/FdbQueryExpressionStringBuilder.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. 
Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Linq.Providers/Visitors/FdbQueryExpressionVisitor.cs b/FoundationDB.Linq.Providers/Visitors/FdbQueryExpressionVisitor.cs index dd60884c0..edb168485 100644 --- a/FoundationDB.Linq.Providers/Visitors/FdbQueryExpressionVisitor.cs +++ b/FoundationDB.Linq.Providers/Visitors/FdbQueryExpressionVisitor.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Samples/App.config b/FoundationDB.Samples/App.config index 59e07c2d4..564cf3734 100644 --- a/FoundationDB.Samples/App.config +++ b/FoundationDB.Samples/App.config @@ -1,7 +1,7 @@ - + - + @@ -17,4 +17,4 @@ - \ No newline at end of file + diff --git a/FoundationDB.Samples/Benchmarks/BenchRunner.cs b/FoundationDB.Samples/Benchmarks/BenchRunner.cs index 9e1d67694..5fddc44d0 100644 --- a/FoundationDB.Samples/Benchmarks/BenchRunner.cs +++ b/FoundationDB.Samples/Benchmarks/BenchRunner.cs @@ -2,22 +2,16 @@ namespace FoundationDB.Samples.Benchmarks { - using Doxense.Mathematics.Statistics; - using FoundationDB.Client; - using FoundationDB.Client.Native; - using FoundationDB.Layers.Directories; - using FoundationDB.Layers.Messaging; - using FoundationDB.Layers.Tuples; - using FoundationDB.Linq; using System; - using System.Collections.Generic; using System.Diagnostics; using System.Globalization; using System.IO; using System.Linq; - using System.Text; using System.Threading; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Mathematics.Statistics; + using FoundationDB.Client; public class BenchRunner : IAsyncTest { @@ -40,15 +34,15 @@ public BenchRunner(BenchMode mode, int value = 1) this.Histo = new RobustHistogram(); } - public string Name { get { return "Bench" + this.Mode.ToString(); } } + public string Name => "Bench" + 
this.Mode.ToString(); - public int Value { get; private set; } + public int Value { get; set; } - public BenchMode Mode { get; private set; } + public BenchMode Mode { get; } - public IFdbDynamicSubspace Subspace { get; private set; } + public IDynamicKeySubspace Subspace { get; private set; } - public RobustHistogram Histo { get; private set; } + public RobustHistogram Histo { get; } /// @@ -57,7 +51,7 @@ public BenchRunner(BenchMode mode, int value = 1) public async Task Init(IFdbDatabase db, CancellationToken ct) { // open the folder where we will store everything - this.Subspace = await db.Directory.CreateOrOpenAsync("Benchmarks", cancellationToken: ct); + this.Subspace = await db.Directory.CreateOrOpenAsync("Benchmarks", ct: ct); } @@ -117,7 +111,7 @@ public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct) } else { - var foos = FdbTuple.EncodePrefixedKeys(foo, Enumerable.Range(1, this.Value).ToArray()); + var foos = TuPack.EncodePrefixedKeys(foo, Enumerable.Range(1, this.Value).ToArray()); await db.ReadAsync(tr => tr.GetValuesAsync(foos), ct); } break; @@ -132,10 +126,8 @@ public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct) var w = await db.GetAndWatch(foo, ct); var v = w.Value; - if (v == bar) - v = barf; - else - v = bar; + // swap + v = v == bar ? 
barf : bar; await db.WriteAsync((tr) => tr.Set(foo, v), ct); diff --git a/FoundationDB.Samples/Benchmarks/LeakTest.cs b/FoundationDB.Samples/Benchmarks/LeakTest.cs index 54e180f30..4a3468b28 100644 --- a/FoundationDB.Samples/Benchmarks/LeakTest.cs +++ b/FoundationDB.Samples/Benchmarks/LeakTest.cs @@ -2,14 +2,13 @@ namespace FoundationDB.Samples.Benchmarks { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; using System; using System.IO; using System.Text; using System.Threading; using System.Threading.Tasks; + using FoundationDB.Client; + using FoundationDB.Client.Utils; public class LeakTest : IAsyncTest { @@ -22,12 +21,15 @@ public LeakTest(int k, int m, int n, TimeSpan delay) this.Delay = delay; } - public int K { get; private set; } - public int M { get; private set; } - public int N { get; private set; } - public TimeSpan Delay { get; private set; } + public int K { get; } + + public int M { get; } + + public int N { get; } + + public TimeSpan Delay { get; } - public IFdbDynamicSubspace Subspace { get; private set; } + public IDynamicKeySubspace Subspace { get; private set; } /// /// Setup the initial state of the database @@ -35,7 +37,7 @@ public LeakTest(int k, int m, int n, TimeSpan delay) public async Task Init(IFdbDatabase db, CancellationToken ct) { // open the folder where we will store everything - this.Subspace = await db.Directory.CreateOrOpenAsync(new [] { "Benchmarks", "LeakTest" }, cancellationToken: ct); + this.Subspace = await db.Directory.CreateOrOpenAsync(new [] { "Benchmarks", "LeakTest" }, ct: ct); // clear all previous values await db.ClearRangeAsync(this.Subspace, ct); @@ -43,8 +45,8 @@ public async Task Init(IFdbDatabase db, CancellationToken ct) // insert all the classes await db.WriteAsync((tr) => { - tr.Set(this.Subspace.Key + FdbKey.MinValue, Slice.FromString("BEGIN")); - tr.Set(this.Subspace.Key + FdbKey.MaxValue, Slice.FromString("END")); + tr.Set(this.Subspace.GetPrefix() + 
FdbKey.MinValue, Slice.FromString("BEGIN")); + tr.Set(this.Subspace.GetPrefix() + FdbKey.MaxValue, Slice.FromString("END")); }, ct); } diff --git a/FoundationDB.Samples/FoundationDB.Samples.csproj b/FoundationDB.Samples/FoundationDB.Samples.csproj index ca72b7a19..30d498aaf 100644 --- a/FoundationDB.Samples/FoundationDB.Samples.csproj +++ b/FoundationDB.Samples/FoundationDB.Samples.csproj @@ -9,7 +9,7 @@ Properties FoundationDB.Samples FoundationDB.Samples - v4.5 + v4.6.1 512 diff --git a/FoundationDB.Samples/MessageQueue/MessageQueueRunner.cs b/FoundationDB.Samples/MessageQueue/MessageQueueRunner.cs index 3b3753f05..1ae9df56a 100644 --- a/FoundationDB.Samples/MessageQueue/MessageQueueRunner.cs +++ b/FoundationDB.Samples/MessageQueue/MessageQueueRunner.cs @@ -2,10 +2,6 @@ namespace FoundationDB.Samples.Tutorials { - using Doxense.Mathematics.Statistics; - using FoundationDB.Client; - using FoundationDB.Layers.Messaging; - using FoundationDB.Layers.Tuples; using System; using System.Collections.Generic; using System.Diagnostics; @@ -13,6 +9,10 @@ namespace FoundationDB.Samples.Tutorials using System.IO; using System.Threading; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Mathematics.Statistics; + using FoundationDB.Client; + using FoundationDB.Layers.Messaging; public class MessageQueueRunner : IAsyncTest { @@ -46,16 +46,19 @@ public MessageQueueRunner(string id, AgentRole role, TimeSpan delayMin, TimeSpan ); } - public string Id { get; private set; } - public AgentRole Role { get; private set; } - public TimeSpan DelayMin { get; private set; } - public TimeSpan DelayMax { get; private set; } + public string Id { get; } + + public AgentRole Role { get; } - public FdbSubspace Subspace { get; private set; } + public TimeSpan DelayMin { get; } + + public TimeSpan DelayMax { get; } + + public KeySubspace Subspace { get; private set; } public FdbWorkerPool WorkerPool { get; private set; } - public RobustTimeLine TimeLine { get; 
private set; } + public RobustTimeLine TimeLine { get; } /// /// Setup the initial state of the database @@ -63,7 +66,7 @@ public MessageQueueRunner(string id, AgentRole role, TimeSpan delayMin, TimeSpan public async Task Init(IFdbDatabase db, CancellationToken ct) { // open the folder where we will store everything - this.Subspace = await db.Directory.CreateOrOpenAsync(new [] { "Samples", "MessageQueueTest" }, cancellationToken: ct); + this.Subspace = await db.Directory.CreateOrOpenAsync(new [] { "Samples", "MessageQueueTest" }, ct: ct); this.WorkerPool = new FdbWorkerPool(this.Subspace); @@ -76,18 +79,13 @@ public async Task RunProducer(IFdbDatabase db, CancellationToken ct) { int cnt = 0; - var rnd = new Random(123456); - - DateTime last = DateTime.Now; - - rnd = new Random(); + var rnd = new Random(); this.TimeLine.Start(); while (!ct.IsCancellationRequested) { int k = cnt++; - Slice taskId = FdbTuple.EncodeKey(this.Id.GetHashCode(), k); + Slice taskId = TuPack.EncodeKey(this.Id.GetHashCode(), k); - var ts = Stopwatch.GetTimestamp(); string msg = "Message #" + k + " from producer " + this.Id + " (" + DateTime.UtcNow.ToString("O") + ")"; var latency = Stopwatch.StartNew(); @@ -98,7 +96,7 @@ public async Task RunProducer(IFdbDatabase db, CancellationToken ct) this.TimeLine.Add(latency.Elapsed.TotalMilliseconds); TimeSpan delay = TimeSpan.FromTicks(rnd.Next((int)this.DelayMin.Ticks, (int)this.DelayMax.Ticks)); - await Task.Delay(delay).ConfigureAwait(false); + await Task.Delay(delay, ct).ConfigureAwait(false); } this.TimeLine.Stop(); @@ -110,24 +108,20 @@ public async Task RunConsumer(IFdbDatabase db, CancellationToken ct) { var rnd = new Random(); - DateTime last = DateTime.Now; int received = 0; this.TimeLine.Start(); await this.WorkerPool.RunWorkerAsync(db, async (msg, _ct) => { - long ts = Stopwatch.GetTimestamp(); - var latency = msg.Received - msg.Scheduled; Interlocked.Increment(ref received); - Console.Write("[" + received.ToString("N0") + " msg, ~" + 
latency.TotalMilliseconds.ToString("N3") + " ms] " + msg.Id.ToAsciiOrHexaString() + " \r"); + Console.Write($"[{received:N0} msg, ~{latency.TotalMilliseconds:N3} ms] {msg.Id:P} \r"); this.TimeLine.Add(latency.TotalMilliseconds); - //Console.Write("."); TimeSpan delay = TimeSpan.FromTicks(rnd.Next((int)this.DelayMin.Ticks, (int)this.DelayMax.Ticks)); - await Task.Delay(delay).ConfigureAwait(false); + await Task.Delay(delay, ct).ConfigureAwait(false); }, ct); this.TimeLine.Stop(); @@ -161,22 +155,22 @@ public async Task RunStatus(IFdbDatabase db, CancellationToken ct) Console.WriteLine("> Idle"); await tr.Snapshot.GetRange(idleLocation.Keys.ToRange()).ForEachAsync((kvp) => { - Console.WriteLine("- Idle." + idleLocation.Keys.Unpack(kvp.Key) + " = " + kvp.Value.ToAsciiOrHexaString()); + Console.WriteLine($"- Idle.{idleLocation.Keys.Unpack(kvp.Key)} = {kvp.Value:V}"); }); Console.WriteLine("> Busy"); await tr.Snapshot.GetRange(busyLocation.Keys.ToRange()).ForEachAsync((kvp) => { - Console.WriteLine("- Busy." + busyLocation.Keys.Unpack(kvp.Key) + " = " + kvp.Value.ToAsciiOrHexaString()); + Console.WriteLine($"- Busy.{busyLocation.Keys.Unpack(kvp.Key)} = {kvp.Value:V}"); }); Console.WriteLine("> Unassigned"); await tr.Snapshot.GetRange(unassignedLocation.Keys.ToRange()).ForEachAsync((kvp) => { - Console.WriteLine("- Unassigned." + unassignedLocation.Keys.Unpack(kvp.Key) + " = " + kvp.Value.ToAsciiOrHexaString()); + Console.WriteLine($"- Unassigned.{unassignedLocation.Keys.Unpack(kvp.Key)} = {kvp.Value:V}"); }); Console.WriteLine("> Tasks"); await tr.Snapshot.GetRange(tasksLocation.Keys.ToRange()).ForEachAsync((kvp) => { - Console.WriteLine("- Tasks." + tasksLocation.Keys.Unpack(kvp.Key) + " = " + kvp.Value.ToAsciiOrHexaString()); + Console.WriteLine($"- Tasks.{tasksLocation.Keys.Unpack(kvp.Key)} = {kvp.Value:V}"); }); Console.WriteLine("<"); } @@ -184,7 +178,7 @@ await tr.Snapshot.GetRange(tasksLocation.Keys.ToRange()).ForEachAsync((kvp) => #region IAsyncTest... 
- public string Name { get { return "MessageQueueTest"; } } + public string Name => "MessageQueueTest"; public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct) { diff --git a/FoundationDB.Samples/PerfCounters.cs b/FoundationDB.Samples/PerfCounters.cs index 3c90d03a5..7da654142 100644 --- a/FoundationDB.Samples/PerfCounters.cs +++ b/FoundationDB.Samples/PerfCounters.cs @@ -1,22 +1,9 @@ -using FoundationDB.Async; -using FoundationDB.Client; -using FoundationDB.Filters.Logging; -using FoundationDB.Layers.Directories; -using FoundationDB.Layers.Tuples; -using FoundationDB.Samples.Benchmarks; -using FoundationDB.Samples.Tutorials; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Net; -using System.Text; -using System.Threading; -using System.Threading.Tasks; +//TODO: License for samples/tutorials ??? namespace FoundationDB.Samples { + using System; + using System.Diagnostics; public static class PerfCounters { diff --git a/FoundationDB.Samples/Program.cs b/FoundationDB.Samples/Program.cs index c2ba07494..cc1454548 100644 --- a/FoundationDB.Samples/Program.cs +++ b/FoundationDB.Samples/Program.cs @@ -1,22 +1,18 @@ -using FoundationDB.Async; -using FoundationDB.Client; -using FoundationDB.Filters.Logging; -using FoundationDB.Layers.Directories; -using FoundationDB.Layers.Tuples; -using FoundationDB.Samples.Benchmarks; -using FoundationDB.Samples.Tutorials; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Net; -using System.Text; -using System.Threading; -using System.Threading.Tasks; +//TODO: License for samples/tutorials ??? 
namespace FoundationDB.Samples { + using System; + using System.Diagnostics; + using System.IO; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + using Doxense.Async; + using FoundationDB.Client; + using FoundationDB.Filters.Logging; + using FoundationDB.Samples.Benchmarks; + using FoundationDB.Samples.Tutorials; public interface IAsyncTest { @@ -28,7 +24,7 @@ public class Program { private static IFdbDatabase Db; - private static bool LogEnabled = false; + private static bool LogEnabled; private static string CurrentDirectoryPath = "/"; static StreamWriter GetLogFile(string name) diff --git a/FoundationDB.Samples/Properties/AssemblyInfo.cs b/FoundationDB.Samples/Properties/AssemblyInfo.cs index 97fb8218a..40b3f6b55 100644 --- a/FoundationDB.Samples/Properties/AssemblyInfo.cs +++ b/FoundationDB.Samples/Properties/AssemblyInfo.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -27,7 +27,6 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY #endregion using System.Reflection; -using System.Runtime.CompilerServices; using System.Runtime.InteropServices; [assembly: AssemblyTitle("FoundationDB.Samples")] diff --git a/FoundationDB.Samples/Tutorials/ClassScheduling.cs b/FoundationDB.Samples/Tutorials/ClassScheduling.cs index 4b237ad70..cfdceb91f 100644 --- a/FoundationDB.Samples/Tutorials/ClassScheduling.cs +++ b/FoundationDB.Samples/Tutorials/ClassScheduling.cs @@ -2,19 +2,15 @@ namespace FoundationDB.Samples.Tutorials { - using FoundationDB.Client; - using FoundationDB.Layers.Directories; - using FoundationDB.Layers.Tuples; - using FoundationDB.Linq; using System; using System.Collections.Generic; - using System.Diagnostics; - using System.Globalization; using System.IO; using System.Linq; - using System.Text; using System.Threading; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Linq; + using FoundationDB.Client; public class ClassScheduling : IAsyncTest { @@ -32,9 +28,9 @@ public ClassScheduling() .ToArray(); } - public string[] ClassNames { get; private set; } + public string[] ClassNames { get; } - public IFdbDynamicSubspace Subspace { get; private set; } + public IDynamicKeySubspace Subspace { get; private set; } protected Slice ClassKey(string c) { @@ -46,9 +42,9 @@ protected Slice AttendsKey(string s, string c) return this.Subspace.Keys.Encode("attends", s, c); } - protected FdbKeyRange AttendsKeys(string s) + protected KeyRange AttendsKeys(string s) { - return this.Subspace.Keys.ToRange(FdbTuple.Create("attends", s)); + return this.Subspace.Keys.ToRange(STuple.Create("attends", s)); } /// @@ -57,7 +53,7 @@ protected FdbKeyRange AttendsKeys(string s) public async Task Init(IFdbDatabase db, CancellationToken ct) { // open the folder where we will store everything - this.Subspace = await db.Directory.CreateOrOpenAsync(new [] { "Tutorials", "ClassScheduling" }, cancellationToken: ct); + this.Subspace = await 
db.Directory.CreateOrOpenAsync(new [] { "Tutorials", "ClassScheduling" }, ct: ct); // clear all previous values await db.ClearRangeAsync(this.Subspace, ct); @@ -66,7 +62,7 @@ await db.WriteAsync((tr) => { foreach (var c in this.ClassNames) { - tr.Set(ClassKey(c), Slice.FromAscii("100")); + tr.Set(ClassKey(c), Slice.FromStringAscii("100")); } }, ct); @@ -78,8 +74,8 @@ await db.WriteAsync((tr) => /// public Task> AvailableClasses(IFdbReadOnlyTransaction tr) { - return tr.GetRange(this.Subspace.Keys.ToRange(FdbTuple.Create("class"))) - .Where(kvp => { int _; return Int32.TryParse(kvp.Value.ToAscii(), out _); }) // (step 3) + return tr.GetRange(this.Subspace.Keys.ToRange(STuple.Create("class"))) + .Where(kvp => { int _; return Int32.TryParse(kvp.Value.ToStringAscii(), out _); }) // (step 3) .Select(kvp => this.Subspace.Keys.Decode(kvp.Key)) .ToListAsync(); } @@ -95,7 +91,7 @@ public async Task Signup(IFdbTransaction tr, string s, string c) { // already signed up return; } - int seatsLeft = Int32.Parse((await tr.GetAsync(ClassKey(c))).ToAscii()); + int seatsLeft = Int32.Parse((await tr.GetAsync(ClassKey(c))).ToStringAscii()); if (seatsLeft <= 0) { throw new InvalidOperationException("No remaining seats"); @@ -104,7 +100,7 @@ public async Task Signup(IFdbTransaction tr, string s, string c) var classes = await tr.GetRange(AttendsKeys(s)).ToListAsync(); if (classes.Count >= 5) throw new InvalidOperationException("Too many classes"); - tr.Set(ClassKey(c), Slice.FromAscii((seatsLeft - 1).ToString())); + tr.Set(ClassKey(c), Slice.FromStringAscii((seatsLeft - 1).ToString())); tr.Set(rec, Slice.Empty); } @@ -119,8 +115,8 @@ public async Task Drop(IFdbTransaction tr, string s, string c) return; } - var students = Int32.Parse((await tr.GetAsync(ClassKey(c))).ToAscii()); - tr.Set(ClassKey(c), Slice.FromAscii((students + 1).ToString())); + var students = Int32.Parse((await tr.GetAsync(ClassKey(c))).ToStringAscii()); + tr.Set(ClassKey(c), Slice.FromStringAscii((students + 
1).ToString())); tr.Clear(rec); } @@ -204,7 +200,7 @@ public async Task IndecisiveStudent(IFdbDatabase db, int id, int ops, Cancellati #region IAsyncTest... - public string Name { get { return "ClassScheduling"; } } + public string Name => "ClassScheduling"; public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct) { diff --git a/FoundationDB.Samples/Utils/Sampler.cs b/FoundationDB.Samples/Utils/Sampler.cs index 12facd22c..1b6b3968e 100644 --- a/FoundationDB.Samples/Utils/Sampler.cs +++ b/FoundationDB.Samples/Utils/Sampler.cs @@ -2,19 +2,15 @@ namespace FoundationDB.Samples.Benchmarks { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using FoundationDB.Layers.Directories; using System; using System.Collections.Generic; + using System.Diagnostics; using System.Linq; using System.IO; - using System.Text; using System.Threading; using System.Threading.Tasks; using Doxense.Mathematics.Statistics; - using System.Diagnostics; + using FoundationDB.Client; public class SamplerTest : IAsyncTest { @@ -24,7 +20,7 @@ public SamplerTest(double ratio) this.Ratio = ratio; } - public double Ratio { get; private set; } + public double Ratio { get; } #region IAsyncTest... 
@@ -47,7 +43,7 @@ public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct) Console.WriteLine("# Detecting cluster topology..."); var servers = await db.QueryAsync(tr => tr .WithReadAccessToSystemKeys() - .GetRange(FdbKeyRange.StartsWith(Fdb.System.ServerList)) + .GetRange(KeyRange.StartsWith(Fdb.System.ServerList)) .Select(kvp => new { Node = kvp.Value.Substring(8, 16).ToHexaString(), @@ -73,7 +69,7 @@ public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct) if (sz > 500) sz = 500; //SAFETY if (sz < 50) sz = Math.Max(sz, Math.Min(50, ranges.Count)); - var samples = new List(); + var samples = new List(); for (int i = 0; i < sz; i++) { int p = rnd.Next(ranges.Count); @@ -112,8 +108,8 @@ public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct) long count = 0; int iter = 0; - var beginSelector = FdbKeySelector.FirstGreaterOrEqual(range.Begin); - var endSelector = FdbKeySelector.FirstGreaterOrEqual(range.End); + var beginSelector = KeySelector.FirstGreaterOrEqual(range.Begin); + var endSelector = KeySelector.FirstGreaterOrEqual(range.End); while (true) { FdbRangeChunk data = default(FdbRangeChunk); @@ -151,7 +147,7 @@ public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct) if (!data.HasMore) break; - beginSelector = FdbKeySelector.FirstGreaterThan(data.Last.Key); + beginSelector = KeySelector.FirstGreaterThan(data.Last.Key); ++iter; } diff --git a/FoundationDB.Storage.Memory.Test/App.config b/FoundationDB.Storage.Memory.Test/App.config deleted file mode 100644 index 6fc5ef6f8..000000000 --- a/FoundationDB.Storage.Memory.Test/App.config +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - - - \ No newline at end of file diff --git a/FoundationDB.Storage.Memory.Test/Collections/ColaOrderedDictionaryFacts.cs b/FoundationDB.Storage.Memory.Test/Collections/ColaOrderedDictionaryFacts.cs deleted file mode 100644 index be9e4989e..000000000 --- 
a/FoundationDB.Storage.Memory.Test/Collections/ColaOrderedDictionaryFacts.cs +++ /dev/null @@ -1,260 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core.Test -{ - using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Threading; - - [TestFixture] - public class ColaOrderedDictionaryFacts - { - - [Test] - public void Test_Empty_Dictionary() - { - var cola = new ColaOrderedDictionary(42, Comparer.Default, StringComparer.Ordinal); - Assert.That(cola.Count, Is.EqualTo(0)); - Assert.That(cola.KeyComparer, Is.SameAs(Comparer.Default)); - Assert.That(cola.ValueComparer, Is.SameAs(StringComparer.Ordinal)); - Assert.That(cola.Capacity, Is.EqualTo(63), "Capacity should be the next power of 2, minus 1"); - } - - [Test] - public void Test_ColaOrderedDictionary_Add() - { - var cmp = new CountingComparer(); - - var cola = new ColaOrderedDictionary(cmp); - Assert.That(cola.Count, Is.EqualTo(0)); - - cola.Add(42, "42"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - Assert.That(cola.ContainsKey(42), Is.True); - - cola.Add(1, "1"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - Assert.That(cola.ContainsKey(1), Is.True); - - cola.Add(66, "66"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(3)); - Assert.That(cola.ContainsKey(66), Is.True); - - cola.Add(123, "123"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(4)); - Assert.That(cola.ContainsKey(123), Is.True); - - for(int i = 1; i < 100; i++) - { - cola.Add(-i, (-i).ToString()); - } - cola.Debug_Dump(); - - - cmp.Reset(); - cola.ContainsKey(-99); - Console.WriteLine("Lookup last inserted: " + cmp.Count); - - cmp.Reset(); - cola.ContainsKey(42); - Console.WriteLine("Lookup first inserted: " + cmp.Count); - - cmp.Reset(); - cola.ContainsKey(77); - Console.WriteLine("Lookup not found: " + 
cmp.Count); - - var keys = new List(); - - foreach(var kvp in cola) - { - Assert.That(kvp.Value, Is.EqualTo(kvp.Key.ToString())); - keys.Add(kvp.Key); - } - - Assert.That(keys.Count, Is.EqualTo(cola.Count)); - Assert.That(keys, Is.Ordered); - Console.WriteLine(String.Join(", ", keys)); - - } - - [Test] - public void Test_ColaOrderedDictionary_Remove() - { - const int N = 100; - - // add a bunch of random values - var rnd = new Random(); - int seed = 1333019583;// rnd.Next(); - Console.WriteLine("Seed " + seed); - rnd = new Random(seed); - - var cola = new ColaOrderedDictionary(); - var list = new List(); - - int x = 0; - for (int i = 0; i < N; i++) - { - x += (1 + rnd.Next(10)); - string s = "value of " + x; - - cola.Add(x, s); - list.Add(x); - } - Assert.That(cola.Count, Is.EqualTo(N)); - - foreach(var item in list) - { - Assert.That(cola.ContainsKey(item), "{0} is missing", item); - } - - cola.Debug_Dump(); - - // now start removing items one by one - while(list.Count > 0) - { - int p = rnd.Next(list.Count); - x = list[p]; - list.RemoveAt(p); - - bool res = cola.Remove(x); - if (!res) cola.Debug_Dump(); - Assert.That(res, Is.True, "Remove({0}) failed", x); - - Assert.That(cola.Count, Is.EqualTo(list.Count), "After removing {0}", x); - } - - cola.Debug_Dump(); - - } - - [Test] - [Category("LongRunning")] - public void Test_MiniBench() - { - const int N = 10 * 1000 * 1000; - - var rnd = new Random(); - long x; - - - //WARMUP - var store = new ColaOrderedDictionary(); - store.Add(1, 1); - store.Add(42, 42); - store.Add(1234, 1234); - store.TryGetValue(42, out x); - store.TryGetValue(404, out x); - - #region Sequentially inserted.... 
- - Console.WriteLine("Inserting " + N.ToString("N0") + " sequential key/value pairs into a COLA ordered dictionary"); - GC.Collect(); - store = new ColaOrderedDictionary(); - long total = 0; - var sw = Stopwatch.StartNew(); - for (int i = 0; i < N; i++) - { - store.SetItem(i, i); - Interlocked.Increment(ref total); - if (i % (N / 10) == 0) Console.Write("."); - } - sw.Stop(); - - Console.WriteLine("done"); - Console.WriteLine("* Inserted: " + total.ToString("N0") + " keys"); - Console.WriteLine("* Elapsed : " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec"); - Console.WriteLine("* KPS: " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " key/sec"); - Console.WriteLine("* Latency : " + (sw.Elapsed.TotalMilliseconds * 1000000 / total).ToString("N1") + " nanos / insert"); - - // sequential reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - var _ = store.TryGetValue(i, out x); - if (!_ || x != i) Assert.Fail(); - } - sw.Stop(); - Console.WriteLine("SeqReadOrdered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - // random reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - var _ = store.TryGetValue(rnd.Next(N), out x); - if (!_) Assert.Fail(); - } - sw.Stop(); - Console.WriteLine("RndReadOrdered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - #endregion - - #region Randomly inserted.... 
- - Console.WriteLine("(preparing random insert list)"); - - var tmp = new long[N]; - var values = new long[N]; - for (int i = 0; i < N; i++) - { - tmp[i] = rnd.Next(N); - values[i] = i; - } - Array.Sort(tmp, values); - - Console.WriteLine("Inserting " + N.ToString("N0") + " sequential keys into a COLA store"); - GC.Collect(); - store = new ColaOrderedDictionary(); - total = 0; - sw.Restart(); - for (int i = 0; i < N; i++) - { - store.Add(values[i], i); - Interlocked.Increment(ref total); - if (i % (N / 10) == 0) Console.Write("."); - } - sw.Stop(); - - Console.WriteLine("done"); - Console.WriteLine("* Inserted: " + total.ToString("N0") + " keys"); - Console.WriteLine("* Elapsed : " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec"); - Console.WriteLine("* KPS : " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " key/sec"); - Console.WriteLine("* Latency : " + (sw.Elapsed.TotalMilliseconds * 1000000 / total).ToString("N1") + " nanos / insert"); - - // sequential reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - var _ = store.TryGetValue(i, out x); - if (!_) Assert.Fail(); - } - sw.Stop(); - Console.WriteLine("SeqReadUnordered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - // random reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - var _ = store.TryGetValue(rnd.Next(N), out x); - if (!_) Assert.Fail(); - } - sw.Stop(); - Console.WriteLine("RndReadUnordered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - #endregion - - } - - } -} diff --git a/FoundationDB.Storage.Memory.Test/Collections/ColaOrderedSetFacts.cs b/FoundationDB.Storage.Memory.Test/Collections/ColaOrderedSetFacts.cs deleted file mode 100644 index dd9208a97..000000000 --- a/FoundationDB.Storage.Memory.Test/Collections/ColaOrderedSetFacts.cs +++ 
/dev/null @@ -1,345 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core.Test -{ - using FoundationDB.Client; - using FoundationDB.Layers.Tuples; - using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Globalization; - using System.Linq; - using System.Threading; - - [TestFixture] - public class ColaOrderedSetFacts - { - - [Test] - public void Test_Empty_ColaSet() - { - var cola = new ColaOrderedSet(42, StringComparer.Ordinal); - Assert.That(cola.Count, Is.EqualTo(0)); - Assert.That(cola.Comparer, Is.SameAs(StringComparer.Ordinal)); - Assert.That(cola.Capacity, Is.EqualTo(63), "Capacity should be the next power of 2, minus 1"); - } - - [Test] - public void Test_Capacity_Is_Rounded_Up() - { - // default capacity is 4 levels, for 31 items max - var cola = new ColaOrderedSet(); - Assert.That(cola.Capacity, Is.EqualTo(31)); - - // 63 items completely fill 5 levels - cola = new ColaOrderedSet(63); - Assert.That(cola.Capacity, Is.EqualTo(63)); - - // 64 items need 6 levels, which can hold up to 127 items - cola = new ColaOrderedSet(64); - Assert.That(cola.Capacity, Is.EqualTo(127)); - } - - [Test] - public void Test_ColaOrderedSet_Add() - { - var cola = new ColaOrderedSet(); - Assert.That(cola.Count, Is.EqualTo(0)); - - cola.Add(42); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - Assert.That(cola.Contains(42), Is.True); - - cola.Add(1); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - Assert.That(cola.Contains(1), Is.True); - - cola.Add(66); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(3)); - Assert.That(cola.Contains(66), Is.True); - - cola.Add(123); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(4)); - Assert.That(cola.Contains(123), Is.True); - - cola.Add(-77); - cola.Add(-76); - cola.Add(-75); - cola.Add(-74); - 
cola.Add(-73); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(9)); - } - - [Test] - public void Test_ColaOrderedSet_Remove() - { - const int N = 1000; - - var cola = new ColaOrderedSet(); - var list = new List(); - - for (int i = 0; i < N;i++) - { - cola.Add(i); - list.Add(i); - } - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(N)); - - for (int i = 0; i < N; i++) - { - Assert.That(cola.Contains(list[i]), Is.True, "{0} is missing (offset {1})", list[i], i); - } - - var rnd = new Random(); - int seed = 1073704892; // rnd.Next(); - Console.WriteLine("Seed: " + seed); - rnd = new Random(seed); - int old = -1; - while (list.Count > 0) - { - int p = rnd.Next(list.Count); - int x = list[p]; - if (!cola.Contains(x)) - { - Assert.Fail("{0} disapeared after removing {1} ?", x, old); - } - - bool res = cola.Remove(x); - Assert.That(res, Is.True, "Removing {0} did nothing", x); - //Assert.That(cola.Contains(191), "blah {0}", x); - - list.RemoveAt(p); - Assert.That(cola.Count, Is.EqualTo(list.Count)); - old = x; - } - cola.Debug_Dump(); - - } - - [Test] - public void Test_CopyTo_Return_Ordered_Values() - { - const int N = 1000; - var rnd = new Random(); - - var cola = new ColaOrderedSet(); - - // create a list of random values - var numbers = new int[N]; - for (int i = 0, x = 0; i < N; i++) numbers[i] = (x += 1 + rnd.Next(10)); - - // insert the list in a random order - var list = new List(numbers); - while(list.Count > 0) - { - int p = rnd.Next(list.Count); - cola.Add(list[p]); - list.RemoveAt(p); - } - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(N)); - - // we can now sort the numbers to get a reference sequence - Array.Sort(numbers); - - // foreach()ing should return the value in natural order - list.Clear(); - foreach (var x in cola) list.Add(x); - Assert.That(list.Count, Is.EqualTo(N)); - Assert.That(list, Is.EqualTo(numbers)); - - // CopyTo() should produce the item in the expected order - var tmp = new int[N]; - cola.CopyTo(tmp); - 
Assert.That(tmp, Is.EqualTo(numbers)); - - // ToArray() should do the same thing - tmp = cola.ToArray(); - Assert.That(tmp, Is.EqualTo(numbers)); - - } - - [Test] - public void Test_Check_Costs() - { - const int N = 100; - var cmp = new CountingComparer(Comparer.Default); - var cola = new ColaOrderedSet(cmp); - - Console.WriteLine(String.Format(CultureInfo.InvariantCulture, "Parameters: N = {0}, Log(N) = {1}, Log2(N) = {2}, N.Log2(N) = {3}", N, Math.Log(N), Math.Log(N, 2), N * Math.Log(N, 2))); - - Console.WriteLine("Inserting (" + N + " items)"); - for (int i = 0; i < N; i++) - { - cola.Add(FdbTuple.EncodeKey(i << 1)); - } - - Console.WriteLine("> " + cmp.Count + " cmps (" + ((double)cmp.Count / N) + " / insert)"); - cola.Debug_Dump(); - - Console.WriteLine("Full scan (" + (N << 1) + " lookups)"); - cmp.Reset(); - int n = 0; - for (int i = 0; i < (N << 1); i++) - { - if (cola.Contains(FdbTuple.EncodeKey(i))) ++n; - } - Assert.That(n, Is.EqualTo(N)); - Console.WriteLine("> " + cmp.Count + " cmps (" + ((double)cmp.Count / (N << 1)) + " / lookup)"); - - cmp.Reset(); - n = 0; - int tail = Math.Min(16, N >> 1); - int offset = N - tail; - Console.WriteLine("Tail scan (" + tail + " lookups)"); - for (int i = 0; i < tail; i++) - { - if (cola.Contains(FdbTuple.EncodeKey(offset + i))) ++n; - } - Console.WriteLine("> " + cmp.Count + " cmps (" + ((double)cmp.Count / tail) + " / lookup)"); - - Console.WriteLine("ForEach"); - cmp.Reset(); - int p = 0; - foreach(var x in cola) - { - Assert.That(FdbTuple.DecodeKey(x), Is.EqualTo(p << 1)); - ++p; - } - Assert.That(p, Is.EqualTo(N)); - Console.WriteLine("> " + cmp.Count + " cmps (" + ((double)cmp.Count / N) + " / item)"); - } - - [Test] - [Category("LongRunning")] - public void Test_MiniBench() - { - const int N = 10 * 1000 * 1000; - - var rnd = new Random(); - long x; - - - //WARMUP - var store = new ColaOrderedSet(); - store.Add(1); - store.Add(42); - store.Add(1234); - store.TryGetValue(42, out x); - store.TryGetValue(404, out 
x); - - #region Sequentially inserted.... - - Console.WriteLine("Inserting " + N.ToString("N0") + " sequential key/value pairs into a COLA ordered set"); - GC.Collect(); - store = new ColaOrderedSet(); - long total = 0; - var sw = Stopwatch.StartNew(); - for (int i = 0; i < N; i++) - { - store.Add(i); - Interlocked.Increment(ref total); - if (i % (N / 10) == 0) Console.Write("."); - } - sw.Stop(); - - Console.WriteLine("done"); - Console.WriteLine("* Inserted: " + total.ToString("N0") + " keys"); - Console.WriteLine("* Elapsed : " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec"); - Console.WriteLine("* KPS: " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " key/sec"); - Console.WriteLine("* Latency : " + (sw.Elapsed.TotalMilliseconds * 1000000 / total).ToString("N1") + " nanos / insert"); - - // sequential reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - var _ = store.TryGetValue(i, out x); - if (!_ || x != i) Assert.Fail(); - } - sw.Stop(); - Console.WriteLine("SeqReadOrdered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - // random reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - var _ = store.TryGetValue(rnd.Next(N), out x); - if (!_) Assert.Fail(); - } - sw.Stop(); - Console.WriteLine("RndReadOrdered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - #endregion - - #region Randomly inserted.... 
- - Console.WriteLine("(preparing random insert list)"); - - var tmp = new long[N]; - var values = new long[N]; - for (int i = 0; i < N; i++) - { - tmp[i] = rnd.Next(N); - values[i] = i; - } - Array.Sort(tmp, values); - - Console.WriteLine("Inserting " + N.ToString("N0") + " sequential keys into a COLA store"); - GC.Collect(); - store = new ColaOrderedSet(); - total = 0; - sw.Restart(); - for (int i = 0; i < N; i++) - { - store.Add(values[i]); - Interlocked.Increment(ref total); - if (i % (N / 10) == 0) Console.Write("."); - } - sw.Stop(); - - Console.WriteLine("done"); - Console.WriteLine("* Inserted: " + total.ToString("N0") + " keys"); - Console.WriteLine("* Elapsed : " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec"); - Console.WriteLine("* KPS : " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " key/sec"); - Console.WriteLine("* Latency : " + (sw.Elapsed.TotalMilliseconds * 1000000 / total).ToString("N1") + " nanos / insert"); - - - // sequential reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - var _ = store.TryGetValue(i, out x); - if (!_ || x != i) Assert.Fail(); - } - sw.Stop(); - Console.WriteLine("SeqReadUnordered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - // random reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - var _ = store.TryGetValue(rnd.Next(N), out x); - if (!_) Assert.Fail(); - } - sw.Stop(); - Console.WriteLine("RndReadUnordered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - #endregion - - } - - } -} diff --git a/FoundationDB.Storage.Memory.Test/Collections/ColaRangeDictionaryFacts.cs b/FoundationDB.Storage.Memory.Test/Collections/ColaRangeDictionaryFacts.cs deleted file mode 100644 index 7b080a1ff..000000000 --- 
a/FoundationDB.Storage.Memory.Test/Collections/ColaRangeDictionaryFacts.cs +++ /dev/null @@ -1,518 +0,0 @@ -using FoundationDB.Client; -using FoundationDB.Layers.Tuples; -using NUnit.Framework; -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; - -namespace FoundationDB.Storage.Memory.Core.Test -{ - [TestFixture] - public class ColaRangeDictionaryFacts - { - - [Test] - public void Test_Empty_RangeDictionary() - { - var cola = new ColaRangeDictionary(0, Comparer.Default, StringComparer.Ordinal); - Assert.That(cola.Count, Is.EqualTo(0)); - Assert.That(cola.KeyComparer, Is.SameAs(Comparer.Default)); - Assert.That(cola.ValueComparer, Is.SameAs(StringComparer.Ordinal)); - Assert.That(cola.Capacity, Is.EqualTo(15), "Capacity should be the next power of 2, minus 1"); - Assert.That(cola.Bounds, Is.Not.Null); - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(0)); - } - - [Test] - public void Test_RangeDictionary_Insert_Single() - { - var cola = new ColaRangeDictionary(); - Assert.That(cola.Count, Is.EqualTo(0)); - - cola.Mark(0, 1, "A"); - Assert.That(cola.Count, Is.EqualTo(1)); - - var items = cola.ToArray(); - Assert.That(items.Length, Is.EqualTo(1)); - Assert.That(items[0].Begin, Is.EqualTo(0)); - Assert.That(items[0].End, Is.EqualTo(1)); - Assert.That(items[0].Value, Is.EqualTo("A")); - } - - [Test] - public void Test_RangeDictionary_Insert_In_Order_Non_Overlapping() - { - var cola = new ColaRangeDictionary(); - Assert.That(cola.Count, Is.EqualTo(0)); - - cola.Mark(0, 1, "A"); - Console.WriteLine("FIRST = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - cola.Mark(2, 3, "B"); - Console.WriteLine("SECOND = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + 
cola.Bounds); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - - cola.Mark(4, 5, "C"); - Console.WriteLine("THIRD = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - - Assert.That(cola.Count, Is.EqualTo(3)); - var runs = cola.ToArray(); - Assert.That(runs.Length, Is.EqualTo(3)); - - Assert.That(runs[0].Begin, Is.EqualTo(0)); - Assert.That(runs[0].End, Is.EqualTo(1)); - Assert.That(runs[0].Value, Is.EqualTo("A")); - - Assert.That(runs[1].Begin, Is.EqualTo(2)); - Assert.That(runs[1].End, Is.EqualTo(3)); - Assert.That(runs[1].Value, Is.EqualTo("B")); - - Assert.That(runs[2].Begin, Is.EqualTo(4)); - Assert.That(runs[2].End, Is.EqualTo(5)); - Assert.That(runs[2].Value, Is.EqualTo("C")); - - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(5)); - } - - [Test] - public void Test_RangeDictionary_Insert_Out_Of_Order_Non_Overlapping() - { - var cola = new ColaRangeDictionary(); - Assert.That(cola.Count, Is.EqualTo(0)); - - cola.Mark(0, 1, "A"); - Console.WriteLine("FIRST = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - cola.Mark(4, 5, "B"); - Console.WriteLine("SECOND = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - - cola.Mark(2, 3, "C"); - Console.WriteLine("THIRD = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - - Assert.That(cola.Count, Is.EqualTo(3)); - var runs = cola.ToArray(); - Assert.That(runs.Length, Is.EqualTo(3)); - - Assert.That(runs[0].Begin, Is.EqualTo(0)); - Assert.That(runs[0].End, Is.EqualTo(1)); - Assert.That(runs[0].Value, Is.EqualTo("A")); - - Assert.That(runs[1].Begin, Is.EqualTo(2)); - Assert.That(runs[1].End, Is.EqualTo(3)); - Assert.That(runs[1].Value, 
Is.EqualTo("C")); - - Assert.That(runs[2].Begin, Is.EqualTo(4)); - Assert.That(runs[2].End, Is.EqualTo(5)); - Assert.That(runs[2].Value, Is.EqualTo("B")); - - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(5)); - - } - [Test] - public void Test_RangeDictionary_Insert_Partially_Overlapping() - { - var cola = new ColaRangeDictionary(); - Assert.That(cola.Count, Is.EqualTo(0)); - - cola.Mark(0, 1, "A"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - cola.Mark(0, 2, "B"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - cola.Mark(1, 3, "C"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - - cola.Mark(-1, 2, "D"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - - Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - } - - [Test] - public void Test_RangeDictionary_Insert_Completly_Overlapping() - { - var cola = new ColaRangeDictionary(); - cola.Mark(4, 5, "A"); - Console.WriteLine("BEFORE = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - Assert.That(cola.Bounds.Begin, Is.EqualTo(4)); - Assert.That(cola.Bounds.End, Is.EqualTo(5)); - - // overlaps all the ranges at once - // 0123456789 0123456789 0123456789 - // ____A_____ + BBBBBBBBBB = BBBBBBBBBB - cola.Mark(0, 10, "B"); - Console.WriteLine("AFTER = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - - Assert.That(cola.Count, Is.EqualTo(1)); - var runs = cola.ToArray(); - Assert.That(runs.Length, Is.EqualTo(1)); - Assert.That(runs[0].Begin, Is.EqualTo(0)); - Assert.That(runs[0].End, Is.EqualTo(10)); - Assert.That(runs[0].Value, Is.EqualTo("B")); - - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(10)); - } - - [Test] - public void 
Test_RangeDictionary_Insert_Contained() - { - var cola = new ColaRangeDictionary(); - cola.Mark(0, 10, "A"); - Console.WriteLine("BEFORE = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(10)); - - // overlaps all the ranges at once - - // 0123456789 0123456789 0123456789 - // AAAAAAAAAA + ____B_____ = AAAABAAAAA - cola.Mark(4, 5, "B"); - Console.WriteLine("AFTER = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(3)); - var items = cola.ToArray(); - Assert.That(items.Length, Is.EqualTo(3)); - - Assert.That(items[0].Begin, Is.EqualTo(0)); - Assert.That(items[0].End, Is.EqualTo(4)); - Assert.That(items[0].Value, Is.EqualTo("A")); - - Assert.That(items[1].Begin, Is.EqualTo(4)); - Assert.That(items[1].End, Is.EqualTo(5)); - Assert.That(items[1].Value, Is.EqualTo("B")); - - Assert.That(items[2].Begin, Is.EqualTo(5)); - Assert.That(items[2].End, Is.EqualTo(10)); - Assert.That(items[2].Value, Is.EqualTo("A")); - - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(10)); - } - - [Test] - public void Test_RangeDictionary_Insert_That_Fits_Between_Two_Ranges() - { - var cola = new ColaRangeDictionary(); - cola.Mark(0, 1, "A"); - cola.Mark(2, 3, "B"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - - cola.Mark(1, 2, "C"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(3)); - - Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - - var items = cola.ToArray(); - Assert.That(items.Length, Is.EqualTo(3)); - - Assert.That(items[0].Begin, Is.EqualTo(0)); - Assert.That(items[0].End, Is.EqualTo(1)); - Assert.That(items[0].Value, Is.EqualTo("A")); - - 
Assert.That(items[1].Begin, Is.EqualTo(1)); - Assert.That(items[1].End, Is.EqualTo(2)); - Assert.That(items[1].Value, Is.EqualTo("C")); - - Assert.That(items[2].Begin, Is.EqualTo(2)); - Assert.That(items[2].End, Is.EqualTo(3)); - Assert.That(items[2].Value, Is.EqualTo("B")); - - } - - [Test] - public void Test_RangeDictionary_Insert_That_Join_Two_Ranges() - { - var cola = new ColaRangeDictionary(); - cola.Mark(0, 1, "A"); - cola.Mark(2, 3, "A"); - Console.WriteLine("BEFORE = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - - // A_A_ + _A__ = AAA_ - cola.Mark(1, 2, "A"); - Console.WriteLine("AFTER = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - cola.Debug_Dump(); - - Assert.That(cola.Count, Is.EqualTo(1)); - var runs = cola.ToArray(); - Assert.That(runs[0].Begin, Is.EqualTo(0)); - Assert.That(runs[0].End, Is.EqualTo(3)); - Assert.That(runs[0].Value, Is.EqualTo("A")); - - } - - [Test] - public void Test_RangeDictionary_Insert_That_Replace_All_Ranges() - { - var cola = new ColaRangeDictionary(); - cola.Mark(0, 1, "A"); - cola.Mark(2, 3, "A"); - cola.Mark(4, 5, "A"); - cola.Mark(6, 7, "A"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(4)); - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(7)); - - cola.Mark(-1, 10, "B"); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - Assert.That(cola.Bounds.Begin, Is.EqualTo(-1)); - Assert.That(cola.Bounds.End, Is.EqualTo(10)); - - Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - } - - [Test] - public void Test_RangeDictionary_Insert_Backwards() - { - const int N = 100; - - var cola = new ColaRangeDictionary(); - - for(int i = N; i > 0; i--) - { - int x = i << 1; - cola.Mark(x - 1, x, i.ToString()); - } - - Assert.That(cola.Count, Is.EqualTo(N)); - - 
Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - } - - enum RangeColor - { - Black, - White - } - - [Test] - public void Test_RangeDictionary_Black_And_White() - { - // we have a space from 0 <= x < 100 that is empty - // we insert a random serie of ranges that are either Black or White - // after each run, we check that all ranges are correctly ordered, merged, and so on. - - const int S = 100; // [0, 100) - const int N = 1000; // number of repetitions - const int K = 25; // max number of ranges inserted per run - - var rnd = new Random(); - int seed = rnd.Next(); - Console.WriteLine("Using random seed " + seed); - rnd = new Random(seed); - - for(int i = 0; i< N; i++) - { - var cola = new ColaRangeDictionary(); - - var witnessColors = new RangeColor?[S]; - var witnessIndexes = new int?[S]; - - // choose a random number of ranges - int k = rnd.Next(3, K); - - Trace.WriteLine(""); - Trace.WriteLine(String.Format("# Starting run {0} with {1} insertions", i, k)); - - int p = 0; - for(int j = 0; j |{0}|", String.Join("", witnessIndexes.Select(x => x.HasValue ? (char)('A' + x.Value) : ' '))); - Debug.WriteLine(" |{0}| + [{1,2}, {2,2}) = {3} > #{4,2} [ {5} ]", String.Join("", witnessColors.Select(w => !w.HasValue ? ' ' : w.Value == RangeColor.Black ? '#' : '°')), begin, end, color, cola.Count, String.Join(", ", cola)); - - ++p; - } - - // pack the witness list into ranges - var witnessRanges = new List>(); - RangeColor? 
prev = null; - p = 0; - for (int z = 1; z < S;z++) - { - if (witnessColors[z] != prev) - { // switch - - if (prev.HasValue) - { - witnessRanges.Add(FdbTuple.Create(p, z, prev.Value)); - } - p = z; - prev = witnessColors[z]; - } - } - - Trace.WriteLine(String.Format("> RANGES: #{0,2} [ {1} ]", cola.Count, String.Join(", ", cola))); - Trace.WriteLine(String.Format(" #{0,2} [ {1} ]", witnessRanges.Count, String.Join(", ", witnessRanges))); - - var counter = new int[S]; - var observedIndexes = new int?[S]; - var observedColors = new RangeColor?[S]; - p = 0; - foreach(var range in cola) - { - Assert.That(range.Begin < range.End, "Begin < End {0}", range); - for (int z = range.Begin; z < range.End; z++) - { - observedIndexes[z] = p; - counter[z]++; - observedColors[z] = range.Value; - } - ++p; - } - - Trace.WriteLine(String.Format("> INDEXS: |{0}|", String.Join("", observedIndexes.Select(x => x.HasValue ? (char)('A' + x.Value) : ' ')))); - Trace.WriteLine(String.Format(" |{0}|", String.Join("", witnessIndexes.Select(x => x.HasValue ? (char)('A' + x.Value) : ' ')))); - - Trace.WriteLine(String.Format("> COLORS: |{0}|", String.Join("", observedColors.Select(w => !w.HasValue ? ' ' : w.Value == RangeColor.Black ? '#' : '°')))); - Trace.WriteLine(String.Format(" |{0}|", String.Join("", witnessColors.Select(w => !w.HasValue ? ' ' : w.Value == RangeColor.Black ? '#' : '°')))); - - // verify the colors - foreach(var range in cola) - { - for (int z = range.Begin; z < range.End; z++) - { - Assert.That(range.Value, Is.EqualTo(witnessColors[z]), "#{0} color mismatch for {1}", z, range); - Assert.That(counter[z], Is.EqualTo(1), "Duplicate at offset #{0} for {1}", z, range); - } - } - - // verify that nothing was missed - for(int z = 0; z < S; z++) - { - if (witnessColors[z] == null) - { - if (counter[z] != 0) Trace.WriteLine("@ FAIL!!! 
|" + new string('-', z) + "^"); - Assert.That(counter[z], Is.EqualTo(0), "Should be void at offset {0}", z); - } - else - { - if (counter[z] != 1) Trace.WriteLine("@ FAIL!!! |" + new string('-', z) + "^"); - Assert.That(counter[z], Is.EqualTo(1), "Should be filled with {1} at offset {0}", z, witnessColors[z]); - } - } - } - } - - [Test] - public void Test_RangeDictionary_Insert_Random_Ranges() - { - const int N = 1000; - const int K = 1000 * 1000; - - var cola = new ColaRangeDictionary(); - - var rnd = new Random(); - int seed = 2040305906; // rnd.Next(); - Console.WriteLine("seed " + seed); - rnd = new Random(seed); - - int[] expected = new int[N]; - - var sw = Stopwatch.StartNew(); - for (int i = 0; i < K; i++) - { - if (rnd.Next(10000) < 42) - { - //Console.WriteLine("Clear"); - cola.Clear(); - } - else - { - - int x = rnd.Next(N); - int y = rnd.Next(2) == 0 ? x + 1 : rnd.Next(N); - if (y == x) ++y; - if (x <= y) - { - //Console.WriteLine(); - //Console.WriteLine("Add " + x + " ~ " + y + " = " + i); - cola.Mark(x, y, i); - } - else - { - //Console.WriteLine(); - //Console.WriteLine("ddA " + y + " ~ " + x + " = " + i); - cola.Mark(y, x, i); - } - } - //Console.WriteLine(" = " + cola + " -- <> = " + cola.Bounds); - //cola.Debug_Dump(); - - } - sw.Stop(); - - Console.WriteLine("Inserted " + K.ToString("N0") + " random ranges in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec"); - cola.Debug_Dump(); - - Console.WriteLine("Result = " + cola); - Console.WriteLine("Bounds = " + cola.Bounds); - - } - } -} diff --git a/FoundationDB.Storage.Memory.Test/Collections/ColaRangeSetFacts.cs b/FoundationDB.Storage.Memory.Test/Collections/ColaRangeSetFacts.cs deleted file mode 100644 index 4d0f2cdb8..000000000 --- a/FoundationDB.Storage.Memory.Test/Collections/ColaRangeSetFacts.cs +++ /dev/null @@ -1,163 +0,0 @@ -using FoundationDB.Client; -using FoundationDB.Layers.Tuples; -using NUnit.Framework; -using System; -using System.Collections.Generic; -using System.Globalization; 
-using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; - -namespace FoundationDB.Storage.Memory.Core.Test -{ - [TestFixture] - public class ColaRangeSetFacts - { - - [Test] - public void Test_Empty_RangeSet() - { - var cola = new ColaRangeSet(0, Comparer.Default); - Assert.That(cola.Count, Is.EqualTo(0)); - Assert.That(cola.Comparer, Is.SameAs(Comparer.Default)); - Assert.That(cola.Capacity, Is.EqualTo(15), "Capacity should be the next power of 2, minus 1"); - Assert.That(cola.Bounds, Is.Not.Null); - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(0)); - } - - [Test] - public void Test_RangeSet_Insert_Non_Overlapping() - { - var cola = new ColaRangeSet(); - Assert.That(cola.Count, Is.EqualTo(0)); - - cola.Mark(0, 1); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - cola.Mark(2, 3); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - - cola.Mark(4, 5); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(3)); - - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(5)); - - Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - } - - [Test] - public void Test_RangeSet_Insert_Partially_Overlapping() - { - var cola = new ColaRangeSet(); - Assert.That(cola.Count, Is.EqualTo(0)); - - cola.Mark(0, 1); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - cola.Mark(0, 2); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - cola.Mark(1, 3); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - cola.Mark(-1, 2); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - } - - [Test] - public void Test_RangeSet_Insert_Completly_Overlapping() - { - var cola = new ColaRangeSet(); - cola.Mark(1, 2); - 
cola.Mark(4, 5); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - Assert.That(cola.Bounds.Begin, Is.EqualTo(1)); - Assert.That(cola.Bounds.End, Is.EqualTo(5)); - - // overlaps the first range completely - cola.Mark(0, 3); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(5)); - - Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - - } - - [Test] - public void Test_RangeSet_Insert_That_Join_Two_Ranges() - { - var cola = new ColaRangeSet(); - cola.Mark(0, 1); - cola.Mark(2, 3); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(2)); - - cola.Mark(1, 2); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - - Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - } - - [Test] - public void Test_RangeSet_Insert_That_Replace_All_Ranges() - { - var cola = new ColaRangeSet(); - cola.Mark(0, 1); - cola.Mark(2, 3); - cola.Mark(4, 5); - cola.Mark(6, 7); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(4)); - Assert.That(cola.Bounds.Begin, Is.EqualTo(0)); - Assert.That(cola.Bounds.End, Is.EqualTo(7)); - - cola.Mark(-1, 10); - cola.Debug_Dump(); - Assert.That(cola.Count, Is.EqualTo(1)); - Assert.That(cola.Bounds.Begin, Is.EqualTo(-1)); - Assert.That(cola.Bounds.End, Is.EqualTo(10)); - - Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - } - - [Test] - public void Test_RangeSet_Insert_Backwards() - { - const int N = 100; - - var cola = new ColaRangeSet(); - - for(int i = N; i > 0; i--) - { - int x = i << 1; - cola.Mark(x - 1, x); - } - - Assert.That(cola.Count, Is.EqualTo(N)); - - Console.WriteLine("Result = { " + String.Join(", ", cola) + " }"); - Console.WriteLine("Bounds = " + cola.Bounds); - } - } -} diff --git 
a/FoundationDB.Storage.Memory.Test/Collections/ColaStoreFacts.cs b/FoundationDB.Storage.Memory.Test/Collections/ColaStoreFacts.cs deleted file mode 100644 index cf7b89763..000000000 --- a/FoundationDB.Storage.Memory.Test/Collections/ColaStoreFacts.cs +++ /dev/null @@ -1,462 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core.Test -{ - using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Linq; - using System.Text; - using System.Threading; - using System.Threading.Tasks; - - [TestFixture] - public class ColaStoreFacts - { - [Test] - public void Test_Bit_Twiddling() - { - Assert.That(ColaStore.LowestBit(0), Is.EqualTo(0)); - Assert.That(ColaStore.HighestBit(0), Is.EqualTo(0)); - - Assert.That(ColaStore.LowestBit(42), Is.EqualTo(1)); - Assert.That(ColaStore.HighestBit(42), Is.EqualTo(5)); - - for (int i = 1; i < 30; i++) - { - int x = 1 << i; - Assert.That(ColaStore.LowestBit(x), Is.EqualTo(i)); - Assert.That(ColaStore.HighestBit(x), Is.EqualTo(i)); - - Assert.That(ColaStore.HighestBit(x - 1), Is.EqualTo(i - 1)); - Assert.That(ColaStore.LowestBit(x - 1), Is.EqualTo(0)); - } - - Assert.That(ColaStore.LowestBit(0x60000000), Is.EqualTo(29)); - for (int i = 1; i < 30; i++) - { - int x = int.MaxValue - ((1 << i) - 1); - Assert.That(ColaStore.LowestBit(x), Is.EqualTo(i), "i={0}, x={1} : {2}", i, x.ToString("X8"), Convert.ToString(x, 2)); - } - } - - [Test] - public void Test_Map_Index_To_Address() - { - // index => (level, offset) - - int level, offset; - for (int i = 0; i < 1024; i++) - { - level = ColaStore.FromIndex(i, out offset); - Assert.That(((1 << level) - 1) + offset, Is.EqualTo(i), "{0} => ({1}, {2})", i, level, offset); - } - } - - [Test] - public void Test_Map_Address_Index() - { - // index => (level, offset) - - for (int level = 0; level <= 10; level++) - { - int n = 1 << 
level; - for (int offset = 0; offset < n; offset++) - { - int index = ColaStore.ToIndex(level, offset); - Assert.That(index, Is.EqualTo(n - 1 + offset), "({0}, {1}) => {2}", level, offset, index); - } - } - } - - [Test] - public void Test_Map_Offset_To_Index() - { - //N = 1 - // > 0 [0] - Assert.That(ColaStore.MapOffsetToIndex(1, 0), Is.EqualTo(0)); - Assert.That(() => ColaStore.MapOffsetToIndex(1, 1), Throws.InstanceOf()); - - //N = 2 - // > 0 [_] - // > 1 [0, 1] - Assert.That(ColaStore.MapOffsetToIndex(2, 0), Is.EqualTo(1)); - Assert.That(ColaStore.MapOffsetToIndex(2, 1), Is.EqualTo(2)); - Assert.That(() => ColaStore.MapOffsetToIndex(2, 2), Throws.InstanceOf()); - - //N = 3 - // > 0 [2] - // > 1 [0, 1] - Assert.That(ColaStore.MapOffsetToIndex(3, 0), Is.EqualTo(1)); - Assert.That(ColaStore.MapOffsetToIndex(3, 1), Is.EqualTo(2)); - Assert.That(ColaStore.MapOffsetToIndex(3, 2), Is.EqualTo(0)); - Assert.That(() => ColaStore.MapOffsetToIndex(3, 3), Throws.InstanceOf()); - - //N = 5 - // > 0 [4] - // > 1 [_, _] - // > 2 [0, 1, 2, 3] - Assert.That(ColaStore.MapOffsetToIndex(5, 0), Is.EqualTo(3)); - Assert.That(ColaStore.MapOffsetToIndex(5, 1), Is.EqualTo(4)); - Assert.That(ColaStore.MapOffsetToIndex(5, 2), Is.EqualTo(5)); - Assert.That(ColaStore.MapOffsetToIndex(5, 3), Is.EqualTo(6)); - Assert.That(ColaStore.MapOffsetToIndex(5, 4), Is.EqualTo(0)); - Assert.That(() => ColaStore.MapOffsetToIndex(5, 5), Throws.InstanceOf()); - - // N = 10 - // > 0 [_] - // > 1 [8,9] - // > 2 [_,_,_,_] - // > 3 [0,1,2,3,4,5,6,7] - for (int i = 0; i < 8; i++) Assert.That(ColaStore.MapOffsetToIndex(10, i), Is.EqualTo(7 + i), "MapOffset(10, {0})", i); - for (int i = 8; i < 10; i++) Assert.That(ColaStore.MapOffsetToIndex(10, i), Is.EqualTo(1 + (i - 8)), "MapOffset(10, {0})", i); - Assert.That(() => ColaStore.MapOffsetToIndex(10, 123), Throws.InstanceOf()); - } - - [Test] - public void Test_ColaStore_Iterator_Seek() - { - - var store = new ColaStore(0, Comparer.Default); - - for (int i = 0; i < 
10; i++) - { - store.Insert(i); - } - store.Debug_Dump(); - - var iterator = store.GetIterator(); - - Assert.That(iterator.Seek(5, true), Is.True); - Assert.That(iterator.Current, Is.EqualTo(5)); - - Assert.That(iterator.Seek(5, false), Is.True); - Assert.That(iterator.Current, Is.EqualTo(4)); - - Assert.That(iterator.Seek(9, true), Is.True); - Assert.That(iterator.Current, Is.EqualTo(9)); - - Assert.That(iterator.Seek(9, false), Is.True); - Assert.That(iterator.Current, Is.EqualTo(8)); - - Assert.That(iterator.Seek(0, true), Is.True); - Assert.That(iterator.Current, Is.EqualTo(0)); - - Assert.That(iterator.Seek(0, false), Is.False); - Assert.That(iterator.Current, Is.Null); - - Assert.That(iterator.Seek(10, true), Is.True); - Assert.That(iterator.Current, Is.EqualTo(9)); - - Assert.That(iterator.Seek(10, false), Is.True); - Assert.That(iterator.Current, Is.EqualTo(9)); - - } - - [Test] - public void Test_ColaStore_Iterator_Seek_Randomized() - { - const int N = 1000; - - var store = new ColaStore(0, Comparer.Default); - - var rnd = new Random(); - int seed = rnd.Next(); - Console.WriteLine("seed = " + seed); - rnd = new Random(seed); - - var list = Enumerable.Range(0, N).ToList(); - while(list.Count > 0) - { - int p = rnd.Next(list.Count); - store.Insert(list[p]); - list.RemoveAt(p); - } - store.Debug_Dump(); - - for (int i = 0; i < N; i++) - { - var iterator = store.GetIterator(); - - int p = rnd.Next(N); - bool orEqual = rnd.Next(2) == 0; - - bool res = iterator.Seek(p, orEqual); - - if (orEqual) - { // the key should exist - Assert.That(res, Is.True, "Seek({0}, '<=')", p); - Assert.That(iterator.Current, Is.EqualTo(p), "Seek({0}, '<=')", p); - Assert.That(iterator.Valid, Is.True, "Seek({0}, '<=')", p); - } - else if (p == 0) - { // there is no key before the first - Assert.That(res, Is.False, "Seek(0, '<')"); - Assert.That(iterator.Current, Is.Null, "Seek(0, '<')"); - Assert.That(iterator.Valid, Is.False, "Seek(0, '<')"); - } - else - { // the key should exist - 
Assert.That(res, Is.True, "Seek({0}, '<')", p); - Assert.That(iterator.Current, Is.EqualTo(p - 1), "Seek({0}, '<')", p); - Assert.That(iterator.Valid, Is.True, "Seek({0}, '<')", p); - } - } - - } - - [Test] - public void Test_ColaStore_Iterator_Seek_Then_Next_Randomized() - { - const int N = 1000; - const int K = 10; - - var store = new ColaStore(0, Comparer.Default); - - var rnd = new Random(); - int seed = rnd.Next(); - Console.WriteLine("seed = " + seed); - rnd = new Random(seed); - - var list = Enumerable.Range(0, N).ToList(); - while (list.Count > 0) - { - int p = rnd.Next(list.Count); - store.Insert(list[p]); - list.RemoveAt(p); - } - store.Debug_Dump(); - - for (int i = 0; i < N; i++) - { - var iterator = store.GetIterator(); - - int p = rnd.Next(N); - bool orEqual = rnd.Next(2) == 0; - - if (p == 0 && !orEqual) continue; //TODO: what to do for this case ? - - Assert.That(iterator.Seek(p, orEqual), Is.True); - int? x = iterator.Current; - Assert.That(x, Is.EqualTo(orEqual ? p : p - 1)); - - // all the next should be ordered (starting from p) - while (x < N - 1) - { - Assert.That(iterator.Next(), Is.True, "Seek({0}).Current({1}).Next()", p, x); - Assert.That(iterator.Current, Is.EqualTo(x + 1), "Seek({0}).Current({1}).Next()", p, x); - ++x; - } - // the following Next() should go past the end - Assert.That(iterator.Next(), Is.False); - Assert.That(iterator.Current, Is.Null); - Assert.That(iterator.Valid, Is.False); - - // re-seek to the original location - Assert.That(iterator.Seek(p, orEqual), Is.True); - x = iterator.Current; - Assert.That(x, Is.EqualTo(orEqual ? 
p : p - 1)); - - // now go backwards - while (x > 0) - { - Assert.That(iterator.Previous(), Is.True, "Seek({0}).Current({1}).Previous()", p, x); - Assert.That(iterator.Current, Is.EqualTo(x - 1), "Seek({0}).Current({1}).Previous()", p, x); - --x; - } - // the following Previous() should go past the beginning - Assert.That(iterator.Previous(), Is.False); - Assert.That(iterator.Current, Is.Null); - Assert.That(iterator.Valid, Is.False); - - if (p >= K && p < N - K) - { // jitter dance - - // start to original location - Assert.That(iterator.Seek(p, true), Is.True); - Assert.That(iterator.Current, Is.EqualTo(p)); - - var sb = new StringBuilder(); - sb.Append("Seek -> "); - for(int j = 0; j < K; j++) - { - x = iterator.Current; - sb.Append(iterator.Current); - if (rnd.Next(2) == 0) - { // next - sb.Append(" -> "); - Assert.That(iterator.Next(), Is.True, "{0}", sb); - Assert.That(iterator.Current, Is.EqualTo(x + 1), "{0} = ?", sb); - } - else - { // prev - sb.Append(" <- "); - Assert.That(iterator.Previous(), Is.True, "{0}", sb); - Assert.That(iterator.Current, Is.EqualTo(x - 1), "{0} = ?", sb); - } - } - } - - } - - } - - [Test] - [Category("LongRunning")] - public void Test_MiniBench() - { - const int N = (1 << 23) - 1; // 10 * 1000 * 1000; - - var rnd = new Random(); - int offset, level; - long x; - - - //WARMUP - var store = new ColaStore(0, Comparer.Default); - store.Insert(1); - store.Insert(42); - store.Insert(1234); - level = store.Find(1, out offset, out x); - - const int BS = (N + 1) / 128; - var timings = new List(BS); - timings.Add(TimeSpan.Zero); - timings.Clear(); - - #region Sequentially inserted.... 
- - Console.WriteLine("Inserting {0} sequential keys into a COLA store", N); - GC.Collect(); - store = new ColaStore(0, Comparer.Default); - long total = 0; - var sw = Stopwatch.StartNew(); - for (int i = 0; i < N; i++) - { - - int y = rnd.Next(100); - - level = store.Find(y, out offset, out x); - if (level < 0) store.Insert(i); - else store.SetAt(level, offset, x); - - Interlocked.Increment(ref total); - if ((i % BS) == BS - 1) - { - sw.Stop(); - timings.Add(sw.Elapsed); - Console.Write("."); - sw.Start(); - } - } - sw.Stop(); - - Console.WriteLine("done"); - Console.WriteLine("* Inserted: " + total.ToString("N0") + " keys"); - Console.WriteLine("* Elapsed : " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec"); - Console.WriteLine("* KPS: " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " key/sec"); - Console.WriteLine("* Latency : " + (sw.Elapsed.TotalMilliseconds * 1000000 / total).ToString("N1") + " nanos / insert"); - for (int i = 0; i < timings.Count; i++) - { - Console.WriteLine("" + ((i + 1) * BS).ToString() + "\t" + timings[i].TotalSeconds); - } - return; - // sequential reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - level = store.Find(i, out offset, out x); - } - sw.Stop(); - Console.WriteLine("SeqReadOrdered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - // random reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - level = store.Find(rnd.Next(N), out offset, out x); - } - sw.Stop(); - Console.WriteLine("RndReadOrdered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - #endregion - - #region Randomly inserted.... 
- - Console.WriteLine("(preparing random insert list)"); - - var tmp = new long[N]; - var values = new long[N]; - for (int i = 0; i < N; i++) - { - tmp[i] = rnd.Next(N); - values[i] = i; - } - Array.Sort(tmp, values); - - Console.WriteLine("Inserting " + N.ToString("N0") + " sequential keys into a COLA store"); - GC.Collect(); - store = new ColaStore(0, Comparer.Default); - total = 0; - - timings.Clear(); - - sw.Restart(); - for (int i = 0; i < N; i++) - { - level = store.Find(i, out offset, out x); - store.Insert(values[i]); - Interlocked.Increment(ref total); - if ((i % BS) == BS - 1) - { - sw.Stop(); - timings.Add(sw.Elapsed); - Console.Write("."); - sw.Start(); - } - } - sw.Stop(); - - Console.WriteLine("done"); - Console.WriteLine("* Inserted: " + total.ToString("N0") + " keys"); - Console.WriteLine("* Elapsed : " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec"); - Console.WriteLine("* KPS : " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " key/sec"); - Console.WriteLine("* Latency : " + (sw.Elapsed.TotalMilliseconds * 1000000 / total).ToString("N1") + " nanos / insert"); - - for (int i = 0; i < timings.Count;i++) - { - Console.WriteLine("" + ((i + 1) * BS).ToString() + "\t" + timings[i].TotalSeconds); - } - - // sequential reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - level = store.Find(i, out offset, out x); - } - sw.Stop(); - Console.WriteLine("SeqReadUnordered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - // random reads - - sw.Restart(); - for (int i = 0; i < total; i++) - { - level = store.Find(rnd.Next(N), out offset, out x); - } - sw.Stop(); - Console.WriteLine("RndReadUnordered: " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - #endregion - - } - - } - -} diff --git 
a/FoundationDB.Storage.Memory.Test/Collections/CountingComparer`1.cs b/FoundationDB.Storage.Memory.Test/Collections/CountingComparer`1.cs deleted file mode 100644 index 31e178267..000000000 --- a/FoundationDB.Storage.Memory.Test/Collections/CountingComparer`1.cs +++ /dev/null @@ -1,42 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core.Test -{ - using System; - using System.Collections.Generic; - using System.Threading; - - /// Wrapper for an that counts the number of calls to the method - public class CountingComparer : IComparer - { - - private int m_count; - private IComparer m_comparer; - - - public CountingComparer() - : this(Comparer.Default) - { } - - public CountingComparer(IComparer comparer) - { - m_comparer = comparer; - } - - public int Count { get { return Volatile.Read(ref m_count); } } - - public void Reset() - { - Volatile.Write(ref m_count, 0); - } - - public int Compare(T x, T y) - { - Interlocked.Increment(ref m_count); - return m_comparer.Compare(x, y); - } - } - -} diff --git a/FoundationDB.Storage.Memory.Test/FdbTest.cs b/FoundationDB.Storage.Memory.Test/FdbTest.cs deleted file mode 100644 index 48b38ddcc..000000000 --- a/FoundationDB.Storage.Memory.Test/FdbTest.cs +++ /dev/null @@ -1,141 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013, Doxense SARL -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. 
- * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Storage.Memory.Tests -{ - using FoundationDB.Client; - using FoundationDB.Layers.Directories; - using NUnit.Framework; - using System; - using System.Diagnostics; - using System.Globalization; - using System.Threading; - using System.Threading.Tasks; - - public abstract class FdbTest - { - - private CancellationTokenSource m_cts; - private CancellationToken m_ct; - - [TestFixtureSetUp] - protected void BeforeAllTests() - { - Trace.WriteLine("### " + this.GetType().FullName + " starting"); - //TODO? - } - - [SetUp] - protected void BeforeEachTest() - { - lock (this) - { - m_cts = null; - m_ct = CancellationToken.None; - } - Trace.WriteLine("=== " + TestContext.CurrentContext.Test.FullName + " === " + DateTime.Now.TimeOfDay); - } - - [TearDown] - protected void AfterEachTest() - { - if (m_cts != null) - { - try { m_cts.Cancel(); } catch { } - m_cts.Dispose(); - } - } - - [TestFixtureTearDown] - protected void AfterAllTests() - { - //TODO? 
- Trace.WriteLine("### " + this.GetType().FullName + " completed"); - } - - /// Cancellation token usable by any test - protected CancellationToken Cancellation - { - [DebuggerStepThrough] - get - { - if (m_cts == null) SetupCancellation(); - return m_ct; - } - } - - private void SetupCancellation() - { - lock (this) - { - if (m_cts == null) - { - m_cts = new CancellationTokenSource(); - m_ct = m_cts.Token; - } - } - } - - #region Logging... - - // These methods are just there to help with the problem of culture-aware string formatting - - [DebuggerStepThrough] - protected static void Log() - { - Console.WriteLine(); - } - - [DebuggerStepThrough] - protected static void Log(string text) - { - Console.WriteLine(text); - } - - [DebuggerStepThrough] - protected static void Log(string format, object arg0) - { - Console.WriteLine(String.Format(CultureInfo.InvariantCulture, format, arg0)); - } - - [DebuggerStepThrough] - protected static void Log(string format, object arg0, object arg1) - { - Console.WriteLine(String.Format(CultureInfo.InvariantCulture, format, arg0, arg1)); - } - - [DebuggerStepThrough] - protected static void Log(string format, params object[] args) - { - Console.WriteLine(String.Format(CultureInfo.InvariantCulture, format, args)); - } - - #endregion - - } -} diff --git a/FoundationDB.Storage.Memory.Test/FoundationDB.Storage.Memory.Test.csproj b/FoundationDB.Storage.Memory.Test/FoundationDB.Storage.Memory.Test.csproj deleted file mode 100644 index fddb13045..000000000 --- a/FoundationDB.Storage.Memory.Test/FoundationDB.Storage.Memory.Test.csproj +++ /dev/null @@ -1,97 +0,0 @@ - - - - - Debug - AnyCPU - {AF76A8D4-E682-4E72-B656-BE3D935712DB} - Exe - Properties - FoundationDB.Storage.Memory.Test - FoundationDB.Storage.Memory.Test - v4.5 - 512 - ..\ - true - - - AnyCPU - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - false - false - - - AnyCPU - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false - - - true - - - 
FoundationDB.Storage.Memory.Test.snk - - - - False - ..\packages\NUnit.2.6.4\lib\nunit.framework.dll - - - - - - - - - - - - - - - - - - - - - - - - - - - - {773166b7-de74-4fcc-845c-84080cc89533} - FoundationDB.Client - - - {7c7717d6-a1e7-4541-af8b-1ac762b5ed0f} - FoundationDB.Layers.Common - - - {cc98db39-31a1-4642-b4fc-9cb0ab26bf2e} - FoundationDB.Storage.Memory - - - - - - \ No newline at end of file diff --git a/FoundationDB.Storage.Memory.Test/FoundationDB.Storage.Memory.Test.snk b/FoundationDB.Storage.Memory.Test/FoundationDB.Storage.Memory.Test.snk deleted file mode 100644 index 5b29927c3..000000000 Binary files a/FoundationDB.Storage.Memory.Test/FoundationDB.Storage.Memory.Test.snk and /dev/null differ diff --git a/FoundationDB.Storage.Memory.Test/Program.cs b/FoundationDB.Storage.Memory.Test/Program.cs deleted file mode 100644 index 17de38680..000000000 --- a/FoundationDB.Storage.Memory.Test/Program.cs +++ /dev/null @@ -1,33 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory -{ - using System; - using FoundationDB.Storage.Memory.API.Tests; - using FoundationDB.Storage.Memory.Core.Test; - - public class Program - { - - public static void Main() - { - Console.WriteLine(IntPtr.Size == 4 ? 
"Running in 32-bit mode" : "Running in 64-bit mode"); - - try - { - //new ColaStoreFacts().Test_MiniBench(); - //new ColaOrderedSetFacts().Test_MiniBench(); - //new ColaOrderedDictionaryFacts().Test_MiniBench(); - //new SnapshotFacts().Test_Can_Save_And_Reload_Snapshot().GetAwaiter().GetResult(); - new Benchmarks().MiniBench().GetAwaiter().GetResult(); - } - catch(Exception e) - { - Console.Error.WriteLine(e); - } - } - - } -} diff --git a/FoundationDB.Storage.Memory.Test/Properties/AssemblyInfo.cs b/FoundationDB.Storage.Memory.Test/Properties/AssemblyInfo.cs deleted file mode 100644 index 259c78cc9..000000000 --- a/FoundationDB.Storage.Memory.Test/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,36 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("FoundationDB.Storage.Memory.Test")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("FoundationDB.Storage.Memory.Test")] -[assembly: AssemblyCopyright("Copyright © 2013")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. 
-[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("f7827ce1-4e5d-4960-aaa6-fb8523cc783d")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/FoundationDB.Storage.Memory.Test/Transactions/Benchmarks.cs b/FoundationDB.Storage.Memory.Test/Transactions/Benchmarks.cs deleted file mode 100644 index c4804a046..000000000 --- a/FoundationDB.Storage.Memory.Test/Transactions/Benchmarks.cs +++ /dev/null @@ -1,425 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.API.Tests -{ - using FoundationDB.Client; - using FoundationDB.Layers.Directories; - using FoundationDB.Layers.Indexing; - using FoundationDB.Linq; - using FoundationDB.Storage.Memory.Tests; - using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - - [TestFixture] - [Category("LongRunning")] - public class Benchmarks : FdbTest - { - - private static void DumpResult(string label, long total, long trans, TimeSpan elapsed) - { - Log( - "{0,-12}: {1,10:N0} keys in {2,4:N3} sec => {3,9:N0} kps, {4,7:N0} tps", - label, - total, - elapsed.TotalSeconds, - total / elapsed.TotalSeconds, - trans / elapsed.TotalSeconds - ); - } - - private static void DumpMemory(bool collect = false) - { - if (collect) - { - GC.Collect(); - GC.WaitForPendingFinalizers(); - GC.Collect(); - } - Log("Total memory: Managed={0:N1} KiB, WorkingSet={1:N1} KiB", 
GC.GetTotalMemory(false) / 1024.0, Environment.WorkingSet / 1024.0); - } - - [Test] - public async Task MiniBench() - { - const int M = 1 * 1000 * 1000; - const int B = 100; - const int ENTROPY = 10 * 1000; - - const int T = M / B; - const int KEYSIZE = 10; - const int VALUESIZE = 100; - const bool RANDOM = false; - - var rnd = new Random(); - - //WARMUP - using (var db = MemoryDatabase.CreateNew("FOO")) - { - await db.WriteAsync((tr) => tr.Set(db.Keys.Encode("hello"), Slice.FromString("world")), this.Cancellation); - Slice.Random(rnd, KEYSIZE); - Slice.Random(rnd, VALUESIZE); - } - - Log("Inserting {0}-bytes {1} keys / {2}-bytes values, in {3:N0} transactions", KEYSIZE, RANDOM ? "random" : "ordered", VALUESIZE, T); - - bool random = RANDOM; - string fmt = "D" + KEYSIZE; - using (var db = MemoryDatabase.CreateNew("FOO")) - { - DumpMemory(collect: true); - - long total = 0; - - var payload = new byte[ENTROPY + VALUESIZE]; - rnd.NextBytes(payload); - // help with compression by doubling every byte - for (int i = 0; i < payload.Length; i += 2) payload[i + 1] = payload[i]; - - var sw = Stopwatch.StartNew(); - sw.Stop(); - - sw.Restart(); - for (int i = 0; i < T; i++) - { - using (var tr = db.BeginTransaction(this.Cancellation)) - { - for (int j = 0; j < B; j++) - { - Slice key; - if (random) - { - do - { - key = Slice.Random(rnd, KEYSIZE); - } - while (key[0] == 255); - } - else - { - int x = i * B + j; - //x = x % 1000; - key = Slice.FromString(x.ToString(fmt)); - } - - tr.Set(key, Slice.Create(payload, rnd.Next(ENTROPY), VALUESIZE)); - Interlocked.Increment(ref total); - } - await tr.CommitAsync().ConfigureAwait(false); - } - if (i % 1000 == 0) Console.Write(".");// + (i * B).ToString("D10")); - } - - sw.Stop(); - Log("done"); - Log("* Inserted: {0:N0} keys", total); - Log("* Elapsed : {0:N3} sec", sw.Elapsed.TotalSeconds); - Log("* TPS: {0:N0} transactions/sec", T / sw.Elapsed.TotalSeconds); - Log("* KPS: {0:N0} keys/sec", total / sw.Elapsed.TotalSeconds); - Log("* 
BPS: {0:N0} bytes/sec", (total * (KEYSIZE + VALUESIZE)) / sw.Elapsed.TotalSeconds); - - DumpMemory(collect: true); - - db.Debug_Dump(false); - - DumpResult("WriteSeq" + B, total, total / B, sw.Elapsed); - - string path = @".\\minibench.pndb"; - Log("Saving {0} ...", path); - sw.Restart(); - await db.SaveSnapshotAsync(path); - sw.Stop(); - Log("* Saved {0:N0} bytes in {1:N3} sec", new System.IO.FileInfo(path).Length, sw.Elapsed.TotalSeconds); - - Log("Warming up reads..."); - var data = await db.GetValuesAsync(Enumerable.Range(0, 100).Select(i => Slice.FromString(i.ToString(fmt))), this.Cancellation); - - Log("Starting read tests..."); - - #region sequential reads - - sw.Restart(); - for (int i = 0; i < total; i += 10) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - await tr.GetValuesAsync(Enumerable.Range(i, 10).Select(x => Slice.FromString(x.ToString(fmt)))).ConfigureAwait(false); - } - } - sw.Stop(); - DumpResult("SeqRead10", total, total / 10, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 10) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - await tr.Snapshot.GetValuesAsync(Enumerable.Range(i, 10).Select(x => Slice.FromString(x.ToString(fmt)))).ConfigureAwait(false); - } - } - sw.Stop(); - DumpResult("SeqRead10S", total, total / 10, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 10) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - int x = i; - int y = i + 10; - await tr.GetRangeAsync( - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString(x.ToString(fmt))), - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString(y.ToString(fmt))) - ).ConfigureAwait(false); - } - } - sw.Stop(); - DumpResult("SeqRead10R", total, total / 10, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 100) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - await tr.GetValuesAsync(Enumerable.Range(i, 100).Select(x => 
Slice.FromString(x.ToString(fmt)))).ConfigureAwait(false); - } - } - sw.Stop(); - DumpResult("SeqRead100", total, total / 100, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 100) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - await tr.Snapshot.GetValuesAsync(Enumerable.Range(i, 100).Select(x => Slice.FromString(x.ToString(fmt)))).ConfigureAwait(false); - } - } - sw.Stop(); - DumpResult("SeqRead100S", total, total / 100, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 100) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - int x = i; - int y = i + 100; - await tr.GetRangeAsync( - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString(x.ToString(fmt))), - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString(y.ToString(fmt))) - ).ConfigureAwait(false); - } - } - sw.Stop(); - DumpResult("SeqRead100R", total, total / 100, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 100) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - int x = i; - int y = i + 100; - await tr.Snapshot.GetRangeAsync( - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString(x.ToString(fmt))), - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString(y.ToString(fmt))) - ).ConfigureAwait(false); - } - } - sw.Stop(); - DumpResult("SeqRead100RS", total, total / 100, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 1000) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - await tr.GetValuesAsync(Enumerable.Range(i, 1000).Select(x => Slice.FromString(x.ToString(fmt)))).ConfigureAwait(false); - } - } - sw.Stop(); - DumpResult("SeqRead1k", total, total / 1000, sw.Elapsed); - - #endregion - - DumpMemory(); - - #region random reads - - //sw.Restart(); - //for (int i = 0; i < total; i++) - //{ - // using (var tr = db.BeginReadOnlyTransaction()) - // { - // int x = rnd.Next((int)total); - // await tr.GetAsync(Slice.FromString(x.ToString(fmt))); - // } - 
//} - //sw.Stop(); - //Log("RndRead1 : " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - - sw.Restart(); - for (int i = 0; i < total; i += 10) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - await tr.GetValuesAsync(Enumerable.Range(i, 10).Select(x => Slice.FromString(rnd.Next((int)total).ToString(fmt)))).ConfigureAwait(false); - } - - } - sw.Stop(); - //Log("RndRead10 : " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps, " + (total / (10 * sw.Elapsed.TotalSeconds)).ToString("N0") + " tps"); - DumpResult("RndRead10", total, total / 10, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 10) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - await tr.Snapshot.GetValuesAsync(Enumerable.Range(i, 10).Select(x => Slice.FromString(rnd.Next((int)total).ToString(fmt)))).ConfigureAwait(false); - } - - } - sw.Stop(); - //Log("RndRead10S : " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps, " + (total / (10 * sw.Elapsed.TotalSeconds)).ToString("N0") + " tps"); - DumpResult("RndRead10S", total, total / 10, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 10) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - int x = rnd.Next((int)total - 10); - int y = x + 10; - await tr.GetRangeAsync( - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString(x.ToString(fmt))), - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString(y.ToString(fmt))) - ).ConfigureAwait(false); - } - - } - sw.Stop(); - //Log("RndRead10R : " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps, " + (total / (10 * 
sw.Elapsed.TotalSeconds)).ToString("N0") + " tps"); - DumpResult("RndRead10R", total, total / 10, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 100) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - await tr.GetValuesAsync(Enumerable.Range(i, 100).Select(x => Slice.FromString(rnd.Next((int)total).ToString(fmt)))).ConfigureAwait(false); - } - - } - sw.Stop(); - //Log("RndRead100 : " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps, " + (total / (100 * sw.Elapsed.TotalSeconds)).ToString("N0") + " tps"); - DumpResult("RndRead100", total, total / 100, sw.Elapsed); - - sw.Restart(); - for (int i = 0; i < total; i += 1000) - { - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - await tr.GetValuesAsync(Enumerable.Range(i, 1000).Select(x => Slice.FromString(rnd.Next((int)total).ToString(fmt)))).ConfigureAwait(false); - } - - } - sw.Stop(); - //Log("RndRead1k : " + total.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (total / sw.Elapsed.TotalSeconds).ToString("N0") + " kps, " + (total / (1000 * sw.Elapsed.TotalSeconds)).ToString("N0") + " tps"); - DumpResult("RndRead1k", total, total / 1000, sw.Elapsed); - - #endregion - - DumpMemory(); - - #region Parallel Reads... 
- - int CPUS = Environment.ProcessorCount; - - long read = 0; - var mre = new ManualResetEvent(false); - var tasks = Enumerable - .Range(0, CPUS) - .Select(k => Task.Run(async () => - { - var rndz = new Random(k); - mre.WaitOne(); - - int keys = 0; - for (int j = 0; j < 20; j++) - { - for (int i = 0; i < total / CPUS; i += 100) - { - int pp = i;// rndz.Next((int)total - 10); - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - var res = await tr.GetValuesAsync(Enumerable.Range(i, 100).Select(x => Slice.FromString((pp + x).ToString(fmt)))).ConfigureAwait(false); - keys += res.Length; - } - } - } - Interlocked.Add(ref read, keys); - return keys; - })).ToArray(); - - sw.Restart(); - mre.Set(); - await Task.WhenAll(tasks); - sw.Stop(); - mre.Dispose(); - //Log("ParaSeqRead: " + read.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N3") + " sec => " + (read / sw.Elapsed.TotalSeconds).ToString("N0") + " kps"); - DumpResult("ParaSeqRead", read, read / 100, sw.Elapsed); - - read = 0; - mre = new ManualResetEvent(false); - tasks = Enumerable - .Range(0, CPUS) - .Select(k => Task.Run(async () => - { - var rndz = new Random(k); - mre.WaitOne(); - - int keys = 0; - for (int j = 0; j < 20; j++) - { - for (int i = 0; i < total / CPUS; i += 100) - { - int pp = i;// rndz.Next((int)total - 100); - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - var res = await tr.GetRangeAsync( - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString(pp.ToString(fmt))), - FdbKeySelector.FirstGreaterOrEqual(Slice.FromString((pp + 100).ToString(fmt))) - ).ConfigureAwait(false); - - keys += res.Count; - } - } - } - Interlocked.Add(ref read, keys); - return keys; - })).ToArray(); - - sw.Restart(); - mre.Set(); - await Task.WhenAll(tasks); - sw.Stop(); - mre.Dispose(); - DumpResult("ParaSeqRange", read, read / 100, sw.Elapsed); - #endregion - - DumpMemory(); - - } - - } - - } -} diff --git 
a/FoundationDB.Storage.Memory.Test/Transactions/Comparisons.cs b/FoundationDB.Storage.Memory.Test/Transactions/Comparisons.cs deleted file mode 100644 index 733213ca9..000000000 --- a/FoundationDB.Storage.Memory.Test/Transactions/Comparisons.cs +++ /dev/null @@ -1,138 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.API.Tests -{ - using FoundationDB.Client; - using FoundationDB.Storage.Memory.Tests; - using NUnit.Framework; - using System; - using System.Threading.Tasks; - - [TestFixture] - public class Comparisons : FdbTest - { - // Compare the behavior of the MemoryDB against a FoundationDB database - - private async Task Scenario1(IFdbTransaction tr) - { - tr.Set(Slice.FromAscii("hello"), Slice.FromAscii("world!")); - tr.Clear(Slice.FromAscii("removed")); - var result = await tr.GetAsync(Slice.FromAscii("narf")); - } - - private Task Scenario2(IFdbTransaction tr) - { - var location = FdbSubspace.CreateDynamic(Slice.FromAscii("TEST")); - tr.ClearRange(FdbKeyRange.StartsWith(location.Key)); - for (int i = 0; i < 10; i++) - { - tr.Set(location.Keys.Encode(i), Slice.FromString("value of " + i)); - } - return Task.FromResult(null); - } - - private Task Scenario3(IFdbTransaction tr) - { - var location = FdbSubspace.Create(Slice.FromAscii("TEST")); - - tr.Set(location.Key + (byte)'a', Slice.FromAscii("A")); - tr.AtomicAdd(location.Key + (byte)'k', Slice.FromFixed32(1)); - tr.Set(location.Key + (byte)'z', Slice.FromAscii("C")); - tr.ClearRange(location.Key + (byte)'a', location.Key + (byte)'k'); - tr.ClearRange(location.Key + (byte)'k', location.Key + (byte)'z'); - return Task.FromResult(null); - } - - private Task Scenario4(IFdbTransaction tr) - { - var location = FdbSubspace.Create(Slice.FromAscii("TEST")); - - //tr.Set(location.Key, Slice.FromString("NARF")); - //tr.AtomicAdd(location.Key, Slice.FromFixedU32(1)); - tr.AtomicAnd(location.Key, 
Slice.FromFixedU32(7)); - tr.AtomicXor(location.Key, Slice.FromFixedU32(3)); - tr.AtomicXor(location.Key, Slice.FromFixedU32(15)); - return Task.FromResult(null); - } - - private async Task Scenario5(IFdbTransaction tr) - { - var location = FdbSubspace.CreateDynamic(Slice.FromAscii("TEST")); - - //tr.Set(location.Pack(42), Slice.FromString("42")); - //tr.Set(location.Pack(50), Slice.FromString("50")); - //tr.Set(location.Pack(60), Slice.FromString("60")); - - var x = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(location.Keys.Encode(49))); - Console.WriteLine(x); - - tr.Set(location.Keys.Encode("FOO"), Slice.FromString("BAR")); - - } - - private async Task Scenario6(IFdbTransaction tr) - { - var location = FdbSubspace.CreateDynamic(Slice.FromAscii("TEST")); - - tr.AtomicAdd(location.Keys.Encode("ATOMIC"), Slice.FromFixed32(0x55555555)); - - var x = await tr.GetAsync(location.Keys.Encode("ATOMIC")); - Console.WriteLine(x.ToInt32().ToString("x")); - } - - [Test][Ignore] - public async Task Test_Compare_Implementations() - { - for (int mode = 1; mode <= 6; mode++) - { - - Console.WriteLine("#### SCENARIO " + mode + " ####"); - - using (var db = await Fdb.OpenAsync(this.Cancellation)) - { - using (var tr = db.BeginTransaction(this.Cancellation)) - { - await tr.GetReadVersionAsync(); - - switch (mode) - { - case 1: await Scenario1(tr); break; - case 2: await Scenario2(tr); break; - case 3: await Scenario3(tr); break; - case 4: await Scenario4(tr); break; - case 5: await Scenario5(tr); break; - case 6: await Scenario6(tr); break; - } - - await tr.CommitAsync(); - } - } - - using (var db = MemoryDatabase.CreateNew("DB")) - { - using (var tr = db.BeginTransaction(FdbTransactionMode.Default, this.Cancellation)) - { - await tr.GetReadVersionAsync(); - - switch (mode) - { - case 1: await Scenario1(tr); break; - case 2: await Scenario2(tr); break; - case 3: await Scenario3(tr); break; - case 4: await Scenario4(tr); break; - case 5: await Scenario5(tr); break; - case 6: 
await Scenario6(tr); break; - } - - await tr.CommitAsync(); - } - - db.Debug_Dump(); - } - } - } - - } -} diff --git a/FoundationDB.Storage.Memory.Test/Transactions/MemoryTransactionFacts.cs b/FoundationDB.Storage.Memory.Test/Transactions/MemoryTransactionFacts.cs deleted file mode 100644 index 43de8d3a2..000000000 --- a/FoundationDB.Storage.Memory.Test/Transactions/MemoryTransactionFacts.cs +++ /dev/null @@ -1,1008 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.API.Tests -{ - using FoundationDB.Client; - using FoundationDB.Layers.Collections; - using FoundationDB.Layers.Directories; - using FoundationDB.Layers.Indexing; - using FoundationDB.Linq; - using FoundationDB.Storage.Memory.Tests; - using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Linq; - using System.Threading.Tasks; - - [TestFixture] - public class MemoryTransactionFacts : FdbTest - { - - [Test] - public async Task Test_Hello_World() - { - using (var db = MemoryDatabase.CreateNew("DB", FdbSubspace.Empty, false)) - { - var key = db.Keys.Encode("hello"); - - // v1 - await db.WriteAsync((tr) => tr.Set(key, Slice.FromString("World!")), this.Cancellation); - db.Debug_Dump(); - var data = await db.ReadAsync((tr) => tr.GetAsync(key), this.Cancellation); - Assert.That(data.ToUnicode(), Is.EqualTo("World!")); - - // v2 - await db.WriteAsync((tr) => tr.Set(key, Slice.FromString("Le Monde!")), this.Cancellation); - db.Debug_Dump(); - data = await db.ReadAsync((tr) => tr.GetAsync(key), this.Cancellation); - Assert.That(data.ToUnicode(), Is.EqualTo("Le Monde!")); - - using (var tr1 = db.BeginTransaction(this.Cancellation)) - { - await tr1.GetReadVersionAsync(); - - await db.WriteAsync((tr2) => tr2.Set(key, Slice.FromString("Sekai!")), this.Cancellation); - db.Debug_Dump(); - - data = await tr1.GetAsync(key); - 
Assert.That(data.ToUnicode(), Is.EqualTo("Le Monde!")); - } - - data = await db.ReadAsync((tr) => tr.GetAsync(key), this.Cancellation); - Assert.That(data.ToUnicode(), Is.EqualTo("Sekai!")); - - // Collect memory - Trace.WriteLine("### GARBAGE COLLECT! ###"); - db.Collect(); - db.Debug_Dump(); - - } - } - - [Test] - public async Task Test_GetKey() - { - Slice key; - Slice value; - - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - tr.Set(location.Encode(0), Slice.FromString("first")); - tr.Set(location.Encode(10), Slice.FromString("ten")); - tr.Set(location.Encode(20), Slice.FromString("ten ten")); - tr.Set(location.Encode(42), Slice.FromString("narf!")); - tr.Set(location.Encode(100), Slice.FromString("a hundred missipis")); - await tr.CommitAsync(); - } - - db.Debug_Dump(); - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - - value = await tr.GetAsync(location.Encode(42)); - Console.WriteLine(value); - Assert.That(value.ToString(), Is.EqualTo("narf!")); - - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(location.Encode(42))); - Assert.That(key, Is.EqualTo(location.Encode(42))); - - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(location.Encode(42))); - Assert.That(key, Is.EqualTo(location.Encode(100))); - - key = await tr.GetKeyAsync(FdbKeySelector.LastLessOrEqual(location.Encode(42))); - Assert.That(key, Is.EqualTo(location.Encode(42))); - - key = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(location.Encode(42))); - Assert.That(key, Is.EqualTo(location.Encode(20))); - - var keys = await tr.GetKeysAsync(new[] - { - FdbKeySelector.FirstGreaterOrEqual(location.Encode(42)), - FdbKeySelector.FirstGreaterThan(location.Encode(42)), - FdbKeySelector.LastLessOrEqual(location.Encode(42)), - FdbKeySelector.LastLessThan(location.Encode(42)) - }); - - Assert.That(keys.Length, Is.EqualTo(4)); - Assert.That(keys[0], 
Is.EqualTo(location.Encode(42))); - Assert.That(keys[1], Is.EqualTo(location.Encode(100))); - Assert.That(keys[2], Is.EqualTo(location.Encode(42))); - Assert.That(keys[3], Is.EqualTo(location.Encode(20))); - - await tr.CommitAsync(); - } - - } - - } - - [Test] - public async Task Test_GetKey_ReadConflicts() - { - Slice key; - - using (var db = MemoryDatabase.CreateNew("FOO")) - { - var location = db.Keys; - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - tr.Set(location.Encode(42), Slice.FromString("42")); - tr.Set(location.Encode(50), Slice.FromString("50")); - tr.Set(location.Encode(60), Slice.FromString("60")); - await tr.CommitAsync(); - } - db.Debug_Dump(); - - Func check = async (selector, expected) => - { - using (var tr = db.BeginTransaction(this.Cancellation)) - { - key = await tr.GetKeyAsync(selector); - await tr.CommitAsync(); - Assert.That(key, Is.EqualTo(expected), selector.ToString() + " => " + FdbKey.Dump(expected)); - } - }; - - await check( - FdbKeySelector.FirstGreaterOrEqual(location.Encode(50)), - location.Encode(50) - ); - await check( - FdbKeySelector.FirstGreaterThan(location.Encode(50)), - location.Encode(60) - ); - - await check( - FdbKeySelector.FirstGreaterOrEqual(location.Encode(49)), - location.Encode(50) - ); - await check( - FdbKeySelector.FirstGreaterThan(location.Encode(49)), - location.Encode(50) - ); - - await check( - FdbKeySelector.FirstGreaterOrEqual(location.Encode(49)) + 1, - location.Encode(60) - ); - await check( - FdbKeySelector.FirstGreaterThan(location.Encode(49)) + 1, - location.Encode(60) - ); - - await check( - FdbKeySelector.LastLessOrEqual(location.Encode(49)), - location.Encode(42) - ); - await check( - FdbKeySelector.LastLessThan(location.Encode(49)), - location.Encode(42) - ); - } - } - - [Test] - public async Task Test_GetRangeAsync() - { - Slice key; - - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - 
for (int i = 0; i <= 100; i++) - { - tr.Set(location.Encode(i), Slice.FromString("value of " + i)); - } - await tr.CommitAsync(); - } - - db.Debug_Dump(); - - // verify that key selectors work find - using (var tr = db.BeginTransaction(this.Cancellation)) - { - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(FdbKey.MaxValue)); - if (key != FdbKey.MaxValue) Assert.Inconclusive("Key selectors are buggy: fGE(max)"); - key = await tr.GetKeyAsync(FdbKeySelector.LastLessOrEqual(FdbKey.MaxValue)); - if (key != FdbKey.MaxValue) Assert.Inconclusive("Key selectors are buggy: lLE(max)"); - key = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(FdbKey.MaxValue)); - if (key != location.Encode(100)) Assert.Inconclusive("Key selectors are buggy: lLT(max)"); - } - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - - var chunk = await tr.GetRangeAsync( - FdbKeySelector.FirstGreaterOrEqual(location.Encode(0)), - FdbKeySelector.FirstGreaterOrEqual(location.Encode(50)) - ); -#if DEBUG - for (int i = 0; i < chunk.Count; i++) - { - Console.WriteLine(i.ToString() + " : " + chunk.Chunk[i].Key + " = " + chunk.Chunk[i].Value); - } -#endif - - Assert.That(chunk.Count, Is.EqualTo(50), "chunk.Count"); - Assert.That(chunk.HasMore, Is.False, "chunk.HasMore"); - Assert.That(chunk.Reversed, Is.False, "chunk.Reversed"); - Assert.That(chunk.Iteration, Is.EqualTo(1), "chunk.Iteration"); - - for (int i = 0; i < 50; i++) - { - Assert.That(chunk.Chunk[i].Key, Is.EqualTo(location.Encode(i)), "[{0}].Key", i); - Assert.That(chunk.Chunk[i].Value.ToString(), Is.EqualTo("value of " + i), "[{0}].Value", i); - } - - await tr.CommitAsync(); - } - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - - var chunk = await tr.GetRangeAsync( - FdbKeySelector.FirstGreaterOrEqual(location.Encode(0)), - FdbKeySelector.FirstGreaterOrEqual(location.Encode(50)), - new FdbRangeOptions { Reverse = true } - ); -#if DEBUG - for (int i = 0; i < chunk.Count; i++) - { - 
Console.WriteLine(i.ToString() + " : " + chunk.Chunk[i].Key + " = " + chunk.Chunk[i].Value); - } -#endif - - Assert.That(chunk.Count, Is.EqualTo(50), "chunk.Count"); - Assert.That(chunk.HasMore, Is.False, "chunk.HasMore"); - Assert.That(chunk.Reversed, Is.True, "chunk.Reversed"); - Assert.That(chunk.Iteration, Is.EqualTo(1), "chunk.Iteration"); - - for (int i = 0; i < 50; i++) - { - Assert.That(chunk.Chunk[i].Key, Is.EqualTo(location.Encode(49 - i)), "[{0}].Key", i); - Assert.That(chunk.Chunk[i].Value.ToString(), Is.EqualTo("value of " + (49 - i)), "[{0}].Value", i); - } - - await tr.CommitAsync(); - } - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - - var chunk = await tr.GetRangeAsync( - FdbKeySelector.FirstGreaterOrEqual(location.Encode(0)), - FdbKeySelector.FirstGreaterOrEqual(FdbKey.MaxValue), - new FdbRangeOptions { Reverse = true, Limit = 1 } - ); -#if DEBUG - for (int i = 0; i < chunk.Count; i++) - { - Console.WriteLine(i.ToString() + " : " + chunk.Chunk[i].Key + " = " + chunk.Chunk[i].Value); - } -#endif - - Assert.That(chunk.Count, Is.EqualTo(1), "chunk.Count"); - Assert.That(chunk.HasMore, Is.True, "chunk.HasMore"); - Assert.That(chunk.Reversed, Is.True, "chunk.Reversed"); - Assert.That(chunk.Iteration, Is.EqualTo(1), "chunk.Iteration"); - - await tr.CommitAsync(); - } - - } - - } - - [Test] - public async Task Test_GetRange() - { - - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - for (int i = 0; i <= 100; i++) - { - tr.Set(location.Encode(i), Slice.FromString("value of " + i)); - } - await tr.CommitAsync(); - } - - db.Debug_Dump(); - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - - var results = await tr - .GetRange(location.Encode(0), location.Encode(50)) - .ToListAsync(); - - Assert.That(results, Is.Not.Null); -#if DEBUG - for (int i = 0; i < results.Count; i++) - { - Console.WriteLine(i.ToString() + " : " + results[i].Key 
+ " = " + results[i].Value); - } -#endif - - Assert.That(results.Count, Is.EqualTo(50)); - for (int i = 0; i < 50; i++) - { - Assert.That(results[i].Key, Is.EqualTo(location.Encode(i)), "[{0}].Key", i); - Assert.That(results[i].Value.ToString(), Is.EqualTo("value of " + i), "[{0}].Value", i); - } - - await tr.CommitAsync(); - } - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - - var results = await tr - .GetRange(location.Encode(0), location.Encode(50), new FdbRangeOptions { Reverse = true }) - .ToListAsync(); - Assert.That(results, Is.Not.Null); -#if DEBUG - for (int i = 0; i < results.Count; i++) - { - Console.WriteLine(i.ToString() + " : " + results[i].Key + " = " + results[i].Value); - } -#endif - - Assert.That(results.Count, Is.EqualTo(50)); - for (int i = 0; i < 50; i++) - { - Assert.That(results[i].Key, Is.EqualTo(location.Encode(49 - i)), "[{0}].Key", i); - Assert.That(results[i].Value.ToString(), Is.EqualTo("value of " + (49 - i)), "[{0}].Value", i); - } - - await tr.CommitAsync(); - } - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - var result = await tr - .GetRange(location.Encode(0), FdbKey.MaxValue, new FdbRangeOptions { Reverse = true }) - .FirstOrDefaultAsync(); - -#if DEBUG - Console.WriteLine(result.Key + " = " + result.Value); -#endif - Assert.That(result.Key, Is.EqualTo(location.Encode(100))); - Assert.That(result.Value.ToString(), Is.EqualTo("value of 100")); - - await tr.CommitAsync(); - } - - } - - } - - [Test] - public async Task Test_CommittedVersion_On_ReadOnly_Transactions() - { - //note: until CommitAsync() is called, the value of the committed version is unspecified, but current implementation returns -1 - - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - long ver = tr.GetCommittedVersion(); - Assert.That(ver, Is.EqualTo(-1), "Initial committed version"); - - var _ = await tr.GetAsync(location.Encode("foo")); - 
- // until the transction commits, the committed version will stay -1 - ver = tr.GetCommittedVersion(); - Assert.That(ver, Is.EqualTo(-1), "Committed version after a single read"); - - // committing a read only transaction - - await tr.CommitAsync(); - - ver = tr.GetCommittedVersion(); - Assert.That(ver, Is.EqualTo(-1), "Read-only comiitted transaction have a committed version of -1"); - } - - db.Debug_Dump(); - } - } - - [Test] - public async Task Test_CommittedVersion_On_Write_Transactions() - { - //note: until CommitAsync() is called, the value of the committed version is unspecified, but current implementation returns -1 - - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - // take the read version (to compare with the committed version below) - long readVersion = await tr.GetReadVersionAsync(); - - long ver = tr.GetCommittedVersion(); - Assert.That(ver, Is.EqualTo(-1), "Initial committed version"); - - tr.Set(location.Encode("foo"), Slice.FromString("bar")); - - // until the transction commits, the committed version should still be -1 - ver = tr.GetCommittedVersion(); - Assert.That(ver, Is.EqualTo(-1), "Committed version after a single write"); - - // committing a read only transaction - - await tr.CommitAsync(); - - ver = tr.GetCommittedVersion(); - Assert.That(ver, Is.GreaterThanOrEqualTo(readVersion), "Committed version of write transaction should be >= the read version"); - } - - db.Debug_Dump(); - } - } - - [Test] - public async Task Test_CommittedVersion_After_Reset() - { - //note: until CommitAsync() is called, the value of the committed version is unspecified, but current implementation returns -1 - - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - // take the read version (to compare with the committed version below) - long rv1 = await tr.GetReadVersionAsync(); - 
// do something and commit - tr.Set(location.Encode("foo"), Slice.FromString("bar")); - await tr.CommitAsync(); - long cv1 = tr.GetCommittedVersion(); - Console.WriteLine("COMMIT: " + rv1 + " / " + cv1); - Assert.That(cv1, Is.GreaterThanOrEqualTo(rv1), "Committed version of write transaction should be >= the read version"); - - // reset the transaction - tr.Reset(); - - long rv2 = await tr.GetReadVersionAsync(); - long cv2 = tr.GetCommittedVersion(); - Console.WriteLine("RESET: " + rv2 + " / " + cv2); - //Note: the current fdb_c client does not revert the commited version to -1 ... ? - //Assert.That(cv2, Is.EqualTo(-1), "Committed version should go back to -1 after reset"); - - // read-only + commit - await tr.GetAsync(location.Encode("foo")); - await tr.CommitAsync(); - cv2 = tr.GetCommittedVersion(); - Console.WriteLine("COMMIT2: " + rv2 + " / " + cv2); - Assert.That(cv2, Is.EqualTo(-1), "Committed version of read-only transaction should be -1 even the transaction was previously used to write something"); - - } - } - } - - [Test] - public async Task Test_Conflicts() - { - - // this SHOULD NOT conflict - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr1 = db.BeginTransaction(this.Cancellation)) - { - using (var tr2 = db.BeginTransaction(this.Cancellation)) - { - tr2.Set(location.Encode("foo"), Slice.FromString("changed")); - await tr2.CommitAsync(); - } - - var x = await tr1.GetAsync(location.Encode("foo")); - tr1.Set(location.Encode("bar"), Slice.FromString("other")); - - await tr1.CommitAsync(); - } - - } - - // this SHOULD conflict - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr1 = db.BeginTransaction(this.Cancellation)) - { - var x = await tr1.GetAsync(location.Encode("foo")); - - using (var tr2 = db.BeginTransaction(this.Cancellation)) - { - tr2.Set(location.Encode("foo"), Slice.FromString("changed")); - await tr2.CommitAsync(); - } - - 
tr1.Set(location.Encode("bar"), Slice.FromString("other")); - - Assert.That(async () => await tr1.CommitAsync(), Throws.InstanceOf().With.Property("Code").EqualTo(FdbError.NotCommitted)); - } - - } - - // this SHOULD conflict - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr1 = db.BeginTransaction(this.Cancellation)) - { - await tr1.GetReadVersionAsync(); - - using (var tr2 = db.BeginTransaction(this.Cancellation)) - { - tr2.Set(location.Encode("foo"), Slice.FromString("changed")); - await tr2.CommitAsync(); - } - - var x = await tr1.GetAsync(location.Encode("foo")); - tr1.Set(location.Encode("bar"), Slice.FromString("other")); - - Assert.That(async () => await tr1.CommitAsync(), Throws.InstanceOf().With.Property("Code").EqualTo(FdbError.NotCommitted)); - } - - } - - // this SHOULD NOT conflict - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - using (var tr1 = db.BeginTransaction(this.Cancellation)) - { - var x = await tr1.Snapshot.GetAsync(location.Encode("foo")); - - using (var tr2 = db.BeginTransaction(this.Cancellation)) - { - tr2.Set(location.Encode("foo"), Slice.FromString("changed")); - await tr2.CommitAsync(); - } - - tr1.Set(location.Encode("bar"), Slice.FromString("other")); - - await tr1.CommitAsync(); - } - - } - } - - [Test] - public async Task Test_Write_Then_Read() - { - using (var db = MemoryDatabase.CreateNew("FOO")) - { - var location = db.Keys; - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - tr.Set(Slice.FromString("hello"), Slice.FromString("World!")); - tr.AtomicAdd(Slice.FromString("counter"), Slice.FromFixed32(1)); - tr.Set(Slice.FromString("foo"), Slice.FromString("bar")); - await tr.CommitAsync(); - } - - db.Debug_Dump(); - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - var result = await tr.GetAsync(Slice.FromString("hello")); - Assert.That(result, Is.Not.Null); - Assert.That(result.ToString(), Is.EqualTo("World!")); - - 
result = await tr.GetAsync(Slice.FromString("counter")); - Assert.That(result, Is.Not.Null); - Assert.That(result.ToInt32(), Is.EqualTo(1)); - - result = await tr.GetAsync(Slice.FromString("foo")); - Assert.That(result.ToString(), Is.EqualTo("bar")); - - } - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - tr.Set(Slice.FromString("hello"), Slice.FromString("Le Monde!")); - tr.AtomicAdd(Slice.FromString("counter"), Slice.FromFixed32(1)); - tr.Set(Slice.FromString("narf"), Slice.FromString("zort")); - await tr.CommitAsync(); - } - - db.Debug_Dump(); - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - var result = await tr.GetAsync(Slice.FromString("hello")); - Assert.That(result, Is.Not.Null); - Assert.That(result.ToString(), Is.EqualTo("Le Monde!")); - - result = await tr.GetAsync(Slice.FromString("counter")); - Assert.That(result, Is.Not.Null); - Assert.That(result.ToInt32(), Is.EqualTo(2)); - - result = await tr.GetAsync(Slice.FromString("foo")); - Assert.That(result, Is.Not.Null); - Assert.That(result.ToString(), Is.EqualTo("bar")); - - result = await tr.GetAsync(Slice.FromString("narf")); - Assert.That(result, Is.Not.Null); - Assert.That(result.ToString(), Is.EqualTo("zort")); - } - - // Collect memory - Trace.WriteLine("### GARBAGE COLLECT! ###"); - db.Collect(); - db.Debug_Dump(); - } - } - - [Test] - public async Task Test_Atomic() - { - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.Keys; - - var key1 = location.Encode(1); - var key2 = location.Encode(2); - var key16 = location.Encode(16); - - for (int i = 0; i < 10; i++) - { - using (var tr = db.BeginTransaction(this.Cancellation)) - { - tr.AtomicAdd(key1, Slice.FromFixed64(1)); - tr.AtomicAdd(key2, Slice.FromFixed64(2)); - tr.AtomicAdd(key16, Slice.FromFixed64(16)); - - await tr.CommitAsync(); - } - } - - db.Debug_Dump(); - - // Collect memory - Trace.WriteLine("### GARBAGE COLLECT! 
###"); - db.Collect(); - db.Debug_Dump(); - } - } - - [Test] - public async Task Test_Use_Simple_Layer() - { - using (var db = MemoryDatabase.CreateNew("FOO")) - { - var location = db.GlobalSpace; - - var map = new FdbMap("Foos", db.GlobalSpace.Partition.ByKey("Foos"), KeyValueEncoders.Values.StringEncoder); - var index = new FdbIndex("Foos.ByColor", db.GlobalSpace.Partition.ByKey("Foos", "Color")); - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - map.Set(tr, 3, @"{ ""name"": ""Juliet"", ""color"": ""red"" }"); - map.Set(tr, 2, @"{ ""name"": ""Joey"", ""color"": ""blue"" }"); - map.Set(tr, 1, @"{ ""name"": ""Bob"", ""color"": ""red"" }"); - - index.Add(tr, 3, "red"); - index.Add(tr, 2, "blue"); - index.Add(tr, 1, "red"); - - await tr.CommitAsync(); - } - - db.Debug_Dump(true); - - //// Collect memory - //Trace.WriteLine("### GARBAGE COLLECT! ###"); - //db.Collect(); - //db.Debug_Dump(); - } - } - - [Test] - public async Task Test_Use_Directory_Layer() - { - using (var db = MemoryDatabase.CreateNew("DB")) - { - var location = db.GlobalSpace; - - var foos = await db.Directory.CreateOrOpenAsync("Foos", this.Cancellation); - var bars = await db.Directory.CreateOrOpenAsync("Bars", this.Cancellation); - - var foo123 = await db.Directory.CreateOrOpenAsync(new[] { "Foos", "123" }, this.Cancellation); - var bar456 = await bars.CreateOrOpenAsync(db, new[] { "123" }, this.Cancellation); - - db.Debug_Dump(true); - - //// Collect memory - //Trace.WriteLine("### GARBAGE COLLECT! 
###"); - //db.Collect(); - //db.Debug_Dump(); - } - } - - [Test] - public async Task Test_Can_Resolve_Key_Selector_Outside_Boundaries() - { - // test various corner cases: - - // - k < first_key or k <= <00> resolves to: - // - '' always - - // - k > last_key or k >= resolve to: - // - '' when access to system keys is off - // - '/backupRange' (usually) when access to system keys is ON - - // - k >= <00> resolves to: - // - key_outside_legal_range when access to system keys is off - // - '/backupRange' (usually) when access to system keys is ON - - // - k >= resolved to: - // - key_outside_legal_range when access to system keys is off - // - '' when access to system keys is ON - - Slice key; - - using (var db = MemoryDatabase.CreateNew("FOO")) - { - - using (var tr = db.BeginTransaction(this.Cancellation)) - { - tr.Set(Slice.FromString("A"), Slice.FromString("min")); - tr.Set(Slice.FromString("Z"), Slice.FromString("max")); - await tr.CommitAsync(); - } - - using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) - { - // before <00> - key = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(FdbKey.MinValue)); - Assert.That(key, Is.EqualTo(Slice.Empty), "lLT(<00>) => ''"); - - // before the first key in the db - var minKey = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(FdbKey.MinValue)); - Assert.That(minKey, Is.Not.Null); - Console.WriteLine("minKey = " + minKey); - key = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(minKey)); - Assert.That(key, Is.EqualTo(Slice.Empty), "lLT(min_key) => ''"); - - // after the last key in the db - - var maxKey = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(FdbKey.MaxValue)); - Assert.That(maxKey, Is.Not.Null); - Console.WriteLine("maxKey = " + maxKey); - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(maxKey)); - Assert.That(key, Is.EqualTo(FdbKey.MaxValue), "fGT(maxKey) => "); - - // after - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(FdbKey.MaxValue)); - Assert.That(key, 
Is.EqualTo(FdbKey.MaxValue), "fGT() => "); - Assert.That(async () => await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(Slice.FromAscii("\xFF\xFF"))), Throws.InstanceOf().With.Property("Code").EqualTo(FdbError.KeyOutsideLegalRange)); - Assert.That(async () => await tr.GetKeyAsync(FdbKeySelector.LastLessThan(Slice.FromAscii("\xFF\x00"))), Throws.InstanceOf().With.Property("Code").EqualTo(FdbError.KeyOutsideLegalRange)); - - tr.WithReadAccessToSystemKeys(); - - var firstSystemKey = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(FdbKey.MaxValue)); - // usually the first key in the system space is /backupDataFormat, but that may change in the future version. - Assert.That(firstSystemKey, Is.Not.Null); - Assert.That(firstSystemKey, Is.GreaterThan(FdbKey.MaxValue), "key should be between and "); - Assert.That(firstSystemKey, Is.LessThan(Slice.FromAscii("\xFF\xFF")), "key should be between and "); - - // with access to system keys, the maximum possible key becomes - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(Slice.FromAscii("\xFF\xFF"))); - Assert.That(key, Is.EqualTo(Slice.FromAscii("\xFF\xFF")), "fGE() => (with access to system keys)"); - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(Slice.FromAscii("\xFF\xFF"))); - Assert.That(key, Is.EqualTo(Slice.FromAscii("\xFF\xFF")), "fGT() => (with access to system keys)"); - - key = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(Slice.FromAscii("\xFF\x00"))); - Assert.That(key, Is.EqualTo(maxKey), "lLT(<00>) => max_key (with access to system keys)"); - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(maxKey)); - Assert.That(key, Is.EqualTo(firstSystemKey), "fGT(max_key) => first_system_key (with access to system keys)"); - - } - } - - } - - [Test] - public async Task Test_Can_BulkLoad_Data_Ordered() - { - const int N = 1 * 1000 * 1000; - - // insert N sequential items and bulk load with "ordered = true" to skip the sorting of levels - - Console.WriteLine("Warmup..."); - using 
(var db = MemoryDatabase.CreateNew("WARMUP")) - { - await db.BulkLoadAsync(Enumerable.Range(0, 100).Select(i => new KeyValuePair(db.Keys.Encode(i), Slice.FromFixed32(i))).ToList(), ordered: true); - } - - using(var db = MemoryDatabase.CreateNew("FOO")) - { - var location = db.Keys; - - Console.WriteLine("Generating " + N.ToString("N0") + " keys..."); - var data = new KeyValuePair[N]; - for (int i = 0; i < N; i++) - { - data[i] = new KeyValuePair( - location.Encode(i), - Slice.FromFixed32(i) - ); - } - Console.WriteLine("Inserting ..."); - - var sw = Stopwatch.StartNew(); - await db.BulkLoadAsync(data, ordered: true); - sw.Stop(); - DumpResult("BulkLoadSeq", N, 1, sw.Elapsed); - - db.Debug_Dump(); - - var rnd = new Random(); - for (int i = 0; i < 100 * 1000; i++) - { - int x = rnd.Next(N); - using (var tx = db.BeginReadOnlyTransaction(this.Cancellation)) - { - var res = await tx.GetAsync(location.Encode(x)).ConfigureAwait(false); - Assert.That(res.ToInt32(), Is.EqualTo(x)); - } - } - - } - } - - [Test] - public async Task Test_Can_BulkLoad_Data_Sequential_Unordered() - { - const int N = 1 * 1000 * 1000; - - // insert N sequential items, but without specifying "ordered = true" to force a sort of all levels - - Console.WriteLine("Warmup..."); - using(var db = MemoryDatabase.CreateNew("WARMUP")) - { - await db.BulkLoadAsync(Enumerable.Range(0, 100).Select(i => new KeyValuePair(db.Keys.Encode(i), Slice.FromFixed32(i))).ToList(), ordered: false); - } - - using (var db = MemoryDatabase.CreateNew("FOO")) - { - var location = db.Keys; - - Console.WriteLine("Generating " + N.ToString("N0") + " keys..."); - var data = new KeyValuePair[N]; - var rnd = new Random(); - for (int i = 0; i < N; i++) - { - data[i] = new KeyValuePair( - location.Encode(i), - Slice.FromFixed32(i) - ); - } - - Console.WriteLine("Inserting ..."); - var sw = Stopwatch.StartNew(); - await db.BulkLoadAsync(data, ordered: false); - sw.Stop(); - DumpResult("BulkLoadSeqSort", N, 1, sw.Elapsed); - - 
db.Debug_Dump(); - - for (int i = 0; i < 100 * 1000; i++) - { - int x = rnd.Next(N); - using (var tx = db.BeginReadOnlyTransaction(this.Cancellation)) - { - var res = await tx.GetAsync(location.Encode(x)).ConfigureAwait(false); - Assert.That(res.ToInt32(), Is.EqualTo(x)); - } - } - - } - } - - [Test] - public async Task Test_Can_BulkLoad_Data_Random_Unordered() - { - const int N = 1 * 1000 * 1000; - - // insert N randomized items - - Console.WriteLine("Warmup..."); - using (var db = MemoryDatabase.CreateNew("WARMUP")) - { - await db.BulkLoadAsync(Enumerable.Range(0, 100).Select(i => new KeyValuePair(db.Keys.Encode(i), Slice.FromFixed32(i))).ToList(), ordered: false); - } - - using (var db = MemoryDatabase.CreateNew("FOO")) - { - var location = db.Keys; - - Console.WriteLine("Generating " + N.ToString("N0") + " keys..."); - var data = new KeyValuePair[N]; - var ints = new int[N]; - var rnd = new Random(); - for (int i = 0; i < N; i++) - { - data[i] = new KeyValuePair( - location.Encode(i), - Slice.FromFixed32(i) - ); - ints[i] = rnd.Next(int.MaxValue); - } - Console.WriteLine("Shuffling..."); - Array.Sort(ints, data); - - Console.WriteLine("Inserting ..."); - - var sw = Stopwatch.StartNew(); - await db.BulkLoadAsync(data, ordered: false); - sw.Stop(); - DumpResult("BulkLoadRndSort", N, 1, sw.Elapsed); - - db.Debug_Dump(); - - for (int i = 0; i < 100 * 1000; i++) - { - int x = rnd.Next(N); - using (var tx = db.BeginReadOnlyTransaction(this.Cancellation)) - { - var res = await tx.GetAsync(location.Encode(x)).ConfigureAwait(false); - Assert.That(res.ToInt32(), Is.EqualTo(x)); - } - } - - } - } - - private static void DumpResult(string label, long total, long trans, TimeSpan elapsed) - { - Console.WriteLine( - "{0,-12}: {1, 10} keys in {2,4} sec => {3,9} kps, {4,7} tps", - label, - total.ToString("N0"), - elapsed.TotalSeconds.ToString("N3"), - (total / elapsed.TotalSeconds).ToString("N0"), - (trans / elapsed.TotalSeconds).ToString("N0") - ); - } - - private static void 
DumpMemory(bool collect = false) - { - if (collect) - { - GC.Collect(); - GC.WaitForPendingFinalizers(); - GC.Collect(); - } - Console.WriteLine("Total memory: Managed=" + (GC.GetTotalMemory(false) / 1024.0).ToString("N1") + " kB, WorkingSet=" + (Environment.WorkingSet / 1024.0).ToString("N1") + " kB"); - } - - } -} diff --git a/FoundationDB.Storage.Memory.Test/Transactions/SnapshotFacts.cs b/FoundationDB.Storage.Memory.Test/Transactions/SnapshotFacts.cs deleted file mode 100644 index a5b23b6b2..000000000 --- a/FoundationDB.Storage.Memory.Test/Transactions/SnapshotFacts.cs +++ /dev/null @@ -1,110 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.API.Tests -{ - using FoundationDB.Client; - using FoundationDB.Layers.Tuples; - using FoundationDB.Storage.Memory.Tests; - using FoundationDB.Linq; - using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Threading.Tasks; - using System.Linq; - using System.IO; - - [TestFixture] - public class SnapshotFacts : FdbTest - { - - [Test] - public async Task Test_Can_Save_And_Reload_Snapshot() - { - const string FILE_PATH = ".\\test.pndb"; - const int N = 1 * 1000 * 1000; - - if (File.Exists(FILE_PATH)) File.Delete(FILE_PATH); - - // insert N sequential items and bulk load with "ordered = true" to skip the sorting of levels - - Console.WriteLine("Generating " + N.ToString("N0") + " keys..."); - var data = new KeyValuePair[N]; - var rnd = new Random(); - for (int i = 0; i < N; i++) - { - data[i] = new KeyValuePair( - Slice.FromAscii(i.ToString("D16")), - Slice.Random(rnd, 50) - ); - } - - var sw = new Stopwatch(); - - using (var db = MemoryDatabase.CreateNew()) - { - Console.Write("Inserting ..."); - sw.Restart(); - await db.BulkLoadAsync(data, ordered: true); - sw.Stop(); - Console.WriteLine(" done in " + sw.Elapsed.TotalSeconds.ToString("N1") + " 
secs"); - - db.Debug_Dump(); - - Console.Write("Saving..."); - sw.Restart(); - await db.SaveSnapshotAsync(FILE_PATH, null, this.Cancellation); - sw.Stop(); - Console.WriteLine(" done in " + sw.Elapsed.TotalSeconds.ToString("N1") + " secs"); - } - - var fi = new FileInfo(FILE_PATH); - Assert.That(fi.Exists, Is.True, "Snapshot file not found"); - Console.WriteLine("File size is " + fi.Length.ToString("N0") + " bytes (" + (fi.Length * 1.0d / N).ToString("N2") + " bytes/item, " + (fi.Length / (1048576.0 * sw.Elapsed.TotalSeconds)).ToString("N3") + " MB/sec)"); - - Console.Write("Loading..."); - sw.Restart(); - using (var db = await MemoryDatabase.LoadFromAsync(FILE_PATH, this.Cancellation)) - { - sw.Stop(); - Console.WriteLine(" done in " + sw.Elapsed.TotalSeconds.ToString("N1") + " secs (" + (fi.Length / (1048576.0 * sw.Elapsed.TotalSeconds)).ToString("N0") + " MB/sec)"); - db.Debug_Dump(); - - Console.WriteLine("Checking data integrity..."); - sw.Restart(); - long n = 0; - foreach (var batch in data.Buffered(50 * 1000)) - { - using (var tx = db.BeginReadOnlyTransaction(this.Cancellation)) - { - var res = await tx - .Snapshot - .GetRange( - FdbKeySelector.FirstGreaterOrEqual(batch[0].Key), - FdbKeySelector.FirstGreaterThan(batch[batch.Count - 1].Key)) - .ToListAsync() - .ConfigureAwait(false); - - Assert.That(res.Count, Is.EqualTo(batch.Count), "Some keys are missing from {0} to {1} :(", batch[0], batch[batch.Count - 1]); - - for (int i = 0; i < res.Count; i++) - { - // note: Is.EqualTo(...) 
is slow on Slices so we speed things a bit - if (res[i].Key != batch[i].Key) Assert.That(res[i].Key, Is.EqualTo(batch[i].Key), "Key is different :("); - if (res[i].Value != batch[i].Value) Assert.That(res[i].Value, Is.EqualTo(batch[i].Value), "Value is different for key {0} :(", batch[i].Key); - } - } - n += batch.Count; - Console.Write("\r" + n.ToString("N0")); - } - sw.Stop(); - Console.WriteLine(" done in " + sw.Elapsed.TotalSeconds.ToString("N1") + " secs"); - } - - Console.WriteLine("Content of database are identical ^_^"); - } - - } -} diff --git a/FoundationDB.Storage.Memory.Test/packages.config b/FoundationDB.Storage.Memory.Test/packages.config deleted file mode 100644 index c714ef3a2..000000000 --- a/FoundationDB.Storage.Memory.Test/packages.config +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/FoundationDB.Storage.Memory/API/LevelWriter.cs b/FoundationDB.Storage.Memory/API/LevelWriter.cs deleted file mode 100644 index 6155d3c02..000000000 --- a/FoundationDB.Storage.Memory/API/LevelWriter.cs +++ /dev/null @@ -1,82 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.API -{ - using FoundationDB.Client; - using FoundationDB.Storage.Memory.Core; - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Collections.Generic; - using System.Diagnostics.Contracts; - - /// Helper class to add key/value pairs to a level - /// This class is not thread-safe - internal sealed class LevelWriter : IDisposable - { - - private readonly UnmanagedSliceBuilder m_scratch = new UnmanagedSliceBuilder(128 * 1024); // > 80KB will go to the LOH - private readonly List m_list; - private readonly KeyHeap m_keys; - private readonly ValueHeap m_values; - - public LevelWriter(int count, KeyHeap keyHeap, ValueHeap valueHeap) - { - Contract.Requires(count > 0 && keyHeap != null && valueHeap != null); - m_keys = keyHeap; - m_values = valueHeap; - m_list = new List(count); - } - - public List Data { get { return m_list; } } - - public unsafe void Add(ulong sequence, KeyValuePair current) - { - // allocate the key - var tmp = MemoryDatabaseHandler.PackUserKey(m_scratch, current.Key); - Key* key = m_keys.Append(tmp); - Contract.Assert(key != null, "key == null"); - - // allocate the value - Slice userValue = current.Value; - uint size = checked((uint)userValue.Count); - Value* value = m_values.Allocate(size, sequence, null, key); - Contract.Assert(value != null, "value == null"); - UnmanagedHelpers.CopyUnsafe(&(value->Data), userValue); - - key->Values = value; - - m_list.Add(new IntPtr(key)); - } - - public unsafe void Add(ulong sequence, USlice userKey, USlice userValue) - { - // allocate the key - var tmp = MemoryDatabaseHandler.PackUserKey(m_scratch, userKey); - Key* key = m_keys.Append(tmp); - Contract.Assert(key != null, "key == null"); - - // allocate the value - uint size = userValue.Count; - Value* value = m_values.Allocate(size, sequence, null, key); - Contract.Assert(value != null, "value == null"); - 
UnmanagedHelpers.CopyUnsafe(&(value->Data), userValue); - - key->Values = value; - - m_list.Add(new IntPtr(key)); - } - - public void Reset() - { - m_list.Clear(); - } - - public void Dispose() - { - m_scratch.Dispose(); - } - } - -} diff --git a/FoundationDB.Storage.Memory/API/MemoryClusterHandler.cs b/FoundationDB.Storage.Memory/API/MemoryClusterHandler.cs deleted file mode 100644 index ff33c9828..000000000 --- a/FoundationDB.Storage.Memory/API/MemoryClusterHandler.cs +++ /dev/null @@ -1,64 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.API -{ - using FoundationDB.Client; - using FoundationDB.Client.Core; - using System; - using System.Threading; - using System.Threading.Tasks; - - internal class MemoryClusterHandler : IFdbClusterHandler, IDisposable - { - - private bool m_disposed; - - public MemoryClusterHandler() - { - //TODO ? - } - - public bool IsInvalid - { - get { return false; } - } - - public bool IsClosed - { - get { return m_disposed; } - } - - public void SetOption(FdbClusterOption option, Slice data) - { - throw new NotImplementedException(); - } - - internal MemoryDatabaseHandler OpenDatabase(Guid uid) - { - return new MemoryDatabaseHandler(uid); - } - - public Task OpenDatabaseAsync(string databaseName, CancellationToken cancellationToken) - { - // fdb currently disallow anthing other than "DB" - if (databaseName != null && databaseName != "DB") throw new FdbException(FdbError.InvalidDatabaseName); - - var uid = Guid.NewGuid(); - return Task.FromResult(OpenDatabase(uid)); - } - - public void Dispose() - { - if (!m_disposed) - { - m_disposed = true; - //TODO - } - - GC.SuppressFinalize(this); - } - } - -} diff --git a/FoundationDB.Storage.Memory/API/MemoryDatabase.cs b/FoundationDB.Storage.Memory/API/MemoryDatabase.cs deleted file mode 100644 index 8f47adcdf..000000000 --- a/FoundationDB.Storage.Memory/API/MemoryDatabase.cs 
+++ /dev/null @@ -1,136 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.API -{ - using FoundationDB.Client; - using FoundationDB.Layers.Directories; - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Linq; - using System.Threading; - using System.Threading.Tasks; - - /// In-memory database instance - public class MemoryDatabase : FdbDatabase - { - - #region Static Helpers... - - public static MemoryDatabase CreateNew() - { - return CreateNew("DB", FdbSubspace.Empty, false); - } - - public static MemoryDatabase CreateNew(string name) - { - return CreateNew(name, FdbSubspace.Empty, false); - } - - public static MemoryDatabase CreateNew(string name, IFdbSubspace globalSpace, bool readOnly) - { - globalSpace = globalSpace ?? FdbSubspace.Empty; - var uid = Guid.NewGuid(); - - MemoryClusterHandler cluster = null; - MemoryDatabaseHandler db = null; - try - { - cluster = new MemoryClusterHandler(); - db = cluster.OpenDatabase(uid); - - // initialize the system keys for this new db - db.PopulateSystemKeys(); - - return new MemoryDatabase(new FdbCluster(cluster, ":memory:"), db, name, globalSpace, null, readOnly, true); - } - catch - { - if (db != null) db.Dispose(); - if (cluster != null) cluster.Dispose(); - throw; - } - } - - public static async Task LoadFromAsync(string path, CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - - MemoryClusterHandler cluster = null; - MemoryDatabaseHandler db = null; - try - { - cluster = new MemoryClusterHandler(); - db = cluster.OpenDatabase(Guid.Empty); - - // load the snapshot from the disk - var options = new MemorySnapshotOptions(); //TODO! 
- await db.LoadSnapshotAsync(path, options, cancellationToken); - - return new MemoryDatabase(new FdbCluster(cluster, ":memory:"), db, "DB", FdbSubspace.Empty, null, false, true); - } - catch(Exception) - { - if (db != null) db.Dispose(); - if (cluster != null) cluster.Dispose(); - throw; - } - } - - #endregion - - private readonly MemoryDatabaseHandler m_handler; - - private MemoryDatabase(IFdbCluster cluster, MemoryDatabaseHandler handler, string name, IFdbSubspace globalSpace, IFdbDirectory directory, bool readOnly, bool ownsCluster) - : base(cluster, handler, name, globalSpace, directory, readOnly, ownsCluster) - { - m_handler = handler; - } - - [Conditional("DEBUG")] - public void Debug_Dump(bool detailed = false) - { - m_handler.Debug_Dump(detailed); - } - - /// Trigger a garbage collection of the memory database - /// If the amount of memory that can be collected is too small, this operation will do nothing. - public void Collect() - { - m_handler.Collect(); - } - - /// Replace the content of the database with existing data. - /// Data that will replace the content of the database. The elements do not need to be sorted, but best performance is achieved if all the keys are lexicographically ordered (smallest to largest) - /// Optionnal cancellation token - /// Task that completes then the data has been loaded into the database - /// Any pre-existing data will be removed! - public Task BulkLoadAsync(IEnumerable> data, bool ordered = false, CancellationToken cancellationToken = default(CancellationToken)) - { - if (data == null) throw new ArgumentNullException("data"); - if (cancellationToken.IsCancellationRequested) return TaskHelpers.FromCancellation(cancellationToken); - - var coll = data as ICollection> ?? 
data.ToList(); - - return m_handler.BulkLoadAsync(coll, ordered, false, cancellationToken); - } - - public Task SaveSnapshotAsync(string path, MemorySnapshotOptions options = null, CancellationToken cancellationToken = default(CancellationToken)) - { - if (path == null) throw new ArgumentNullException("path"); - if (cancellationToken.IsCancellationRequested) return TaskHelpers.FromCancellation(cancellationToken); - - options = options ?? new MemorySnapshotOptions() - { - Mode = MemorySnapshotMode.Full - }; - - return m_handler.SaveSnapshotAsync(path, options, cancellationToken); - } - - } - -} diff --git a/FoundationDB.Storage.Memory/API/MemoryDatabaseHandler.cs b/FoundationDB.Storage.Memory/API/MemoryDatabaseHandler.cs deleted file mode 100644 index 5daa29236..000000000 --- a/FoundationDB.Storage.Memory/API/MemoryDatabaseHandler.cs +++ /dev/null @@ -1,1720 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -#undef FULLDEBUG - -namespace FoundationDB.Storage.Memory.API -{ - using FoundationDB.Client; - using FoundationDB.Client.Core; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using FoundationDB.Storage.Memory.Core; - using FoundationDB.Storage.Memory.IO; - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Collections.Concurrent; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Text; - using System.Threading; - using System.Threading.Tasks; - - internal class MemoryDatabaseHandler : IFdbDatabaseHandler, IDisposable - { - internal const uint MAX_KEY_SIZE = 10 * 1000; - internal const uint MAX_VALUE_SIZE = 100 * 1000; - - internal const uint KEYHEAP_MIN_PAGESIZE = 64 * 1024; - internal const uint KEYHEAP_MAX_PAGESIZE = 4 * 1024 * 1024; - internal const uint VALUEHEAP_MIN_PAGESIZE = 256 * 1024; - internal const uint VALUEHEAP_MAX_PAGESIZE = 16 * 1024 * 1024; - - internal 
void PopulateSystemKeys() - { - // we need to create the System keyspace, under \xFF - - // cheap way to generate machine & datacenter ids - var databaseId = new Uuid128(m_uid).ToSlice(); - var machineId = Slice.FromFixed64(Environment.MachineName.GetHashCode()) + databaseId[0, 8]; - var datacenterId = Slice.FromFixed64(Environment.MachineName.GetHashCode()) + databaseId[8, 16]; - var keyServerBlob = Slice.FromFixed16(1) + Slice.FromFixed32(0xA22000) + Slice.FromFixed16(0xFDB) + Slice.FromFixed32(1) + databaseId + Slice.FromFixed32(0); - var one = Slice.FromAscii("1"); - - var systemKeys = new Dictionary() - { - { Fdb.System.BackupDataFormat, one }, - { Fdb.System.ConfigKey("initialized"), one }, - { Fdb.System.ConfigKey("storage_engine"), one }, // ~= memory - { Fdb.System.ConfigKey("storage_replicas"), one }, // single replica - { Fdb.System.Coordinators, Slice.FromString("local:" + m_uid.ToString("N") + "@memory") }, - { Fdb.System.GlobalsKey("lastEpochEnd"), Slice.FromFixed64(0) }, - { Fdb.System.InitId, Slice.FromAscii(Guid.NewGuid().ToString("N")) }, - - { Fdb.System.KeyServers, keyServerBlob }, - { Fdb.System.KeyServers + Fdb.System.KeyServers, keyServerBlob }, - { Fdb.System.KeyServers + Fdb.System.MaxValue, Slice.Empty }, - - { Fdb.System.ServerKeys + databaseId + Slice.FromAscii("/"), one }, - { Fdb.System.ServerKeys + databaseId + Slice.FromAscii("/\xFF\xFF"), Slice.Empty }, - - //TODO: serverList ? - - { Fdb.System.WorkersKey("memory", "datacenter"), datacenterId }, - { Fdb.System.WorkersKey("memory", "machine"), machineId }, - { Fdb.System.WorkersKey("memory", "mclass"), Slice.FromAscii("unset") }, - }; - - BulkLoadAsync(systemKeys, false, false, CancellationToken.None).GetAwaiter().GetResult(); - } - - #region Private Members... - - /// Set to true when the current db instance gets disposed. 
- private volatile bool m_disposed; - - /// Current version of the database - private long m_currentVersion; - /// Oldest legal read version of the database - private long m_oldestVersion; - - /// Unique number for this database - private Guid m_uid; - - //TODO: replace this with an Async lock ? - private readonly ReaderWriterLockSlim m_dataLock = new ReaderWriterLockSlim(); - private readonly object m_heapLock = new object(); - - private readonly KeyHeap m_keys = new KeyHeap(); - private readonly ValueHeap m_values = new ValueHeap(); - - private ColaStore m_data = new ColaStore(0, new NativeKeyComparer()); - private long m_estimatedSize; - - /// List of all active transaction windows - private LinkedList m_transactionWindows = new LinkedList(); - /// Last transaction window - private TransactionWindow m_currentWindow; - - // note: all scratch buffers should have a size larger than 80KB, so that they to the LOH - /// Pool of builders uses by read operations from transactions (concurrent) - private UnmanagedSliceBuilderPool m_scratchPool = new UnmanagedSliceBuilderPool(128 * 1024, 64); - /// Scratch use to format keys when committing (single writer) - private UnmanagedSliceBuilder m_scratchKey = new UnmanagedSliceBuilder(128 * 1024); - /// Scratch use to hold values when committing (single writer) - private UnmanagedSliceBuilder m_scratchValue = new UnmanagedSliceBuilder(128 * 1024); - - #endregion - - public MemoryDatabaseHandler(Guid uid) - { - m_uid = uid; - } - - public Guid Id { get { return m_uid; } } - - public bool IsInvalid { get { return false; } } - - public bool IsClosed { get { return m_disposed; } } - - public void SetOption(FdbDatabaseOption option, Slice data) - { - throw new NotImplementedException(); - } - - internal long GetCurrentVersion() - { - m_dataLock.EnterReadLock(); - try - { - return Volatile.Read(ref m_currentVersion); - } - finally - { - m_dataLock.ExitReadLock(); - } - } - - /// Format a user key using a slice buffer for temporary 
storage - /// The buffer is cleared prior to usage! - internal unsafe static USlice PackUserKey(UnmanagedSliceBuilder buffer, Slice userKey) - { - Contract.Requires(buffer != null && userKey.Array != null && userKey.Count >= 0 && userKey.Offset >= 0); - Contract.Requires(userKey.Count <= MemoryDatabaseHandler.MAX_KEY_SIZE); - - buffer.Clear(); - uint keySize = (uint)userKey.Count; - uint size = Key.SizeOf + keySize; - var tmp = buffer.Allocate(size); - var key = (Key*)tmp.Data; - key->Size = (ushort)keySize; - key->HashCode = UnmanagedHelpers.ComputeHashCode(ref userKey); - key->Header = ((ushort)EntryType.Key) << Entry.TYPE_SHIFT; - key->Values = null; - - if (keySize > 0) UnmanagedHelpers.CopyUnsafe(&(key->Data), userKey); - return tmp; - } - - /// Format a user key - internal unsafe static USlice PackUserKey(UnmanagedSliceBuilder buffer, USlice userKey) - { - Contract.Requires(buffer != null && userKey.Data != null); - Contract.Requires(userKey.Count <= MemoryDatabaseHandler.MAX_KEY_SIZE); - - buffer.Clear(); - uint keySize = userKey.Count; - var size = Key.SizeOf + keySize; - var tmp = buffer.Allocate(size); - var key = (Key*)tmp.Data; - key->Size = (ushort)keySize; - key->HashCode = UnmanagedHelpers.ComputeHashCode(ref userKey); - key->Header = ((ushort)EntryType.Key) << Entry.TYPE_SHIFT; - key->Values = null; - - if (keySize > 0) UnmanagedHelpers.CopyUnsafe(&(key->Data), userKey); - return tmp; - } - - private TimeSpan m_transactionHalfLife = TimeSpan.FromSeconds(2.5); - private TimeSpan m_windowMaxDuration = TimeSpan.FromSeconds(5); - private int m_windowMaxWrites = 1000; - - private TransactionWindow GetActiveTransactionWindow_NeedsLocking(ulong sequence) - { - var window = m_currentWindow; - var now = DateTime.UtcNow; - - // open a new window if the previous one is already closed, or is too old - if (window != null) - { // is it still active ? 
- if (window.Closed || now.Subtract(window.StartedUtc) >= m_transactionHalfLife || window.CommitCount >= m_windowMaxWrites) - { - Log("Recycling previous window " + window); - window = null; - } - } - - if (window == null) - { // need to start a new window - window = new TransactionWindow(now, sequence); - m_currentWindow = window; - m_transactionWindows.AddFirst(window); - } - - // check the oldest transaction window - PurgeOldTransactionWindows(now); - - return window; - } - - private void PurgeOldTransactionWindows(DateTime utcNow) - { - var stop = m_currentWindow; - var node = m_transactionWindows.Last; - TransactionWindow window; - - while ((node != null && (window = node.Value) != null && window != stop)) - { - if (!window.Closed && utcNow.Subtract(window.StartedUtc) <= m_windowMaxDuration) - { - break; - } - Log("Purging old transaction window " + window.ToString()); - - window.Close(); - var tmp = node.Previous; - m_transactionWindows.RemoveLast(); - node = tmp; - } - } - - /// Commits the changes made by a transaction to the database. - /// - /// - /// - /// - /// - /// - /// - /// This method is not thread safe and must be called from the writer thread. 
- internal unsafe long CommitTransaction(MemoryTransactionHandler trans, long readVersion, ColaRangeSet readConflicts, ColaRangeSet writeConflicts, ColaRangeSet clearRanges, ColaOrderedDictionary writes) - { - if (trans == null) throw new ArgumentNullException("trans"); - if (m_disposed) ThrowDisposed(); - - // version at which the transaction was created (and all reads performed) - ulong readSequence = (ulong)readVersion; - // commit version created by this transaction (if it writes something) - ulong committedSequence = 0; - - Log("Comitting transaction created at readVersion " + readVersion + " ..."); - - bool hasReadConflictRanges = readConflicts != null && readConflicts.Count > 0; - bool hasWriteConflictRanges = writeConflicts != null && writeConflicts.Count > 0; - bool hasClears = clearRanges != null && clearRanges.Count > 0; - bool hasWrites = writes != null && writes.Count > 0; - - bool isReadOnlyTransaction = !hasClears && !hasWrites && !hasWriteConflictRanges; - - m_dataLock.EnterUpgradeableReadLock(); - try - { - TransactionWindow window; - - if (!isReadOnlyTransaction) - { - committedSequence = (ulong)Interlocked.Increment(ref m_currentVersion); - window = GetActiveTransactionWindow_NeedsLocking(committedSequence); - Contract.Assert(window != null); - Log("... will create version " + committedSequence + " in window " + window.ToString()); - } - else - { - Log("... which is read-only"); - window = null; - } - - #region Read Conflict Check - - if (hasReadConflictRanges) - { - - var current = m_transactionWindows.First; - while (current != null && current.Value.LastVersion >= readSequence) - { - if (current.Value.Conflicts(readConflicts, readSequence)) - { - // the transaction has conflicting reads - throw new FdbException(FdbError.NotCommitted); - } - current = current.Next; - } - } - - #endregion - - if (!isReadOnlyTransaction) - { - #region Clear Ranges... - - if (hasClears) - { - foreach (var clear in clearRanges) - { - //TODO! 
- throw new NotImplementedException("ClearRange not yet implemented. Sorry!"); - } - } - - #endregion - - #region Writes... - - if (hasWrites) - { - IntPtr singleInsert = IntPtr.Zero; - List pendingInserts = null; - - foreach (var write in writes) - { - Key* key; - Value* value; - - // apply all the transformations at once on the key, add a new version if required - - // Only two allowed cases: - // - a single SET operation that create or update the value - // - one or more ATOMIC operations that create or mutate the value - - // For both case, we will do a lookup in the db to get the previous value and location - - // create the lookup key - USlice lookupKey = PackUserKey(m_scratchKey, write.Key); - - IntPtr previous; - int offset, level = m_data.Find(lookupKey.GetPointer(), out offset, out previous); - key = level >= 0 ? (Key*)previous : null; - Contract.Assert((level < 0 && key == null) || (level >= 0 && offset >= 0 && key != null)); - - bool valueMutated = false; - bool currentIsDeleted = false; - bool hasTmpData = false; - - foreach (var op in write.Value) - { - if (op.Type == MemoryTransactionHandler.Operation.Nop) continue; - - if (op.Type == MemoryTransactionHandler.Operation.Set) - { - m_scratchValue.Set(op.Value); - hasTmpData = true; - valueMutated = true; - continue; - } - - // apply the atomic operation to the previous value - if (!hasTmpData) - { - m_scratchValue.Clear(); - if (key != null) - { // grab the current value of this key - - Value* p = key->Values; - if ((p->Header & Value.FLAGS_DELETION) == 0) - { - m_scratchValue.Append(&(p->Data), p->Size); - } - else - { - m_scratchValue.Clear(); - currentIsDeleted = true; - } - } - hasTmpData = true; - } - - switch (op.Type) - { - case MemoryTransactionHandler.Operation.AtomicAdd: - { - op.ApplyAddTo(m_scratchValue); - valueMutated = true; - break; - } - case MemoryTransactionHandler.Operation.AtomicBitAnd: - { - op.ApplyBitAndTo(m_scratchValue); - valueMutated = true; - break; - } - case 
MemoryTransactionHandler.Operation.AtomicBitOr: - { - op.ApplyBitOrTo(m_scratchValue); - valueMutated = true; - break; - } - case MemoryTransactionHandler.Operation.AtomicBitXor: - { - op.ApplyBitXorTo(m_scratchValue); - valueMutated = true; - break; - } - default: - { - throw new InvalidOperationException(); - } - } - } - - if (valueMutated) - { // we have a new version for this key - - lock (m_heapLock) - { - value = m_values.Allocate(m_scratchValue.Count, committedSequence, key != null ? key->Values : null, null); - } - Contract.Assert(value != null); - m_scratchValue.CopyTo(&(value->Data)); - Interlocked.Add(ref m_estimatedSize, value->Size); - - if (key != null) - { // mutate the previous version for this key - var prev = key->Values; - value->Parent = key; - key->Values = value; - prev->Header |= Value.FLAGS_MUTATED; - prev->Parent = value; - - // make sure no thread seees an inconsitent view of the key - Interlocked.MemoryBarrier(); - } - else - { // add this key to the data store - - // we can reuse the lookup key (which is only missing the correct flags and pointers to the values) - lock (m_heapLock) - { - key = m_keys.Append(lookupKey); - } - key->Values = value; - value->Parent = key; - Contract.Assert(key->Size == write.Key.Count); - Interlocked.Add(ref m_estimatedSize, key->Size); - - // make sure no thread seees an inconsitent view of the key - Interlocked.MemoryBarrier(); - - if (pendingInserts != null) - { - pendingInserts.Add(new IntPtr(key)); - } - else if (singleInsert != IntPtr.Zero) - { - pendingInserts = new List(); - pendingInserts.Add(singleInsert); - pendingInserts.Add(new IntPtr(key)); - singleInsert = IntPtr.Zero; - } - else - { - singleInsert = new IntPtr(key); - } - } - - } - } - - if (singleInsert != IntPtr.Zero || pendingInserts != null) - { - // insert the new key into the data store - m_dataLock.EnterWriteLock(); - try - { - if (singleInsert != IntPtr.Zero) - { - m_data.Insert(singleInsert); - } - else - { - 
m_data.InsertItems(pendingInserts, ordered: true); - } - } - finally - { - m_dataLock.ExitWriteLock(); - } - } - } - - #endregion - - #region Merge Write Conflicts... - - if (hasWriteConflictRanges) - { - window.MergeWrites(writeConflicts, committedSequence); - } - - #endregion - } - } - finally - { - m_dataLock.ExitUpgradeableReadLock(); - } - - var version = isReadOnlyTransaction ? -1L : (long)committedSequence; - - return version; - } - - internal unsafe Task BulkLoadAsync(ICollection> data, bool ordered, bool append, CancellationToken cancellationToken) - { - Contract.Requires(data != null); - - int count = data.Count; - - // Since we can "only" create a maximum of 28 levels, there is a maximum limit or 2^28 - 1 items that can be loaded in the database (about 268 millions) - if (count >= 1 << 28) throw new InvalidOperationException("Data set is too large. Cannot insert more than 2^28 - 1 items in the memory database"); - - // clear everything, and import the specified data - - m_dataLock.EnterWriteLock(); - try - { - - // the fastest way to insert data, is to insert vectors that are a power of 2 - int min = ColaStore.LowestBit(count); - int max = ColaStore.HighestBit(count); - Contract.Assert(min <= max && max <= 28); - if (append) - { // the appended layers have to be currently free - for (int level = min; level <= max; level++) - { - if (!m_data.IsFree(level)) throw new InvalidOperationException(String.Format("Cannot bulk load level {0} because it is already in use", level)); - } - } - else - { // start from scratch - m_data.Clear(); - m_estimatedSize = 0; - //TODO: clear the key and value heaps ! - //TODO: clear the transaction windows ! - //TODO: kill all pending transactions ! 
- } - - m_data.EnsureCapacity(count); - - ulong sequence = (ulong)Interlocked.Increment(ref m_currentVersion); - - using (var iter = data.GetEnumerator()) - using (var writer = new LevelWriter(1 << max, m_keys, m_values)) - { - for (int level = max; level >= min && !cancellationToken.IsCancellationRequested; level--) - { - if (ColaStore.IsFree(level, count)) continue; - - //TODO: consider pre-sorting the items before inserting them in the heap using m_comparer (maybe faster than doing the same with the key comparer?) - - // take of batch of values - writer.Reset(); - int batch = 1 << level; - while(batch-- > 0) - { - if (!iter.MoveNext()) - { - throw new InvalidOperationException("Iterator stopped before reaching the expected number of items"); - } - writer.Add(sequence, iter.Current); - } - - // and insert it (should fit nicely in a level without cascading) - m_data.InsertItems(writer.Data, ordered); - } - } - } - finally - { - m_dataLock.ExitWriteLock(); - } - - if (cancellationToken.IsCancellationRequested) return TaskHelpers.FromCancellation(cancellationToken); - return TaskHelpers.CompletedTask; - } - - private static readonly Task NilResult = Task.FromResult(Slice.Nil); - private static readonly Task EmptyResult = Task.FromResult(Slice.Empty); - private static readonly Task MaxResult = Task.FromResult(Slice.FromByte(255)); - - private void EnsureReadVersionNotInTheFuture_NeedsLocking(ulong readVersion) - { - if ((ulong)Volatile.Read(ref m_currentVersion) < readVersion) - { // a read for a future version? This is most probably a bug ! -#if DEBUG - if (Debugger.IsAttached) Debugger.Break(); -#endif - throw new FdbException(FdbError.FutureVersion); - } - } - - [Conditional("FULLDEBUG")] - private unsafe static void DumpKey(string label, IntPtr userKey) - { - var sb = new StringBuilder("(*) " + (label ?? 
"key") + " = "); - if (userKey == IntPtr.Zero) - { - sb.Append(""); - } - else - { - sb.Append(userKey).Append(" => "); - - Key* key = (Key*)userKey; - Contract.Assert(key != null); - - sb.Append('\'').Append(FdbKey.Dump(Key.GetData(key).ToSlice())).Append('\''); - - Value* value = key->Values; - if (value != null) - { - sb.Append(" => [").Append(value->Sequence).Append("] "); - if ((value->Header & Value.FLAGS_DELETION) != 0) - { - sb.Append("DELETED"); - } - else if (value->Size == 0) - { - sb.Append(""); - } - else - { - sb.Append(Value.GetData(value).ToSlice().ToAsciiOrHexaString()); - } - } - } - Trace.WriteLine(sb.ToString()); - } - - private unsafe bool TryGetValueAtVersion(USlice lookupKey, ulong sequence, out USlice result) - { - result = default(USlice); - - IntPtr existing; - int _, level = m_data.Find(lookupKey.GetPointer(), out _, out existing); - if (level < 0) - { - return false; - } - - Key* key = (Key*)existing; - //TODO: aserts! - - // walk the chain of version until we find one that existed at the request version - Value* current = key->Values; - while (current != null) - { - if (current->Sequence <= sequence) - { // found it - break; - } - current = current->Previous; - } - - if (current == null || (current->Header & Value.FLAGS_DELETION) != 0) - { // this key was created after our read version, or this version is a deletion marker - return false; - } - - if (current->Size > 0) - { // the value is not empty - result = Value.GetData(current); - } - return true; - - } - - /// Read the value of one or more keys, at a specific database version - /// List of keys to read (MUST be ordered) - /// Version of the read - /// Array of results - internal unsafe Slice[] GetValuesAtVersion(Slice[] userKeys, long readVersion) - { - if (m_disposed) ThrowDisposed(); - if (userKeys == null) throw new ArgumentNullException("userKeys"); - - var results = new Slice[userKeys.Length]; - - if (userKeys.Length > 0) - { - m_dataLock.EnterReadLock(); - try - { - ulong 
sequence = (ulong)readVersion; - EnsureReadVersionNotInTheFuture_NeedsLocking(sequence); - - var buffer = new SliceBuffer(); - - using (var scratch = m_scratchPool.Use()) - { - var builder = scratch.Builder; - - for (int i = 0; i < userKeys.Length; i++) - { - // create a lookup key - var lookupKey = PackUserKey(builder, userKeys[i]); - - USlice value; - if (!TryGetValueAtVersion(lookupKey, sequence, out value)) - { // this key does not exist, or was deleted at that time - results[i] = default(Slice); - } - else if (value.Count == 0) - { // the value is the empty slice - results[i] = Slice.Empty; - } - else - { // move this value to the slice buffer - var data = buffer.Allocate(checked((int)value.Count)); - Contract.Assert(data.Array != null && data.Offset >= 0 && data.Count == (int)value.Count); - UnmanagedHelpers.CopyUnsafe(data, value.Data, value.Count); - results[i] = data; - } - } - } - } - finally - { - m_dataLock.ExitReadLock(); - } - } - return results; - } - - /// Walk the value chain, to return the value of a key that was the latest at a specific read version - /// User key to resolve - /// Sequence number - /// Value of the key at that time, or null if the key was either deleted or not yet created. 
- internal static unsafe Value* ResolveValueAtVersion(IntPtr userKey, ulong sequence) - { - if (userKey == IntPtr.Zero) return null; - - Key* key = (Key*)userKey; - Contract.Assert((key->Header & Entry.FLAGS_DISPOSED) == 0, "Attempted to read value from a disposed key"); - Contract.Assert(key->Size <= MemoryDatabaseHandler.MAX_KEY_SIZE, "Attempted to read value from a key that is too large"); - - Value* current = key->Values; - while(current != null && current->Sequence > sequence) - { - current = current->Previous; - } - - if (current == null || (current->Header & Value.FLAGS_DELETION) != 0) - { - return null; - } - - Contract.Ensures((current->Header & Entry.FLAGS_DISPOSED) == 0 && current->Sequence <= sequence); - return current; - } - - private unsafe ColaStore.Iterator ResolveCursor(USlice lookupKey, bool orEqual, int offset, ulong sequence) - { - var iterator = m_data.GetIterator(); - - DumpKey(orEqual ? "seek(<=)" : "seek(<)", lookupKey.GetPointer()); - - // seek to the closest key - if (!iterator.Seek(lookupKey.GetPointer(), orEqual)) - { // we are before the first key in the database! 
- if (offset <= 0) - { - iterator.SeekBeforeFirst(); - return iterator; - } - else - { - iterator.SeekFirst(); - --offset; - } - } - - bool forward = offset >= 0; - - while (iterator.Current != IntPtr.Zero) - { - DumpKey("offset " + offset, iterator.Current); - Value* value = ResolveValueAtVersion(iterator.Current, sequence); - //Trace.WriteLine("[*] " + (long)value); - if (value != null) - { - if (offset == 0) - { // we found a key that was alive, and at the correct offset - break; - } - if (forward) - { - --offset; - } - else - { - ++offset; - } - } - - if (forward) - { // move forward - - //Trace.WriteLine("> next!"); - if (!iterator.Next()) - { - //Trace.WriteLine(" > EOF"); - break; - } - } - else - { // move backward - //Trace.WriteLine("> prev!"); - if (!iterator.Previous()) - { - //Trace.WriteLine(" > EOF"); - break; - } - } - } - - return iterator; - } - - internal unsafe Task GetKeysAtVersion(FdbKeySelector[] selectors, long readVersion) - { - if (m_disposed) ThrowDisposed(); - if (selectors == null) throw new ArgumentNullException("selectors"); - - var results = new Slice[selectors.Length]; - - m_dataLock.EnterReadLock(); - try - { - ulong sequence = (ulong)readVersion; - EnsureReadVersionNotInTheFuture_NeedsLocking(sequence); - - // TODO: convert all selectors to a FirstGreaterThan ? - var buffer = new SliceBuffer(); - - using (var scratch = m_scratchPool.Use()) - { - var builder = scratch.Builder; - - for (int i = 0; i < selectors.Length; i++) - { - var selector = selectors[i]; - - var lookupKey = PackUserKey(builder, selector.Key); - - var iterator = ResolveCursor(lookupKey, selector.OrEqual, selector.Offset, sequence); - Contract.Assert(iterator != null); - - if (iterator.Current == IntPtr.Zero) - { - //Trace.WriteLine("> NOTHING :("); - results[i] = default(Slice); - continue; - } - - // we want the key! 
- Key* key = (Key*)iterator.Current; - Contract.Assert(key != null && key->Size <= MemoryDatabaseHandler.MAX_KEY_SIZE); - - var data = buffer.Allocate(checked((int)key->Size)); - Contract.Assert(data.Array != null && data.Offset >= 0 && data.Count == (int)key->Size); - UnmanagedHelpers.CopyUnsafe(data, &(key->Data), key->Size); - results[i] = data; - } - } - } - finally - { - m_dataLock.ExitReadLock(); - } - - return Task.FromResult(results); - } - - private static unsafe KeyValuePair CopyResultToManagedMemory(SliceBuffer buffer, Key* key, Value* value) - { - Contract.Requires(buffer != null && key != null && value != null); - - var keyData = buffer.Allocate(checked((int)key->Size)); - UnmanagedHelpers.CopyUnsafe(keyData, &(key->Data), key->Size); - - var valueData = buffer.Allocate(checked((int)value->Size)); - UnmanagedHelpers.CopyUnsafe(valueData, &(value->Data), value->Size); - - return new KeyValuePair(keyData, valueData); - } - - /// Range iterator that will return the keys and values at a specific sequence - internal sealed unsafe class RangeIterator : IDisposable - { - private readonly MemoryDatabaseHandler m_handler; - private readonly ulong m_sequence; - private readonly ColaStore.Iterator m_iterator; - private readonly IntPtr m_stopKey; - private readonly IComparer m_comparer; - private readonly long m_limit; - private readonly long m_targetBytes; - private readonly bool m_reverse; - private bool m_done; - private long m_readKeys; - private long m_readBytes; - private Key* m_currentKey; - private Value* m_currentValue; - private bool m_disposed; - - internal RangeIterator(MemoryDatabaseHandler handler, ulong sequence, ColaStore.Iterator iterator, IntPtr stopKey, IComparer comparer, bool reverse) - { - Contract.Requires(handler != null && iterator != null && comparer != null); - m_handler = handler; - m_sequence = sequence; - m_iterator = iterator; - m_stopKey = stopKey; - m_comparer = comparer; - m_reverse = reverse; - } - - public long Sequence { get { 
return (long)m_sequence; } } - - public long Count { get { return m_readKeys; } } - - public long Bytes { get { return m_readBytes; } } - - public long TargetBytes { get { return m_targetBytes; } } - - public bool Reverse { get { return m_reverse; } } - - public Key* Key { get { return m_currentKey; } } - - public Value* Value { get { return m_currentValue; } } - - public bool Done { get { return m_done; } } - - public bool MoveNext() - { - if (m_done || m_disposed) return false; - - bool gotOne = false; - - while (!gotOne) - { - var current = m_iterator.Current; - DumpKey("current", current); - - Value* value = MemoryDatabaseHandler.ResolveValueAtVersion(current, m_sequence); - if (value != null) - { - if (m_stopKey != IntPtr.Zero) - { - int c = m_comparer.Compare(current, m_stopKey); - if (m_reverse ? (c < 0 /* BEGIN KEY IS INCLUDED! */) : (c >= 0 /* END KEY IS EXCLUDED! */)) - { // we reached the end, stop there ! - DumpKey("stopped at ", current); - MarkAsDone(); - break; - } - } - Key* key = (Key*)current; - ++m_readKeys; - m_readBytes += checked(key->Size + value->Size); - m_currentKey = key; - m_currentValue = value; - gotOne = true; - } - - // prepare for the next value - if (!(m_reverse ? m_iterator.Previous() : m_iterator.Next())) - { - // out of data to read ? 
- MarkAsDone(); - break; - } - } - - if (gotOne) - { // we have found a value - return true; - } - - m_currentKey = null; - m_currentValue = null; - return false; - } - - private void MarkAsDone() - { - m_done = true; - } - - public void Dispose() - { - if (!m_disposed) - { - m_disposed = true; - m_currentKey = null; - m_currentValue = null; - //TODO: release any locks taken - } - } - } - - internal unsafe Task GetRangeAtVersion(FdbKeySelector begin, FdbKeySelector end, int limit, int targetBytes, FdbStreamingMode mode, int iteration, bool reverse, long readVersion) - { - if (m_disposed) ThrowDisposed(); - - //HACKHACK - var results = new List>(limit); - - if (limit == 0) limit = 10000; - if (targetBytes == 0) targetBytes = int.MaxValue; - - //bool done = false; - - m_dataLock.EnterReadLock(); - try - { - ulong sequence = (ulong)readVersion; - EnsureReadVersionNotInTheFuture_NeedsLocking(sequence); - - // TODO: convert all selectors to a FirstGreaterThan ? - var buffer = new SliceBuffer(); - - ColaStore.Iterator iterator; - IntPtr stopKey; - - if (!reverse) - { // forward range read: we read from beginKey, and stop once we reach a key >= endKey - - using (var scratch = m_scratchPool.Use()) - { - // first resolve the end to get the stop point - iterator = ResolveCursor(PackUserKey(scratch.Builder, end.Key), end.OrEqual, end.Offset, sequence); - stopKey = iterator.Current; // note: can be ZERO ! - - // now, set the cursor to the begin of the range - iterator = ResolveCursor(PackUserKey(scratch.Builder, begin.Key), begin.OrEqual, begin.Offset, sequence); - if (iterator.Current == IntPtr.Zero) iterator.SeekFirst(); - } - -#if REFACTORED - while (limit > 0 && targetBytes > 0) - { - DumpKey("current", iterator.Current); - - Value* value = ResolveValueAtVersion(iterator.Current, sequence); - if (value != null) - { - if (stopKey != IntPtr.Zero && m_data.Comparer.Compare(iterator.Current, stopKey) >= 0) /* END KEY IS EXCLUDED! */ - { // we reached the end, stop there ! 
- done = true; - break; - } - - var item = CopyResultToManagedMemory(buffer, (Key*)iterator.Current.ToPointer(), value); - results.Add(item); - --limit; - targetBytes -= item.Key.Count + item.Value.Count; - if (targetBytes < 0) targetBytes = 0; - } - - if (!iterator.Next()) - { // out of data to read ? - done = true; - break; - } - } -#endif - } - else - { // reverse range read: we start from the key before endKey, and stop once we read a key < beginKey - - using (var scratch = m_scratchPool.Use()) - { - // first resolve the begin to get the stop point - iterator = ResolveCursor(PackUserKey(scratch.Builder, begin.Key), begin.OrEqual, begin.Offset, sequence); - DumpKey("resolved(" + begin + ")", iterator.Current); - if (iterator.Current == IntPtr.Zero) iterator.SeekFirst(); - stopKey = iterator.Current; // note: can be ZERO ! - - DumpKey("stopKey", stopKey); - - // now, set the cursor to the end of the range - iterator = ResolveCursor(PackUserKey(scratch.Builder, end.Key), end.OrEqual, end.Offset, sequence); - DumpKey("resolved(" + end + ")", iterator.Current); - if (iterator.Current == IntPtr.Zero) - { - iterator.SeekLast(); - DumpKey("endKey", iterator.Current); - } - else - { - // note: since the end is NOT included in the result, we need to already move the cursor once - iterator.Previous(); - } - } - -#if REFACTORED - while (limit > 0 && targetBytes > 0) - { - DumpKey("current", iterator.Current); - - Value* value = ResolveValueAtVersion(iterator.Current, sequence); - if (value != null) - { - if (stopKey != IntPtr.Zero && m_data.Comparer.Compare(iterator.Current, stopKey) < 0) /* BEGIN KEY IS INCLUDED! */ - { // we reached past the beginning, stop there ! 
- DumpKey("stopped at ", iterator.Current); - done = true; - break; - } - - var item = CopyResultToManagedMemory(buffer, (Key*)iterator.Current.ToPointer(), value); - results.Add(item); - --limit; - targetBytes -= item.Key.Count + item.Value.Count; - if (targetBytes < 0) targetBytes = 0; - } - - if (!iterator.Previous()) - { // out of data to read ? - done = true; - break; - } - } -#endif - } - - // run the iterator until we reach the end of the range, the end of the database, or any count or size limit - using (var rangeIterator = new RangeIterator(this, sequence, iterator, stopKey, m_data.Comparer, reverse)) - { - while (rangeIterator.MoveNext()) - { - var item = CopyResultToManagedMemory(buffer, rangeIterator.Key, rangeIterator.Value); - results.Add(item); - - if (limit > 0 && rangeIterator.Count >= limit) break; - if (targetBytes > 0 && rangeIterator.Bytes >= targetBytes) break; - } - - bool hasMore = !rangeIterator.Done; - - var chunk = new FdbRangeChunk( - hasMore, - results.ToArray(), - iteration, - reverse - ); - return Task.FromResult(chunk); - } - } - finally - { - m_dataLock.ExitReadLock(); - } - } - - public IFdbTransactionHandler CreateTransaction(FdbOperationContext context) - { - if (m_disposed) ThrowDisposed(); - Contract.Assert(context != null); - - MemoryTransactionHandler transaction = null; - try - { - transaction = new MemoryTransactionHandler(this); - //m_pendingTransactions.Add(transaction); - return transaction; - } - catch(Exception) - { - if (transaction != null) - { - transaction.Dispose(); - //m_pendingTransactions.Remove(transaction); - } - throw; - } - } - - /// Return the read version of the oldest pending transaction - /// Sequence number of the oldest active transaction, or the current read version if there are no pending transactions - private ulong GetOldestReadVersion() - { - //HACKHACK: TODO! - return (ulong)Volatile.Read(ref m_currentVersion); - } - - #region Loading & Saving... 
- - internal async Task SaveSnapshotAsync(string path, MemorySnapshotOptions options, CancellationToken cancellationToken) - { - Contract.Requires(path != null && options != null); - - if (string.IsNullOrWhiteSpace(path)) throw new ArgumentNullException("path"); - cancellationToken.ThrowIfCancellationRequested(); - - // while we are generating the snapshot on the disk: - // * readers can read without any problems - // * writers can mutate values of existing keys, but cannot INSERT new keys - - var attributes = new Dictionary(StringComparer.Ordinal); - - // Flags bits: - // 0-3: FileType (4 bits) - // 0: Versionned Snapshot - // 1: Compact Snapshot - // 2-15: reserved - - SnapshotFormat.Flags headerFlags = SnapshotFormat.Flags.None; - switch (options.Mode) - { - case MemorySnapshotMode.Full: - case MemorySnapshotMode.Last: - { - headerFlags |= SnapshotFormat.Flags.TYPE_SNAPSHOT_VERSIONNED; - break; - } - case MemorySnapshotMode.Compact: - { - headerFlags |= SnapshotFormat.Flags.TYPE_SNAPSHOT_COMPACT; - break; - } - default: - { - throw new InvalidOperationException("Invalid snapshot mode"); - } - } - - attributes["version"] = FdbTuple.Create(1, 0); - attributes["host"] = FdbTuple.Create(Environment.MachineName); - attributes["timestamp"] = FdbTuple.Create(DateTimeOffset.Now.ToString("O")); - - if (options.Compressed) - { // file is compressed - - headerFlags |= SnapshotFormat.Flags.COMPRESSED; - //TODO: specify compression algorithm... - attributes["compression"] = FdbTuple.Create(true); - attributes["compression.algorithm"] = FdbTuple.Create("lz4"); - } - - if (options.Signed) - { // file will have a cryptographic signature - //TODO: specifiy digital signing algorithm - headerFlags |= SnapshotFormat.Flags.SIGNED; - attributes["signature"] = FdbTuple.Create(true); - attributes["signature.algorithm"] = FdbTuple.Create("pkcs1"); - } - - if (options.Encrypted) - { // file will be encrypted - //TODO: specify crypto algo, key sizes, initialization vectors, ... 
- headerFlags |= SnapshotFormat.Flags.ENCRYPTED; - attributes["encryption"] = FdbTuple.Create(true); - attributes["encryption.algorithm"] = FdbTuple.Create("pkcs1"); - attributes["encryption.keysize"] = FdbTuple.Create(4096); //ex: RSA 4096 ? - } - - //m_dataLock.EnterReadLock(); - try - { - - // take the current version of the db (that will be used for the snapshot) - ulong sequence = (ulong)Volatile.Read(ref m_currentVersion); - long timestamp = DateTime.UtcNow.Ticks; - int levels = m_data.Depth; - int count = m_data.Count; - - using (var output = new Win32SnapshotFile(path)) - { - var snapshot = new SnapshotWriter(output, levels, SnapshotFormat.PAGE_SIZE, SnapshotFormat.FLUSH_SIZE); - - //Console.WriteLine("> Writing header...."); - await snapshot.WriteHeaderAsync( - headerFlags, - new Uuid128(m_uid), - sequence, - count, - timestamp, - attributes - ).ConfigureAwait(false); - - //Console.WriteLine("> Writing level data..."); - for (int level = levels - 1; level >= 0; level--) - { - if (ColaStore.IsFree(level, count)) - { // this level is not allocated - //Console.WriteLine(" > Skipping empty level " + level); - continue; - } - - //Console.WriteLine(" > Dumping " + levels + " levels..."); - await snapshot.WriteLevelAsync(level, m_data.GetLevel(level), cancellationToken); - } - - // Write the JumpTable to the end of the file - //Console.WriteLine("> Writing Jump Table..."); - await snapshot.WriteJumpTableAsync(cancellationToken); - - // flush any remaining data to the disc - //Console.WriteLine("> Flushing..."); - await snapshot.FlushAsync(cancellationToken); - - //Console.WriteLine("> Final file size if " + output.Length.ToString("N0") + " bytes"); - } - //Console.WriteLine("> Done!"); - - return (long)sequence; - } - finally - { - //m_dataLock.ExitReadLock(); - } - } - - internal Task LoadSnapshotAsync(string path, MemorySnapshotOptions options, CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(path)) throw new 
ArgumentNullException("path"); - - //TODO: should this run on the writer thread ? - return Task.Run(() => LoadSnapshotInternal(path, options, cancellationToken), cancellationToken); - } - - private void LoadSnapshotInternal(string path, MemorySnapshotOptions options, CancellationToken cancellationToken) - { - Contract.Requires(path != null && options != null); - - var attributes = new Dictionary(StringComparer.Ordinal); - - //m_dataLock.EnterWriteLock(); - try - { - using (var source = Win32MemoryMappedFile.OpenRead(path)) - { - var snapshot = new SnapshotReader(source); - - // Read the header - //Console.WriteLine("> Reading Header"); - snapshot.ReadHeader(cancellationToken); - - // Read the jump table (at the end) - //Console.WriteLine("> Reading Jump Table"); - snapshot.ReadJumpTable(cancellationToken); - - // we should have enough information to allocate memory - m_data.Clear(); - m_estimatedSize = 0; - - using (var writer = new LevelWriter(1 << snapshot.Depth, m_keys, m_values)) - { - // Read the levels - for (int level = snapshot.Depth - 1; level >= 0; level--) - { - if (!snapshot.HasLevel(level)) - { - continue; - } - - //Console.WriteLine("> Reading Level " + level); - //TODO: right we read the complete level before bulkloading it - // we need to be able to bulk load directly from the stream! - snapshot.ReadLevel(level, writer, cancellationToken); - - m_data.InsertItems(writer.Data, ordered: true); - writer.Reset(); - } - } - - m_uid = snapshot.Id.ToGuid(); - m_currentVersion = (long)snapshot.Sequence; - - //Console.WriteLine("> done!"); - } - } - finally - { - //m_dataLock.ExitWriteLock(); - } - } - - #endregion - - #region Writer Thread... 
- - private sealed class CommitState : TaskCompletionSource - { - public CommitState(MemoryTransactionHandler trans) - : base() - { - Contract.Requires(trans != null); - this.Transaction = trans; - } - - public void MarkAsCompleted() - { - if (!this.Task.IsCompleted) - { - ThreadPool.UnsafeQueueUserWorkItem((state) => { ((CommitState)state).TrySetResult(null); }, this); - } - } - - public void MarkAsFailed(Exception e) - { - if (!this.Task.IsCompleted) - { - ThreadPool.UnsafeQueueUserWorkItem( - (state) => - { - var items = (Tuple)state; - items.Item1.TrySetException(items.Item2); - }, - Tuple.Create(this, e) - ); - } - } - - public void MarkAsCancelled() - { - if (!this.Task.IsCompleted) - { - ThreadPool.UnsafeQueueUserWorkItem((state) => { ((CommitState)state).TrySetResult(null); }, this); - } - } - - public MemoryTransactionHandler Transaction { get; private set; } - - } - - [Conditional("FULL_DEBUG")] - private static void Log(string msg) - { - Trace.WriteLine("MemoryDatabaseHandler[#" + Thread.CurrentThread.ManagedThreadId + "]: " + msg); - } - - private const int STATE_IDLE = 0; - private const int STATE_RUNNNING = 1; - private const int STATE_SHUTDOWN = 2; - - private int m_eventLoopState = STATE_IDLE; - private AutoResetEvent m_writerEvent = new AutoResetEvent(false); - private ConcurrentQueue m_writerQueue = new ConcurrentQueue(); - private ManualResetEvent m_shutdownEvent = new ManualResetEvent(false); - - internal Task EnqueueCommit(MemoryTransactionHandler trans) - { - if (trans == null) throw new ArgumentNullException("trans"); - - if (Volatile.Read(ref m_eventLoopState) == STATE_SHUTDOWN) - { - throw new FdbException(FdbError.OperationFailed, "The database has already been disposed"); - } - - var entry = new CommitState(trans); - try - { - m_writerQueue.Enqueue(entry); - - // wake up the writer thread if needed - // note: we need to set the event BEFORE changing the eventloop state, because the writer thread may be in the process of shutting down - 
m_writerEvent.Set(); - Log("Enqueued new commit"); - - if (Interlocked.CompareExchange(ref m_eventLoopState, STATE_RUNNNING, STATE_IDLE) == STATE_IDLE) - { // we have to start the event loop - Log("Starting new Writer EventLoop..."); - var _ = Task.Factory.StartNew(() => WriteEventLoop(), CancellationToken.None, TaskCreationOptions.LongRunning, TaskScheduler.Default); - } - } - catch (Exception e) - { - entry.SetException(e); - } - return entry.Task; - } - - /// Event loop that is called to process all the writes to the database - private void WriteEventLoop() - { - TimeSpan quanta = TimeSpan.FromSeconds(30); - - // confirm that we can still run - if (Interlocked.CompareExchange(ref m_eventLoopState, STATE_RUNNNING, STATE_RUNNNING) != STATE_RUNNNING) - { // a shutdown was retquested, exit immediately - Log("WriteEventLoop fast abort"); - return; - } - - Log("WriteEventLoop started"); - - try - { - bool keepGoing = true; - while (keepGoing) - { - // Wait() will: - // - return true if we have a new entry to process - // - return false if the quanta timeout has expired - // - throw an OperationCanceledException if the cancellation token was triggered - if (m_writerEvent.WaitOne(quanta)) - { - Log("WriteEventLoop wake up"); - CommitState entry; - - // process all the pending writes - while (Volatile.Read(ref m_eventLoopState) != STATE_SHUTDOWN && m_writerQueue.TryDequeue(out entry)) - { - if (entry.Task.IsCompleted) - { // the task has already been completed/cancelled? - continue; - } - - try - { - Log("WriteEventLoop process transaction"); - //TODO: work ! 
- entry.Transaction.CommitInternal(); - entry.MarkAsCompleted(); - } - catch (Exception e) - { - Log("WriteEventLoop transaction failed: " + e.Message); - entry.MarkAsFailed(new FdbException(FdbError.InternalError, "The transaction failed to commit", e)); - } - } - - if (Volatile.Read(ref m_eventLoopState) == STATE_SHUTDOWN) - { // we have been asked to shutdown - Log("WriteEventLoop shutdown requested"); - // drain the commit queue, and mark all of them as failed - while (m_writerQueue.TryDequeue(out entry)) - { - if (entry != null) entry.MarkAsCancelled(); - } - keepGoing = false; - } - } - else - { // try to step down - - Log("WriteEventLoop no activity"); - Interlocked.CompareExchange(ref m_eventLoopState, STATE_IDLE, STATE_RUNNNING); - // check again if nobody was trying to queue a write at the same time - if (!m_writerEvent.WaitOne(TimeSpan.Zero, false) || Interlocked.CompareExchange(ref m_eventLoopState, STATE_RUNNNING, STATE_IDLE) == STATE_IDLE) - { // either there were no pending writes, or we lost the race and will be replaced by another thread - Log("WriteEventLoop will step down"); - keepGoing = false; // stop - } -#if DEBUG - else - { - Log("WriteEventLoop will resume"); - } -#endif - } - } - Log("WriteEventLoop exit"); - } - catch(Exception) - { - //TODO: fail all pending commits ? - // reset the state to IDLE so that another write can restart us - Interlocked.CompareExchange(ref m_eventLoopState, STATE_IDLE, STATE_RUNNNING); - throw; - } - finally - { - if (Volatile.Read(ref m_eventLoopState) == STATE_SHUTDOWN) - { - m_shutdownEvent.Set(); - } - } - } - - private void StopWriterEventLoop() - { - // signal a shutdown - Log("WriterEventLoop requesting stop..."); - int oldState; - if ((oldState = Interlocked.Exchange(ref m_eventLoopState, STATE_SHUTDOWN)) != STATE_SHUTDOWN) - { - switch (oldState) - { - case STATE_RUNNNING: - { - // need to wake up the thread, if it was waiting for new writes - m_writerEvent.Set(); - // and wait for it to finish... 
- if (!m_shutdownEvent.WaitOne(TimeSpan.FromSeconds(5))) - { - // what should we do ? - } - Log("WriterEventLoop stopped"); - break; - } - default: - { // not running, or already shutdown ? - m_shutdownEvent.Set(); - break; - } - } - } - } - - #endregion - - /// Perform a complete garbage collection - public void Collect() - { - // - determine the old read version that is in use - // - look for all the windows that are older than that - // - collect all keys that were modified in these windows (value changed, or deleted) - // - for all heap pages that are above a freespace threshold, merge them into fewer full pages - - m_dataLock.EnterUpgradeableReadLock(); - try - { - - // collect everything that is oldest than the oldest active read version. - ulong sequence = GetOldestReadVersion(); - - lock (m_heapLock) - { - // purge the dead values - m_values.Collect(sequence); - - // pack the keys - //m_keys.Collect(sequence); - //BUGBUG: need to purge the colastore also ! - } - - m_oldestVersion = (long)sequence; - } - finally - { - m_dataLock.ExitUpgradeableReadLock(); - } - - - } - - public void Dispose() - { - if (!m_disposed) - { - m_disposed = true; - - StopWriterEventLoop(); - //TODO: need to lock and ensure that all pending transactions are done - - m_writerEvent.Dispose(); - m_shutdownEvent.Dispose(); - - m_keys.Dispose(); - m_values.Dispose(); - if (m_transactionWindows != null) - { - foreach (var window in m_transactionWindows) - { - if (window != null) window.Dispose(); - } - } - if (m_scratchPool != null) m_scratchPool.Dispose(); - m_scratchKey.Dispose(); - m_scratchValue.Dispose(); - } - } - - private void ThrowDisposed() - { - throw new ObjectDisposedException("The database has already been disposed"); - } - - [Conditional("DEBUG")] - public void Debug_Dump(bool detailed = false) - { - Debug.WriteLine("Dumping content of Database"); - m_dataLock.EnterReadLock(); - try - { - Debug.WriteLine("> Version: {0}", m_currentVersion); - Debug.WriteLine("> Items: 
{0:N0}", m_data.Count); - Debug.WriteLine("> Estimated size: {0:N0} bytes", m_estimatedSize); - Debug.WriteLine("> Transaction windows: {0}", m_transactionWindows.Count); - foreach(var window in m_transactionWindows) - { - Debug.WriteLine(" > {0} : {1:N0} commits{2}", window.ToString(), window.CommitCount, window.Closed ? " [CLOSED]" : ""); - } - long cmps, eqs, ghcs; - NativeKeyComparer.GetCounters(out cmps, out eqs, out ghcs); - Debug.WriteLine("> Comparisons: {0:N0} compares, {1:N0} equals, {2:N0} hashcodes", cmps, eqs, ghcs); - NativeKeyComparer.ResetCounters(); - lock (m_heapLock) - { - unsafe - { - m_keys.Debug_Dump(detailed); - m_values.Debug_Dump(detailed); - } - } - Debug.WriteLine(""); - } - finally - { - m_dataLock.ExitReadLock(); - } - } - - } - -} diff --git a/FoundationDB.Storage.Memory/API/MemorySnapshotOptions.cs b/FoundationDB.Storage.Memory/API/MemorySnapshotOptions.cs deleted file mode 100644 index 65101f15b..000000000 --- a/FoundationDB.Storage.Memory/API/MemorySnapshotOptions.cs +++ /dev/null @@ -1,36 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.API -{ - using System; - - public enum MemorySnapshotMode - { - /// Include all keys (included the deletions), as well as all their mutations, timestamped with their sequence number - Full = 0, - /// Include all keys (inlcuded the deletions), but with only their latest value. - Last, - /// Include only the live keys, with their latest value. 
- Compact, - - } - - public sealed class MemorySnapshotOptions - { - - public MemorySnapshotOptions() - { } - - public MemorySnapshotMode Mode { get; set; } - - public bool Compressed { get; set; } - - public bool Signed { get; set; } - - public bool Encrypted { get; set; } - - } - -} diff --git a/FoundationDB.Storage.Memory/API/MemoryTransactionHandler.cs b/FoundationDB.Storage.Memory/API/MemoryTransactionHandler.cs deleted file mode 100644 index 3f0bf2b2e..000000000 --- a/FoundationDB.Storage.Memory/API/MemoryTransactionHandler.cs +++ /dev/null @@ -1,1268 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -#undef DUMP_TRANSACTION_STATE - -namespace FoundationDB.Storage.Memory.API -{ - using FoundationDB.Client; - using FoundationDB.Client.Core; - using FoundationDB.Client.Utils; - using FoundationDB.Storage.Memory.Core; - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Globalization; - using System.Linq; - using System.Runtime.InteropServices; - using System.Threading; - using System.Threading.Tasks; - - public sealed class MemoryTransactionHandler : IFdbTransactionHandler, IDisposable - { - internal const int MaxKeySize = 10 * 1000; //note: this should be the same as FoundationDB ! - internal const int MaxValueSize = 100 * 1000; //note: this should be the same as FoundationDB ! - private const int InitialBufferSize = 32 * 1024; //note: this should be at least 2x larger than the max key size, and if possible a power of 2 - - #region Private Fields... 
- - private readonly MemoryDatabaseHandler m_db; - - private volatile bool m_disposed; - - /// Buffer used to store the keys and values of this transaction - private SliceBuffer m_buffer; - - /// Lock that protects the state of the transaction - private readonly object m_lock = new object(); - /// List of all conflicts due to read operations - private ColaRangeSet m_readConflicts; - /// List of all conflicts due to write operations - private ColaRangeSet m_writeConflicts; - /// List of all ClearRange - private ColaRangeSet m_clears; - /// List of all Set operations (Set, Atomic, ..) - private ColaOrderedDictionary m_writes; - - /// Read version of the transaction - private long? m_readVersion; - /// Committed version of the transaction - private long m_committedVersion; - - private int m_retryCount; - - #endregion - - internal enum Operation - { - Nop = 0, - - Set = 1, - //note: the AtomicXXX should match the value of FdbMutationType - AtomicAdd = 2, - AtomicBitAnd = 6, - AtomicBitOr = 7, - AtomicBitXor = 8, - } - - [StructLayout(LayoutKind.Sequential)] - internal struct WriteCommand - { - public Slice Key; //PERF: readonly struct - public Slice Value; //PERF: readonly struct - public readonly Operation Type; - - public WriteCommand(Operation type, Slice key, Slice value) - { - this.Type = type; - this.Key = key; - this.Value = value; - } - - public override string ToString() - { - return String.Format(CultureInfo.InvariantCulture, "{0}({1}, {2}))", this.Type.ToString(), this.Key.ToAsciiOrHexaString(), this.Value.ToAsciiOrHexaString()); - } - - internal static byte[] PrepareValueForAtomicOperation(Slice value, int size) - { - if (value.Count >= size) - { // truncate if needed - return value.GetBytes(0, size); - } - - // pad with zeroes - var tmp = new byte[size]; - value.CopyTo(tmp, 0); - return tmp; - } - - public Slice ApplyTo(Slice value) - { - switch(this.Type) - { - case Operation.Set: - { - return this.Value; - } - case Operation.Nop: - { - return value; - } 
- case Operation.AtomicAdd: - { - return ApplyAdd(value); - } - case Operation.AtomicBitAnd: - { - return ApplyBitAnd(value); - } - case Operation.AtomicBitOr: - { - return ApplyBitOr(value); - } - case Operation.AtomicBitXor: - { - return ApplyBitXor(value); - } - default: - { - throw new NotSupportedException("Invalid write command type"); - } - } - } - - public Slice ApplyAdd(Slice value) - { - var tmp = PrepareValueForAtomicOperation(value, this.Value.Count); - BufferAdd(tmp, 0, this.Value.Array, this.Value.Offset, this.Value.Count); - return Slice.Create(tmp); - } - - public void ApplyAddTo(UnmanagedSliceBuilder value) - { - uint size = checked((uint)this.Value.Count); - - // if the value is empty, then this is the same thing as adding to 0 - if (value.Count == 0) - { - value.Append(this.Value); - return; - } - - // truncate the value if larger, or pad it with zeroes if shorter - value.Resize(size, 0); - - if (size > 0) - { - unsafe - { - fixed (byte* ptr = this.Value.Array) - { - byte* left = value.Data; - byte* right = ptr + this.Value.Offset; - - //TODO: find a way to optimize this for common sizes like 4 or 8 bytes! 
- int carry = 0; - while (size-- > 0) - { - carry += *left + *right++; - *left++ = (byte)carry; - carry >>= 8; - } - } - } - } - } - - public Slice ApplyBitAnd(Slice value) - { - var tmp = PrepareValueForAtomicOperation(value, this.Value.Count); - BufferBitAnd(tmp, 0, this.Value.Array, this.Value.Offset, this.Value.Count); - return Slice.Create(tmp); - } - - public void ApplyBitAndTo(UnmanagedSliceBuilder value) - { - uint size = checked((uint)this.Value.Count); - - // if the value is empty, then 0 AND * will always be zero - if (value.Count == 0) - { - value.Resize(size, 0); - return; - } - - // truncate the value if larger, or pad it with zeroes if shorter - value.Resize(size, 0); - - if (size > 0) - { - unsafe - { - fixed (byte* ptr = this.Value.Array) - { - byte* left = value.Data; - byte* right = ptr + this.Value.Offset; - - //TODO: find a way to optimize this for common sizes like 4 or 8 bytes! - while (size-- > 0) - { - *left++ &= *right++; - } - } - } - } - } - - public Slice ApplyBitOr(Slice value) - { - var tmp = PrepareValueForAtomicOperation(value, this.Value.Count); - BufferBitOr(tmp, 0, this.Value.Array, this.Value.Offset, this.Value.Count); - return Slice.Create(tmp); - } - - public void ApplyBitOrTo(UnmanagedSliceBuilder value) - { - uint size = checked((uint)this.Value.Count); - - // truncate the value if larger, or pad it with zeroes if shorter - value.Resize(size, 0); - - if (size > 0) - { - unsafe - { - fixed (byte* ptr = this.Value.Array) - { - byte* left = value.Data; - byte* right = ptr + this.Value.Offset; - - //TODO: find a way to optimize this for common sizes like 4 or 8 bytes! 
- while (size-- > 0) - { - *left++ |= *right++; - } - } - } - } - } - - public Slice ApplyBitXor(Slice value) - { - var tmp = PrepareValueForAtomicOperation(value, this.Value.Count); - BufferBitXor(tmp, 0, this.Value.Array, this.Value.Offset, this.Value.Count); - return Slice.Create(tmp); - } - - public void ApplyBitXorTo(UnmanagedSliceBuilder value) - { - uint size = checked((uint)this.Value.Count); - - // truncate the value if larger, or pad it with zeroes if shorter - value.Resize(size, 0); - - if (size > 0) - { - unsafe - { - fixed (byte* ptr = this.Value.Array) - { - byte* left = value.Data; - byte* right = ptr + this.Value.Offset; - - //TODO: find a way to optimize this for common sizes like 4 or 8 bytes! - while (size-- > 0) - { - *left++ ^= *right++; - } - } - } - } - } - - internal static int BufferAdd(byte[] buffer, int offset, byte[] arg, int argOffset, int count) - { - // TODO: optimize this! - int carry = 0; - while (count-- > 0) - { - carry += buffer[offset] + arg[argOffset++]; - buffer[offset++] = (byte)carry; - carry >>= 8; - } - return carry; - } - - internal static void BufferBitAnd(byte[] buffer, int offset, byte[] arg, int argOffset, int count) - { - while (count-- > 0) - { - buffer[offset++] &= arg[argOffset++]; - } - } - - internal static void BufferBitOr(byte[] buffer, int offset, byte[] arg, int argOffset, int count) - { - while (count-- > 0) - { - buffer[offset++] |= arg[argOffset++]; - } - } - - internal static void BufferBitXor(byte[] buffer, int offset, byte[] arg, int argOffset, int count) - { - while (count-- > 0) - { - buffer[offset++] ^= arg[argOffset++]; - } - } - - internal static WriteCommand MergeSetAndAtomicOperation(WriteCommand command, Operation op, Slice argument) - { - // truncate/resize the previous value to the size of the add - int size = argument.Count; - var tmp = PrepareValueForAtomicOperation(command.Value, size); - - switch (op) - { - case Operation.AtomicAdd: - { // do a littlee-endian ADD between the two buffers - 
BufferAdd(tmp, 0, argument.Array, argument.Offset, size); - break; - } - case Operation.AtomicBitAnd: - { // do an AND between the two buffers - BufferBitAnd(tmp, 0, argument.Array, argument.Offset, size); - break; - } - case Operation.AtomicBitOr: - { // do a OR between the two buffers - BufferBitOr(tmp, 0, argument.Array, argument.Offset, size); - break; - } - case Operation.AtomicBitXor: - { // do a XOR between the two buffers - BufferBitXor(tmp, 0, argument.Array, argument.Offset, size); - break; - } - default: - { // not supposed to happen - throw new InvalidOperationException(); - } - } - - return new WriteCommand(Operation.Set, command.Key, Slice.Create(tmp)); - } - - internal static WriteCommand MergeTwoAtomicOperations(WriteCommand command, Slice argument) - { - // truncate/resize the previous value to the size of the add - int size = argument.Count; - var tmp = PrepareValueForAtomicOperation(command.Value, size); - - switch (command.Type) - { - case Operation.AtomicAdd: - { // do a littlee-endian ADD between the two buffers - BufferAdd(tmp, 0, argument.Array, argument.Offset, size); - break; - } - case Operation.AtomicBitAnd: - { // do an AND between the two buffers - BufferBitAnd(tmp, 0, argument.Array, argument.Offset, size); - break; - } - case Operation.AtomicBitOr: - { // do a OR between the two buffers - BufferBitOr(tmp, 0, argument.Array, argument.Offset, size); - break; - } - case Operation.AtomicBitXor: - { // do a XOR between the two buffers - BufferBitXor(tmp, 0, argument.Array, argument.Offset, size); - break; - } - default: - { // not supposed to happen - throw new InvalidOperationException(); - } - } - - return new WriteCommand(command.Type, command.Key, Slice.Create(tmp)); - } - - internal static Slice ApplyTo(WriteCommand[] commands, Slice value) - { - var result = value; - for(int i=0;i(SliceComparer.Default); - m_writes = new ColaOrderedDictionary(SliceComparer.Default); - m_readConflicts = new ColaRangeSet(SliceComparer.Default); - 
m_writeConflicts = new ColaRangeSet(SliceComparer.Default); - } - else - { - m_clears.Clear(); - m_writes.Clear(); - m_readConflicts.Clear(); - m_writeConflicts.Clear(); - } - - m_retryCount = 0; - this.AccessSystemKeys = NO_ACCESS; - this.NextWriteNoWriteConflictRange = false; - this.ReadYourWritesDisable = false; - } - } - - public int Size - { - get { return m_buffer.Size; } - } - - public FdbIsolationLevel IsolationLevel - { - get - { - //BUGBUG: this is currently a lie! until we support "ReadYourWrite", we actually only support Snapshot isolation level! - return FdbIsolationLevel.Serializable; - } - } - - /// Adds a range to teh clear list of this transaction - /// Must be called with m_lock taken - private void AddClearCommand_NeedsLocking(FdbKeyRange range) - { - // merge the cleared range with the others - m_clears.Mark(range.Begin, range.End); - - // remove all writes that where in this range - var keys = m_writes.FindBetween(range.Begin, true, range.End, false).ToList(); - if (keys.Count > 0) - { - foreach(var key in keys) - { - m_writes.Remove(key); - } - } - } - - /// Adds a command to the write list of this transaction - /// Must be called with m_lock taken - private void AddWriteCommand_NeedsLocking(WriteCommand command) - { - var commands = new WriteCommand[1]; - commands[0] = command; - - if (!m_writes.GetOrAdd(command.Key, commands, out commands)) - { // there is already a command for that key - - if (command.Type == Operation.Set) - { // Set always overwrites everything - if (commands.Length == 1) - { // reuse the command array - commands[0] = command; - return; - } - // overwrite - m_writes.SetItem(command.Key, new[] { command }); - return; - } - - var last = commands[commands.Length - 1]; - if (last.Type == Operation.Set) - { // "SET(X) x ATOMIC(op, P)" are merged into "SET(X')" with X' = atomic(op, X, P) - Contract.Assert(commands.Length == 1); - - command = WriteCommand.MergeSetAndAtomicOperation(last, command.Type, command.Value); - // update 
in place - commands[commands.Length - 1] = command; - return; - - } - - if (last.Type == command.Type) - { // atomics of the same kind can be merged - - command = WriteCommand.MergeTwoAtomicOperations(last, command.Value); - // update in place - commands[commands.Length - 1] = command; - return; - } - - // just queue the command at the end - Array.Resize(ref commands, commands.Length + 1); - commands[commands.Length - 1] = command; - - m_writes.SetItem(command.Key, commands); - - } - } - - /// Read and clear the NextWriteNoConflict flags. - /// Value of the flag, which is cleared for the following write. - /// Must be called with m_lock taken - private bool ConsumeNextWriteNoConflict_NeedsLocking() - { - if (this.NextWriteNoWriteConflictRange) - { - this.NextWriteNoWriteConflictRange = false; - return true; - } - return false; - } - - /// Adds a range to the write conflict list - /// Must be called with m_lock taken - private void AddWriteConflict_NeedsLocking(FdbKeyRange range) - { - m_writeConflicts.Mark(range.Begin, range.End); - } - - /// Adds a range to the read conflict list - /// Must be called with m_lock taken - private void AddReadConflict_NeedsLocking(FdbKeyRange range) - { - m_readConflicts.Mark(range.Begin, range.End); - } - - private void CheckAccessToSystemKeys(Slice key, bool end = false) - { - if (this.AccessSystemKeys == 0 && key[0] == 0xFF) - { // access to system keys is not allowed - if (!end || key.Count > 1) - { - throw new FdbException(FdbError.KeyOutsideLegalRange); - } - } - } - - private Slice MergeResultWithLocalState(Slice key, Slice value) - { - WriteCommand[] commands; - if (m_writes.TryGetValue(key, out commands)) - { // the key will be mutated by this transaction - return WriteCommand.ApplyTo(commands, value); - } - - if (m_clears.ContainsKey(key)) - { // the key will be deleted by this transaction - return Slice.Nil; - } - - return value; - } - - public Task GetAsync(Slice key, bool snapshot, CancellationToken cancellationToken) - 
{ - Contract.Requires(key.HasValue); - cancellationToken.ThrowIfCancellationRequested(); - - CheckAccessToSystemKeys(key); - - FdbKeyRange range; - lock (m_buffer) - { - range = m_buffer.InternRangeFromKey(key); - } - - // we need the read version - EnsureHasReadVersion(); - - // read the value in the db - //TODO: how to lock ? - var results = m_db.GetValuesAtVersion(new Slice[] { range.Begin }, m_readVersion.Value); - Contract.Assert(results != null && results.Length == 1); - var result = results[0]; - - // snapshot read always see the db, regular read must merge with local mutation, unless option ReadYourWrites is set - if (!snapshot && !this.ReadYourWritesDisable) - { // we need to merge the db state with the local mutations - result = MergeResultWithLocalState(range.Begin, result); - } - - if (!snapshot) - { - lock (m_lock) - { - AddReadConflict_NeedsLocking(range); - } - } - - return Task.FromResult(result); - } - - public Task GetValuesAsync(Slice[] keys, bool snapshot, CancellationToken cancellationToken) - { - Contract.Requires(keys != null); - cancellationToken.ThrowIfCancellationRequested(); - - // order and check the keys - var ordered = new Slice[keys.Length]; - for (int i = 0; i < keys.Length;i++) - { - var key = keys[i]; - if (key.IsNullOrEmpty) throw new ArgumentException("Key cannot be null or empty"); - CheckAccessToSystemKeys(key); - ordered[i] = key; - } - if (ordered.Length > 1) - { // the db expect the keys to be sorted - Array.Sort(ordered, SliceComparer.Default); - } - - // we need the read version - EnsureHasReadVersion(); - - var ranges = new FdbKeyRange[ordered.Length]; - lock (m_buffer) - { - for (int i = 0; i < ordered.Length; i++) - { - ranges[i] = m_buffer.InternRangeFromKey(ordered[i]); - ordered[i] = ranges[i].Begin; - } - } - - // read the values in the db - //TODO: how to lock ? 
- var results = m_db.GetValuesAtVersion(ordered, m_readVersion.Value); - - // snapshot read always see the db, regular read must merge with local mutation, unless option ReadYourWrites is set - if (!snapshot && !this.ReadYourWritesDisable) - { // we need to merge the db state with the local mutations - for (int i = 0; i < ordered.Length; i++) - { - results[i] = MergeResultWithLocalState(ordered[i], results[i]); - } - } - - if (!snapshot) - { - lock (m_lock) - { - for (int i = 0; i < ranges.Length; i++) - { - AddReadConflict_NeedsLocking(ranges[i]); - } - } - } - - return Task.FromResult(results); - } - - private sealed class SelectorKeyComparer : IComparer - { - - public static readonly SelectorKeyComparer Default = new SelectorKeyComparer(); - - private static readonly SliceComparer s_comparer = SliceComparer.Default; - - private SelectorKeyComparer() - { } - - public int Compare(FdbKeySelector x, FdbKeySelector y) - { - return s_comparer.Compare(x.Key, y.Key); - } - } - - public async Task GetKeyAsync(FdbKeySelector selector, bool snapshot, CancellationToken cancellationToken) - { - Contract.Requires(selector.Key.HasValue); - cancellationToken.ThrowIfCancellationRequested(); - - CheckAccessToSystemKeys(selector.Key, end: true); - - //Trace.WriteLine("## GetKey " + selector + ", snapshot=" + snapshot); - - FdbKeyRange keyRange; - lock (m_buffer) - { - keyRange = m_buffer.InternRangeFromKey(selector.Key); - selector = new FdbKeySelector(keyRange.Begin, selector.OrEqual, selector.Offset); - } - - // we need the read version - EnsureHasReadVersion(); - - var results = await m_db.GetKeysAtVersion(new [] { selector }, m_readVersion.Value).ConfigureAwait(false); - Contract.Assert(results != null && results.Length == 1); - var result = results[0]; - - FdbKeyRange resultRange; - int c = result.CompareTo(selector.Key); - if (c == 0) - { // the result is identical to the key - resultRange = keyRange; - result = keyRange.Begin; - } - else - { // intern the result - 
lock(m_buffer) - { - resultRange = m_buffer.InternRangeFromKey(result); - result = resultRange.Begin; - } - } - - //TODO: how to merge the results with the local state mutations ? - // => add values that were inserted - // => remove values that were cleared - // => change the value of keys that were mutated locally - - if (!snapshot) - { - lock (m_lock) - { - //TODO: use the result to create the conflict range (between the resolver key and the returned key) - if (c == 0) - { // the key itself was selected, so it can only conflict if it gets deleted by another transaction - // [ result, result+\0 ) - AddReadConflict_NeedsLocking(resultRange); - } - else if (c < 0) - { // the result is before the selected key, so any change between them (including deletion of the result) will conflict - // orEqual == true => [ result, key + \0 ) - // orEqual == false => [ result, key ) - AddReadConflict_NeedsLocking(FdbKeyRange.Create(resultRange.Begin, selector.OrEqual ? keyRange.End : keyRange.Begin)); - } - else - { // the result is after the selected key, so any change between it and the result will conflict - // orEqual == true => [ key + \0, result + \0 ) - // orEqual == false => [ key , result + \0 ) - AddReadConflict_NeedsLocking(FdbKeyRange.Create(selector.OrEqual ? 
keyRange.End : keyRange.Begin, resultRange.End)); - } - } - } - - return result; - } - - public async Task GetKeysAsync(FdbKeySelector[] selectors, bool snapshot, CancellationToken cancellationToken) - { - Contract.Requires(selectors != null); - - cancellationToken.ThrowIfCancellationRequested(); - - // order and check the keys - var ordered = new FdbKeySelector[selectors.Length]; - for (int i = 0; i < selectors.Length; i++) - { - if (selectors[i].Key.IsNullOrEmpty) throw new ArgumentException("Key cannot be null or empty"); - //CheckAccessToSystemKeys(key); - ordered[i] = selectors[i]; - } - if (ordered.Length > 1) - { // the db expects the keys to be sorted - Array.Sort(ordered, SelectorKeyComparer.Default); - } - - // we need the read version - EnsureHasReadVersion(); - - lock (m_buffer) - { - for (int i = 0; i < ordered.Length; i++) - { - ordered[i] = m_buffer.InternSelector(ordered[i]); - } - } - - var results = await m_db.GetKeysAtVersion(ordered, m_readVersion.Value).ConfigureAwait(false); - - if (!snapshot) - { - lock (m_lock) - { -#if !DEBUGz - throw new NotImplementedException("TODO: track read ranges in GetKeysAsync() !"); -#endif - } - } - - return results; - } - - public async Task GetRangeAsync(FdbKeySelector beginInclusive, FdbKeySelector endExclusive, FdbRangeOptions options, int iteration, bool snapshot, CancellationToken cancellationToken) - { - Contract.Requires(beginInclusive.Key.HasValue && endExclusive.Key.HasValue && options != null); - - cancellationToken.ThrowIfCancellationRequested(); - - //TODO: check system keys - - //Trace.WriteLine("## GetRange " + beginInclusive + " <= k < " + endExclusive + ", limit=" + options.Limit + ", reverse=" + options.Reverse + ", snapshot=" + snapshot); - - lock (m_buffer) - { - beginInclusive = m_buffer.InternSelector(beginInclusive); - endExclusive = m_buffer.InternSelector(endExclusive); - } - - // we need the read version - EnsureHasReadVersion(); - - options = FdbRangeOptions.EnsureDefaults(options, 
null, null, FdbStreamingMode.Iterator, false); - options.EnsureLegalValues(); - - var result = await m_db.GetRangeAtVersion(beginInclusive, endExclusive, options.Limit ?? 0, options.TargetBytes ?? 0, options.Mode.Value, iteration, options.Reverse.Value, m_readVersion.Value).ConfigureAwait(false); - - if (!snapshot) - { - lock (m_lock) - { - //TODO: use the result to create the conflict range (between the resolver key and the returned key) - //AddReadConflict_NeedsLocking(range); - } - } - return result; - } - - public void Set(Slice key, Slice value) - { - // check - if (key.IsNullOrEmpty) throw new ArgumentException("Key cannot be null or empty"); - if (value.IsNull) throw new ArgumentNullException("Value cannot be null"); - CheckAccessToSystemKeys(key); - - - // first thing is copy the data in our own buffer, and only use those for the rest - FdbKeyRange range; - lock (m_buffer) - { - range = m_buffer.InternRangeFromKey(key); - value = m_buffer.Intern(value); - } - - lock (m_lock) - { - if (!ConsumeNextWriteNoConflict_NeedsLocking()) - { - AddWriteConflict_NeedsLocking(range); - } - AddWriteCommand_NeedsLocking(new WriteCommand(Operation.Set, range.Begin, value)); - } - } - - public void Atomic(Slice key, Slice param, FdbMutationType mutation) - { - // check - if (key.IsNullOrEmpty) throw new ArgumentException("Key cannot be null or empty"); - if (param.IsNull) throw new ArgumentNullException("Parameter cannot be null"); - CheckAccessToSystemKeys(key); - - if (mutation != FdbMutationType.Add && mutation != FdbMutationType.BitAnd && mutation != FdbMutationType.BitOr && mutation != FdbMutationType.BitXor && mutation != FdbMutationType.Max && mutation != FdbMutationType.Min) - { - //TODO: throw an FdbException instead? 
- throw new ArgumentException("Invalid mutation type", "mutation"); - } - - FdbKeyRange range; - lock (m_buffer) - { - range = m_buffer.InternRangeFromKey(key); - param = m_buffer.Intern(param); - } - - lock (m_lock) - { - if (!ConsumeNextWriteNoConflict_NeedsLocking()) - { - AddWriteConflict_NeedsLocking(range); - } - AddWriteCommand_NeedsLocking(new WriteCommand((Operation)mutation, range.Begin, param)); - } - } - - public void Clear(Slice key) - { - // check - if (key.IsNullOrEmpty) throw new ArgumentException("Key cannot be null or empty"); - CheckAccessToSystemKeys(key); - - FdbKeyRange range; - lock (m_buffer) - { - range = m_buffer.InternRangeFromKey(key); - } - - lock (m_lock) - { - if (!ConsumeNextWriteNoConflict_NeedsLocking()) - { - AddWriteConflict_NeedsLocking(range); - } - AddClearCommand_NeedsLocking(range); - } - } - - public void ClearRange(Slice beginKeyInclusive, Slice endKeyExclusive) - { - // check - if (beginKeyInclusive.IsNullOrEmpty) throw new ArgumentException("Begin key cannot be null or empty"); - if (endKeyExclusive.IsNullOrEmpty) throw new ArgumentException("End key cannot be null or empty"); - CheckAccessToSystemKeys(beginKeyInclusive); - CheckAccessToSystemKeys(endKeyExclusive, end: true); - - FdbKeyRange range; - lock (m_buffer) - { - range = m_buffer.InternRange(beginKeyInclusive, endKeyExclusive); - } - - lock (m_lock) - { - if (!ConsumeNextWriteNoConflict_NeedsLocking()) - { - AddWriteConflict_NeedsLocking(range); - } - AddClearCommand_NeedsLocking(range); - } - } - - public void AddConflictRange(Slice beginKeyInclusive, Slice endKeyExclusive, FdbConflictRangeType type) - { - // check - if (beginKeyInclusive.IsNullOrEmpty) throw new ArgumentException("Begin key cannot be null or empty"); - if (endKeyExclusive.IsNullOrEmpty) throw new ArgumentException("End key cannot be null or empty"); - if (type != FdbConflictRangeType.Read && type != FdbConflictRangeType.Write) throw new ArgumentOutOfRangeException("type", "Invalid range 
conflict type"); - - CheckAccessToSystemKeys(beginKeyInclusive); - CheckAccessToSystemKeys(endKeyExclusive, end: true); - - FdbKeyRange range; - lock(m_buffer) - { - range = m_buffer.InternRange(beginKeyInclusive, endKeyExclusive); - } - - lock (m_lock) - { - if (type == FdbConflictRangeType.Read) - { - AddReadConflict_NeedsLocking(range); - } - else - { - AddWriteConflict_NeedsLocking(range); - } - } - } - - public void Reset() - { - //TODO: kill any pending "async" reads - //TODO: release the current read version and/or transaction window ? - Initialize(true); - } - - public Task CommitAsync(CancellationToken cancellationToken) - { - Log("CommitAsync() called"); - cancellationToken.ThrowIfCancellationRequested(); - - if (!m_readVersion.HasValue) - { - EnsureHasReadVersion(); - } - -#if DUMP_TRANSACTION_STATE - Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, "=== COMMITING TRANSACTION {0} ===", this.Id)); - Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, "# ReadVersion: {0}", m_readVersion ?? 
-1)); - - if (m_readConflicts.Count == 0) - { - Trace.WriteLine("# Read Conflicts: none"); - } - else - { - Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, "# Read Conflicts: ({0}) => {1}", m_readConflicts.Count, m_readConflicts.ToString())); - } - - if (m_writeConflicts.Count == 0) - { - Trace.WriteLine("# Write Conflicts: none"); - } - else - { - Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, "# Write Conflicts: ({0}) => {1}", m_writeConflicts.Count, m_writeConflicts.ToString())); - } - - if (m_clears.Count == 0) - { - Trace.WriteLine("# Clears: none"); - } - else - { - Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, "# Clears: ({0})", m_clears.Count)); - foreach (var op in m_clears) - { - Trace.WriteLine(" > " + new FdbKeyRange(op.Begin, op.End)); - } - } - - if (m_writes.Count == 0) - { - Trace.WriteLine("# Writes: none"); - } - else - { - Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, "# Writes: ({0})", m_writes.Count)); - foreach (var op in m_writes) - { - Trace.WriteLine(" > " + String.Join("; ", op.Value)); - } - } - - var pages = m_buffer.GetPages(); - Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, "# Slice buffer: {0} bytes in {1} pages ({2} allocated, {3:##0.00}% wasted)", m_buffer.Size, pages.Length, m_buffer.Allocated, 100.0 - (m_buffer.Size * 100.0 / m_buffer.Allocated))); - foreach(var page in pages) - { - Trace.WriteLine(" > " + page.ToString()); - } -#endif - - //m_committedVersion = await m_db.CommitTransactionAsync(this, m_readVersion.Value, m_readConflicts, m_writeConflicts, m_clears, m_writes).ConfigureAwait(false); - - return m_db.EnqueueCommit(this); - -#if DUMP_TRANSACTION_STATE - Trace.WriteLine("=== DONE with commit version " + m_committedVersion); -#endif - } - - internal void CommitInternal() - { - Log("CommitInternalAsync() called"); - m_committedVersion = m_db.CommitTransaction(this, m_readVersion.Value, m_readConflicts, m_writeConflicts, m_clears, m_writes); - 
Log("committed at " + m_committedVersion); - } - - public long GetCommittedVersion() - { - return m_committedVersion; - } - - public void SetReadVersion(long version) - { - throw new NotImplementedException(); - } - - public async Task OnErrorAsync(FdbError code, CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - - switch (code) - { - case FdbError.TimedOut: - case FdbError.PastVersion: - { // wait a bit - - ++m_retryCount; - if (m_retryCount > this.RetryLimit) - { // max rety limit reached - throw new FdbException(code); - } - - //HACKHACK: implement a real back-off delay logic - await Task.Delay(15, cancellationToken).ConfigureAwait(false); - - this.Reset(); - - return; - } - default: - { - throw new FdbException(code); - } - } - } - - public FdbWatch Watch(Slice key, System.Threading.CancellationToken cancellationToken) - { - Contract.Requires(key.HasValue); - cancellationToken.ThrowIfCancellationRequested(); - - throw new NotSupportedException(); - } - - public Task GetAddressesForKeyAsync(Slice key, CancellationToken cancellationToken) - { - if (cancellationToken.IsCancellationRequested) return TaskHelpers.FromCancellation(cancellationToken); - - throw new NotImplementedException(); - } - - private long EnsureHasReadVersion() - { - if (!m_readVersion.HasValue) - { - m_readVersion = m_db.GetCurrentVersion(); - } - return m_readVersion.Value; - } - - public Task GetReadVersionAsync(CancellationToken cancellationToken) - { - if (cancellationToken.IsCancellationRequested) return TaskHelpers.FromCancellation(cancellationToken); - return Task.FromResult(EnsureHasReadVersion()); - } - - public void Cancel() - { - if (m_disposed) ThrowDisposed(); - throw new NotImplementedException(); - } - - public int RetryLimit { get; internal set; } - - public int Timeout { get; internal set; } - - /// The transaction has access to the system keyspace - public int AccessSystemKeys { get; internal set; } - const int NO_ACCESS = 0; - const 
int READ_ACCESS = 1; - const int READ_WRITE_ACCESS = 2; - - /// The next write will not cause a write conflict - public bool NextWriteNoWriteConflictRange { get; internal set; } - - /// If true, the transaction always read the value from the database, and does not see the local mutations - public bool ReadYourWritesDisable { get; internal set; } - - /// Number of retries already done by this transaction - public int RetryCount { get { return m_retryCount; } } - - /// Décode the value of a transaction option into a boolean - private static bool DecodeBooleanOption(Slice data) - { - if (data.Count == 8) - { // spec says that ints should be passed as 8 bytes integers, so we need to accept all zeroes as "false" - return data.ToInt64() != 0; - } - else - { - return data.ToBool(); - } - } - - public void SetOption(FdbTransactionOption option, Slice data) - { - switch(option) - { - case FdbTransactionOption.AccessSystemKeys: - { - this.AccessSystemKeys = (data.IsNullOrEmpty || DecodeBooleanOption(data)) ? READ_WRITE_ACCESS : NO_ACCESS; - break; - } - case FdbTransactionOption.ReadSystemKeys: - { - this.AccessSystemKeys = (data.IsNullOrEmpty || DecodeBooleanOption(data)) ? 
READ_ACCESS : NO_ACCESS; - break; - } - case FdbTransactionOption.RetryLimit: - { - if (data.Count != 8) throw new FdbException(FdbError.InvalidOptionValue); - long value = data.ToInt64(); - if (value < 0 || value >= int.MaxValue) throw new FdbException(FdbError.InvalidOptionValue); - this.RetryLimit = (int)value; - break; - } - - case FdbTransactionOption.Timeout: - { - if (data.Count != 8) throw new FdbException(FdbError.InvalidOptionValue); - long value = data.ToInt64(); - if (value < 0 || value >= int.MaxValue) throw new FdbException(FdbError.InvalidOptionValue); - this.Timeout = (int)value; - break; - } - - case FdbTransactionOption.NextWriteNoWriteConflictRange: - { - this.NextWriteNoWriteConflictRange = data.IsNullOrEmpty || DecodeBooleanOption(data); - break; - } - case FdbTransactionOption.ReadYourWritesDisable: - { - this.ReadYourWritesDisable = data.IsNullOrEmpty || DecodeBooleanOption(data); - break; - } - - default: - { - throw new FdbException(FdbError.InvalidOption); - } - } - } - - private static void ThrowDisposed() - { - throw new ObjectDisposedException("This transaction has already been disposed."); - } - - public void Dispose() - { - if (m_disposed) - { - //TODO: locking ? - m_disposed = true; - - //TODO! - m_buffer = null; - m_readConflicts = null; - m_writeConflicts = null; - m_clears = null; - m_writes = null; - } - - GC.SuppressFinalize(this); - } - - [Conditional("FULL_DEBUG")] - private static void Log(string msg) - { - Trace.WriteLine("MemoryTransactionHandler[#" + Thread.CurrentThread.ManagedThreadId + "]: " + msg); - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Collections/ColaOrderedDictionary.cs b/FoundationDB.Storage.Memory/Collections/ColaOrderedDictionary.cs deleted file mode 100644 index 0b8737052..000000000 --- a/FoundationDB.Storage.Memory/Collections/ColaOrderedDictionary.cs +++ /dev/null @@ -1,442 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Linq; - using System.Runtime.InteropServices; - - /// Represent an ordered set of key/value pairs, stored in a Cache Oblivious Lookahead Array - /// Type of ordered keys stored in the dictionary. - /// Type of values stored in the dictionary. - [DebuggerDisplay("Count={m_items.Count}"), DebuggerTypeProxy(typeof(ColaOrderedDictionary<,>.DebugView))] - public class ColaOrderedDictionary : IEnumerable> - { - - /// Debug view helper - private sealed class DebugView - { - private readonly ColaOrderedDictionary m_dictionary; - - public DebugView(ColaOrderedDictionary dictionary) - { - m_dictionary = dictionary; - } - - [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)] - public KeyValuePair[] Items - { - get - { - var tmp = new KeyValuePair[m_dictionary.Count]; - m_dictionary.CopyTo(tmp, 0); - return tmp; - } - } - } - - /// Wrapper for a comparer on the keys of a key/value pair - private sealed class KeyOnlyComparer : IComparer> - { - private readonly IComparer m_comparer; - - public KeyOnlyComparer(IComparer comparer) - { - m_comparer = comparer; - } - - public int Compare(KeyValuePair x, KeyValuePair y) - { - return m_comparer.Compare(x.Key, y.Key); - } - } - - /// COLA array used to store the entries in the dictionary - private readonly ColaStore> m_items; - - /// Comparer for the keys of the dictionary - private readonly IComparer m_keyComparer; - - /// Comparer for the values of the dictionary - private readonly IEqualityComparer m_valueComparer; - - private volatile int m_version; - - #region Constructors... 
- - public ColaOrderedDictionary(IComparer keyComparer = null, IEqualityComparer valueComparer = null) - : this(0, keyComparer, valueComparer) - { } - - public ColaOrderedDictionary(int capacity) - : this(capacity, null, null) - { } - - public ColaOrderedDictionary(int capacity, IComparer keyComparer, IEqualityComparer valueComparer) - { - m_keyComparer = keyComparer ?? Comparer.Default; - m_valueComparer = valueComparer ?? EqualityComparer.Default; - m_items = new ColaStore>(capacity, new KeyOnlyComparer(m_keyComparer)); - } - - #endregion - - #region Public Properties... - - public int Count - { - get { return m_items.Count; } - } - - public int Capacity - { - get { return m_items.Capacity; } - } - - public TValue this[TKey key] - { - get { return GetValue(key); } - set { SetItem(key, value); } - } - - #endregion - - public void Clear() - { - ++m_version; - m_items.Clear(); - } - - public IComparer KeyComparer - { - get { return m_keyComparer; } - } - - public IEqualityComparer ValueComparer - { - get { return m_valueComparer; } - } - - internal ColaStore> Items - { - get { return m_items; } - } - - /// Adds an entry with the specified key and value to the sorted dictionary. - /// The key of the entry to add. - /// The value of the entry to add. - /// If an entry with the same key already exist in the dictionary. - public void Add(TKey key, TValue value) - { - if (key == null) ThrowKeyCannotBeNull(); - - ++m_version; - if (!m_items.SetOrAdd(new KeyValuePair(key, value), overwriteExistingValue: false)) - { - --m_version; - ThrowKeyAlreadyExists(); - } - } - - /// Sets the specified key and value in the immutable sorted dictionary, possibly overwriting an existing value for the given key. - /// The key of the entry to add. - /// The key value to set. 
- public void SetItem(TKey key, TValue value) - { - if (key == null) ThrowKeyCannotBeNull(); - ++m_version; - m_items.SetOrAdd(new KeyValuePair(key, value), overwriteExistingValue: true); - } - - /// Try to add an entry with the specified key and value to the sorted dictionary, or update its value if it already exists. - /// The key of the entry to add. - /// The value of the entry to add. - /// true if the key did not previously exist and was inserted; otherwise, false. - public bool AddOrUpdate(TKey key, TValue value) - { - if (key == null) ThrowKeyCannotBeNull(); - - KeyValuePair entry; - int offset, level = m_items.Find(new KeyValuePair(key, default(TValue)), out offset, out entry); - if (level >= 0) - { // already exists - // keep the old key, and update the value - ++m_version; - m_items.SetAt(level, offset, new KeyValuePair(entry.Key, value)); - return false; - } - - ++m_version; - m_items.Insert(new KeyValuePair(key, value)); - return true; - } - - /// Try to add an entry with the specified key and value to the sorted dictionary, if it does not already exists. - /// The key of the entry to add. - /// The value of the entry to add. - /// Receives the previous value if already exists, or if it was inserted - /// true if the key did not previously exist and was inserted; otherwise, false. 
- public bool GetOrAdd(TKey key, TValue value, out TValue actualValue) - { - if (key == null) ThrowKeyCannotBeNull(); - - KeyValuePair entry; - int _, level = m_items.Find(new KeyValuePair(key, default(TValue)), out _, out entry); - if (level >= 0) - { // already exists - actualValue = entry.Value; - return false; - } - - ++m_version; - m_items.Insert(new KeyValuePair(key, value)); - actualValue = value; - return true; - } - - public bool ContainsKey(TKey key) - { - if (key == null) ThrowKeyCannotBeNull(); - - int _; - KeyValuePair __; - return m_items.Find(new KeyValuePair(key, default(TValue)), out _, out __) >= 0; - } - - public bool ContainsValue(TValue value) - { - foreach(var kvp in m_items.IterateUnordered()) - { - if (m_valueComparer.Equals(kvp.Value)) return true; - } - return false; - } - - /// Determines whether this dictionary contains a specified key. - /// The key to search for. - /// The matching key located in the dictionary if found, or equalkey if no match is found. - /// true if a match for is found; otherwise, false. - public bool TryGetKey(TKey equalKey, out TKey actualKey) - { - if (equalKey == null) ThrowKeyCannotBeNull(); - - KeyValuePair entry; - int _, level = m_items.Find(new KeyValuePair(equalKey, default(TValue)), out _, out entry); - if (level < 0) - { - actualKey = equalKey; - return false; - } - actualKey = entry.Key; - return true; - } - - /// Gets the value associated with the specified key. - /// The key to search for. - /// - /// true if a match for is found; otherwise, false. 
- public bool TryGetValue(TKey key, out TValue value) - { - if (key == null) ThrowKeyCannotBeNull(); - - KeyValuePair entry; - int _, level = m_items.Find(new KeyValuePair(key, default(TValue)), out _, out entry); - if (level < 0) - { - value = default(TValue); - return false; - } - value = entry.Value; - return true; - } - - public TValue GetValue(TKey key) - { - if (key == null) ThrowKeyCannotBeNull(); - - KeyValuePair entry; - int _, level = m_items.Find(new KeyValuePair(key, default(TValue)), out _, out entry); - if (level < 0) - { - ThrowKeyNotFound(); - } - return entry.Value; - } - - /// Gets the existing key and value associated with the specified key. - /// The key to search for. - /// The matching key and value pair located in the dictionary if found. - /// true if a match for is found; otherwise, false. - public bool TryGetKeyValue(TKey key, out KeyValuePair entry) - { - if (key == null) ThrowKeyCannotBeNull(); - - int _, level = m_items.Find(new KeyValuePair(key, default(TValue)), out _, out entry); - return level >= 0; - } - - /// Removes the entry with the specified key from the dictionary. - /// The key of the entry to remove. - /// true if the value was found and removed from the dictionary; otherwise, false. - /// It is NOT allowed to remove keys while iterating on the dictionary at the same time! - public bool Remove(TKey key) - { - if (key == null) ThrowKeyCannotBeNull(); - - KeyValuePair _; - int offset, level = m_items.Find(new KeyValuePair(key, default(TValue)), out offset, out _); - - if (level >= 0) - { - ++m_version; - m_items.RemoveAt(level, offset); - return true; - } - return false; - } - - /// Remove the entries with the specified keys from the dictionary. - /// The keys of the entries to remove. - /// Number of entries that were found and removed. - /// It is NOT allowed to remove keys while iterating on the dictionary at the same time! 
- public int RemoveRange(IEnumerable keys) - { - if (keys == null) throw new ArgumentNullException("keys"); - - // we need to protect against people passing in the result of calling FindBetween, - // because we can't remove while iterating at the same time ! - - int count = 0; - foreach (var key in keys) - { - if (Remove(key)) ++count; - } - return count; - } - - /// Enumerate all the keys in the dictionary that are in the specified range - /// Start of the range - /// If true, the key is included in the range - /// End of the range - /// If true, the key is included in the range - /// Unordered list of the all the keys in the dictionary that are in the range. - /// There is no guarantee in the actual order of the keys returned. It is also not allowed to remove keys while iterating over the sequence. - public IEnumerable FindBetween(TKey begin, bool beginOrEqual, TKey end, bool endOrEqual) - { - // return the unordered list of all the keys that are between the begin/end pair. - // each bound is included in the list if its corresponding 'orEqual' is set to true - - if (m_items.Count > 0) - { - var start = new KeyValuePair(begin, default(TValue)); - var stop = new KeyValuePair(end, default(TValue)); - - foreach (var kvp in m_items.FindBetween(start, beginOrEqual, stop, endOrEqual, int.MaxValue)) - { - yield return kvp.Key; - } - } - } - - /// Returns an enumerator that iterates through the ordered dictionary - public ColaStore.Enumerator> GetEnumerator() - { - return new ColaStore.Enumerator>(m_items, reverse: false); - } - - IEnumerator> IEnumerable>.GetEnumerator() - { - return this.GetEnumerator(); - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - internal void CopyTo(KeyValuePair[] array, int index) - { - m_items.CopyTo(array, index, m_items.Count); - } - - private static void ThrowKeyCannotBeNull() - { - throw new ArgumentNullException("key"); - } - - private static void 
ThrowKeyNotFound() - { - throw new KeyNotFoundException(); - } - - private static void ThrowKeyAlreadyExists() - { - throw new InvalidOperationException("An entry with the same key but a different value already exists."); - } - - //TODO: remove or set to internal ! - [Conditional("DEBUG")] - public void Debug_Dump() - { - Trace.WriteLine("Dumping ColaOrderedDictionary<" + typeof(TKey).Name + ", " + typeof(TValue).Name + "> filled at " + (100.0d * this.Count / this.Capacity).ToString("N2") + "%"); - m_items.Debug_Dump(); - } - - [StructLayout(LayoutKind.Sequential)] - public struct Enumerator : IEnumerator>, IDisposable - { - private const int NOT_FOUND = -1; - - private readonly int m_version; - private readonly ColaOrderedDictionary m_parent; - private ColaStore.Enumerator> m_iterator; - - internal Enumerator(ColaOrderedDictionary parent, bool reverse) - { - m_version = parent.m_version; - m_parent = parent; - m_iterator = new ColaStore.Enumerator>(parent.m_items, reverse); - } - - public bool MoveNext() - { - if (m_version != m_parent.m_version) - { - ColaStore.ThrowStoreVersionChanged(); - } - - return m_iterator.MoveNext(); - } - - public KeyValuePair Current - { - get { return m_iterator.Current; } - } - - public void Dispose() - { - // we are a struct that can be copied by value, so there is no guarantee that Dispose() will accomplish anything anyway... 
- } - - object System.Collections.IEnumerator.Current - { - get { return m_iterator.Current; } - } - - void System.Collections.IEnumerator.Reset() - { - if (m_version != m_parent.m_version) - { - ColaStore.ThrowStoreVersionChanged(); - } - m_iterator = new ColaStore.Enumerator>(m_parent.m_items, m_iterator.Reverse); - } - - } - - } -} diff --git a/FoundationDB.Storage.Memory/Collections/ColaOrderedSet.cs b/FoundationDB.Storage.Memory/Collections/ColaOrderedSet.cs deleted file mode 100644 index bb08b076a..000000000 --- a/FoundationDB.Storage.Memory/Collections/ColaOrderedSet.cs +++ /dev/null @@ -1,285 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Runtime.InteropServices; - - /// Represent an ordered set of elements, stored in a Cache Oblivous Lookup Array - /// Type of elements stored in the set - /// Inserts are in O(LogN) amortized. Lookups are in O(Log(N)) - public class ColaOrderedSet : IEnumerable - { - private const int NOT_FOUND = -1; - - /// COLA array used to store the elements in the set - private readonly ColaStore m_items; - - private volatile int m_version; - - #region Constructors... - - public ColaOrderedSet() - : this(0, Comparer.Default) - { } - - public ColaOrderedSet(int capacity) - : this(capacity, Comparer.Default) - { } - - public ColaOrderedSet(IComparer comparer) - : this(0, comparer) - { } - - public ColaOrderedSet(int capacity, IComparer comparer) - { - if (capacity < 0) throw new ArgumentOutOfRangeException("capacity", "Capacity cannot be less than zero."); - Contract.EndContractBlock(); - - m_items = new ColaStore(capacity, comparer ?? Comparer.Default); - } - - #endregion - - #region Public Properties... - - /// Gets the number of elements in the immutable sorted set. 
- public int Count - { - get { return m_items.Count; } - } - - /// Current capacity of the set - public int Capacity - { - get { return m_items.Capacity; } - } - - public IComparer Comparer - { - get { return m_items.Comparer; } - } - - public T this[int index] - { - get - { - if (index < 0 || index >= m_items.Count) ThrowIndexOutOfRangeException(); - int offset; - int level = ColaStore.MapOffsetToLocation(m_items.Count, index, out offset); - Contract.Assert(level >= 0); - return m_items.GetAt(level, offset); - } - } - - private static void ThrowIndexOutOfRangeException() - { - throw new IndexOutOfRangeException("Index is out of range"); - } - - #endregion - - #region Public Methods... - - public void Clear() - { - ++m_version; - m_items.Clear(); - } - - /// Adds the specified value to this ordered set. - /// The value to add. - /// If the value already exists in the set, it will not be overwritten - public bool Add(T value) - { - ++m_version; - if (!m_items.SetOrAdd(value, overwriteExistingValue: false)) - { - --m_version; - return false; - } - return true; - } - - /// Adds or overwrite the specified value to this ordered set. - /// The value to add. 
- /// If the value already exists in the set, it will be overwritten by - public bool Set(T value) - { - ++m_version; - return m_items.SetOrAdd(value, overwriteExistingValue: true); - } - - public bool TryRemove(T value, out T actualValue) - { - int offset; - int level = m_items.Find(value, out offset, out actualValue); - if (level != NOT_FOUND) - { - ++m_version; - m_items.RemoveAt(level, offset); - return true; - } - return false; - } - - public bool Remove(T value) - { - T _; - return TryRemove(value, out _); - } - - public T RemoveAt(int arrayIndex) - { - if (arrayIndex < 0 || arrayIndex >= m_items.Count) throw new ArgumentOutOfRangeException("arrayIndex", "Index is outside the array"); - - int offset; - int level = ColaStore.MapOffsetToLocation(m_items.Count, arrayIndex, out offset); - Contract.Assert(level >= 0 && offset >= 0 && offset < 1 << level); - - ++m_version; - return m_items.RemoveAt(level, offset); - } - - /// Determines whether this immutable sorted set contains the specified value. - /// The value to check for. - /// true if the set contains the specified value; otherwise, false. - public bool Contains(T value) - { - int _; - T __; - return m_items.Find(value, out _, out __) >= 0; - } - - /// Find an element - /// - /// The zero-based index of the first occurrence of within the entire list, if found; otherwise, –1. - public int IndexOf(T value) - { - T _; - int offset, level = m_items.Find(value, out offset, out _); - if (level >= 0) - { - return ColaStore.MapLocationToOffset(m_items.Count, level, offset); - } - return NOT_FOUND; - } - - /// Searches the set for a given value and returns the equal value it finds, if any. - /// The value to search for. - /// The value from the set that the search found, or the original value if the search yielded no match. - /// A value indicating whether the search was successful. 
- public bool TryGetValue(T value, out T actualValue) - { - int _; - return m_items.Find(value, out _, out actualValue) >= 0; - } - - /// Copy the ordered elements of the set to an array - /// The one-dimensional array that is the destination of the elements copied from collection. The array must have zero-based indexing. - public void CopyTo(T[] array) - { - Contract.Requires(array != null); - m_items.CopyTo(array, 0, array.Length); - } - - /// Copies the ordered elements of the set to an array, starting at a particular array index. - /// The one-dimensional array that is the destination of the elements copied from collection. The array must have zero-based indexing. - /// The zero-based index in array at which copying begins. - public void CopyTo(T[] array, int arrayIndex) - { - Contract.Requires(array != null && arrayIndex >= 0); - m_items.CopyTo(array, arrayIndex, m_items.Count); - } - - public void CopyTo(T[] array, int arrayIndex, int count) - { - Contract.Requires(array != null && arrayIndex >= 0 && count >= 0); - m_items.CopyTo(array, arrayIndex, count); - } - - public ColaStore.Enumerator GetEnumerator() - { - return new ColaStore.Enumerator(m_items, reverse: false); - } - - IEnumerator IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - #endregion - - //TODO: remove or set to internal ! 
- [Conditional("DEBUG")] - public void Debug_Dump() - { - Trace.WriteLine("Dumping ColaOrderedSet<" + typeof(T).Name + "> filled at " + (100.0d * this.Count / this.Capacity).ToString("N2") + "%"); - m_items.Debug_Dump(); - } - - [StructLayout(LayoutKind.Sequential)] - public struct Enumerator : IEnumerator, IDisposable - { - private const int NOT_FOUND = -1; - - private readonly int m_version; - private readonly ColaOrderedSet m_parent; - private ColaStore.Enumerator m_iterator; - - internal Enumerator(ColaOrderedSet parent, bool reverse) - { - m_version = parent.m_version; - m_parent = parent; - m_iterator = new ColaStore.Enumerator(parent.m_items, reverse); - } - - public bool MoveNext() - { - if (m_version != m_parent.m_version) - { - ColaStore.ThrowStoreVersionChanged(); - } - - return m_iterator.MoveNext(); - } - - public T Current - { - get { return m_iterator.Current; } - } - - public void Dispose() - { - // we are a struct that can be copied by value, so there is no guarantee that Dispose() will accomplish anything anyway... - } - - object System.Collections.IEnumerator.Current - { - get { return m_iterator.Current; } - } - - void System.Collections.IEnumerator.Reset() - { - if (m_version != m_parent.m_version) - { - ColaStore.ThrowStoreVersionChanged(); - } - m_iterator = new ColaStore.Enumerator(m_parent.m_items, m_iterator.Reverse); - } - - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Collections/ColaRangeDictionary.cs b/FoundationDB.Storage.Memory/Collections/ColaRangeDictionary.cs deleted file mode 100644 index 73c7be545..000000000 --- a/FoundationDB.Storage.Memory/Collections/ColaRangeDictionary.cs +++ /dev/null @@ -1,704 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -// enables consitency checks after each operation to the set -#undef ENFORCE_INVARIANTS - -namespace FoundationDB.Storage.Memory.Core -{ - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Globalization; - - /// Represent an ordered list of ranges, each associated with a specific value, stored in a Cache Oblivous Lookup Array - /// Type of the keys stored in the set - /// Type of the values associated with each range - [DebuggerDisplay("Count={m_items.Count}, Bounds={m_bounds.Begin}..{m_bounds.End}")] - public sealed class ColaRangeDictionary : IEnumerable.Entry> - { - // This class is equivalent to ColaRangeSet, except that we have an extra value stored in each range. - // That means that we only merge ranges with the same value, and split/truncate/overwrite ranges with different values - - // INVARIANTS - // * If there is at least on range, the set is not empty (ie: Begin <= End) - // * The Begin key is INCLUDED in range, but the End key is EXCLUDED from the range (ie: Begin <= K < End) - // * The End key of a range MUST be GREATER THAN or EQUAL TO the Begin key of a range (ie: ranges are not backwards) - // * The End key of a range CANNOT be GREATER THAN the Begin key of the next range (ie: ranges do not overlap) - // * If the End key of a range is EQUAL TO the Begin key of the next range, then they MUST have a DIFFERENT value - - /// Mutable range - public sealed class Entry - { - public TKey Begin { get; internal set; } - public TKey End { get; internal set; } - public TValue Value { get; internal set; } - - public Entry(TKey begin, TKey end, TValue value) - { - this.Begin = begin; - this.End = end; - this.Value = value; - } - - /// Overwrite this range with another one - /// New range that will overwrite the current instance - internal void Set(Entry other) - { - this.Begin = other.Begin; - this.End = other.End; - 
this.Value = other.Value; - } - - public override string ToString() - { - return String.Format(CultureInfo.InvariantCulture, "({0} ~ {1}, {2})", this.Begin, this.End, this.Value); - } - } - - /// Range comparer that only test the Begin key - private sealed class BeginKeyComparer : IComparer - { - private readonly IComparer m_comparer; - - public BeginKeyComparer(IComparer comparer) - { - m_comparer = comparer; - } - - public int Compare(Entry x, Entry y) - { -#if DEBUG - if (x == null || y == null) Debugger.Break(); -#endif - return m_comparer.Compare(x.Begin, y.Begin); - } - } - - private sealed class EndKeyComparer : IComparer - { - private readonly IComparer m_comparer; - - public EndKeyComparer(IComparer comparer) - { - m_comparer = comparer; - } - - public int Compare(Entry x, Entry y) - { -#if DEBUG - if (x == null || y == null) Debugger.Break(); -#endif - return m_comparer.Compare(x.End, y.End); - } - } - - private readonly ColaStore m_items; - private readonly IComparer m_keyComparer; - private readonly IComparer m_valueComparer; - private readonly Entry m_bounds; - - public ColaRangeDictionary() - : this(0, null, null) - { } - - public ColaRangeDictionary(int capacity) - : this(capacity, null, null) - { } - - public ColaRangeDictionary(IComparer keyComparer, IComparer valueComparer) - : this(0, keyComparer, valueComparer) - { } - - public ColaRangeDictionary(int capacity, IComparer keyComparer, IComparer valueComparer) - { - m_keyComparer = keyComparer ?? Comparer.Default; - m_valueComparer = valueComparer ?? 
Comparer.Default; - if (capacity == 0) capacity = 15; - m_items = new ColaStore(capacity, new BeginKeyComparer(m_keyComparer)); - m_bounds = new Entry(default(TKey), default(TKey), default(TValue)); - } - - [Conditional("ENFORCE_INVARIANTS")] - private void CheckInvariants() - { - Contract.Assert(m_bounds != null); - Debug.WriteLine("INVARIANTS:" + this.ToString() + " <> " + m_bounds.ToString()); - - if (m_items.Count == 0) - { - Contract.Assert(EqualityComparer.Default.Equals(m_bounds.Begin, default(TKey))); - Contract.Assert(EqualityComparer.Default.Equals(m_bounds.End, default(TKey))); - } - else if (m_items.Count == 1) - { - Contract.Assert(EqualityComparer.Default.Equals(m_bounds.Begin, m_items[0].Begin)); - Contract.Assert(EqualityComparer.Default.Equals(m_bounds.End, m_items[0].End)); - } - else - { - Entry previous = null; - Entry first = null; - foreach (var item in this) - { - Contract.Assert(m_keyComparer.Compare(item.Begin, item.End) < 0, "End key should be after begin"); - - if (previous == null) - { - first = item; - } - else - { - int c = m_keyComparer.Compare(previous.End, item.Begin); - if (c > 0) Contract.Assert(false, String.Format("Range overlapping: {0} and {1}", previous, item)); - if (c == 0 && m_valueComparer.Compare(previous.Value, item.Value) == 0) Contract.Assert(false, String.Format("Unmerged ranges: {0} and {1}", previous, item)); - } - previous = item; - } - Contract.Assert(EqualityComparer.Default.Equals(m_bounds.Begin, first.Begin), String.Format("Min bound {0} does not match with {1}", m_bounds.Begin, first.Begin)); - Contract.Assert(EqualityComparer.Default.Equals(m_bounds.End, previous.End), String.Format("Max bound {0} does not match with {1}", m_bounds.End, previous.End)); - } - - } - - public int Count { get { return m_items.Count; } } - - public int Capacity { get { return m_items.Capacity; } } - - public IComparer KeyComparer { get { return m_keyComparer; } } - - public IComparer ValueComparer { get { return m_valueComparer; 
} } - - public Entry Bounds { get { return m_bounds; } } - - private Entry GetBeginRangeIntersecting(Entry range) - { - // look for the first existing range that is intersected by the start of the new range - - Entry cursor; - int offset, level = m_items.FindPrevious(range, true, out offset, out cursor); - if (level < 0) - { - return null; - } - return cursor; - } - - private Entry GetEndRangeIntersecting(Entry range) - { - // look for the last existing range that is intersected by the end of the new range - - Entry cursor; - int offset, level = m_items.FindPrevious(range, true, out offset, out cursor); - if (level < 0) - { - return null; - } - return cursor; - } - - private TKey Min(TKey a, TKey b) - { - return m_keyComparer.Compare(a, b) <= 0 ? a : b; - } - - private TKey Max(TKey a, TKey b) - { - return m_keyComparer.Compare(a, b) >= 0 ? a : b; - } - - public void Clear() - { - m_items.Clear(); - m_bounds.Begin = default(TKey); - m_bounds.End = default(TKey); - - CheckInvariants(); - } - - public void Mark(TKey begin, TKey end, TValue value) - { - if (m_keyComparer.Compare(begin, end) >= 0) throw new InvalidOperationException("End key must be greater than the Begin key."); - - // adds a new interval to the dictionary by overwriting or splitting any previous interval - // * if there are no interval, or the interval is disjoint from all other intervals, it is inserted as-is - // * if the new interval completly overwrites one or more intervals, they will be replaced by the new interval - // * if the new interval partially overlaps with one or more intervals, they will be split into chunks, and the new interval will be inserted between them - - // Examples: - // { } + [0..1,A] => { [0..1,A] } - // { [0..1,A] } + [2..3,B] => { [0..1,A], [2..3,B] } - // { [4..5,A] } + [0..10,B] => { [0..10,B] } - // { [0..10,A] } + [4..5,B] => { [0..4,A], [4..5,B], [5..10,A] } - // { [2..4,A], [6..8,B] } + [3..7,C] => { [2..3,A], [3..7,C], [7..8,B] } - // { [1..2,A], [2..3,B], ..., 
[9..10,Y] } + [0..10,Z] => { [0..10,Z] } - - var entry = new Entry(begin, end, value); - Entry cursor; - var cmp = m_keyComparer; - int c1, c2; - - try - { - - switch (m_items.Count) - { - case 0: - { // the list empty - - // no checks required - m_items.Insert(entry); - m_bounds.Begin = entry.Begin; - m_bounds.End = entry.End; - break; - } - - case 1: - { // there is only one value - - cursor = m_items[0]; - - c1 = cmp.Compare(begin, cursor.End); - if (c1 >= 0) - { - // [--------) [========) - // or [--------|========) - if (c1 == 0 && m_valueComparer.Compare(cursor.Value, value) == 0) - { - cursor.End = end; - } - else - { - m_items.Insert(entry); - } - m_bounds.End = end; - return; - } - c1 = cmp.Compare(end, cursor.Begin); - if (c1 <= 0) - { - // [========) [--------) - // or [========|--------) - if (c1 == 0 && m_valueComparer.Compare(cursor.Value, value) == 0) - { - cursor.Begin = begin; - } - else - { - m_items.Insert(entry); - } - m_bounds.Begin = begin; - return; - } - - c1 = cmp.Compare(begin, cursor.Begin); - c2 = cmp.Compare(end, cursor.End); - if (c1 == 0) - { // same start - if (c2 == 0) - { // same end - // [--------) - // + [========) - // = [========) - cursor.Value = value; - } - else if (c2 < 0) - { - // [----------) - // + [======) - // = [======|---) - if (m_valueComparer.Compare(cursor.Value, value) != 0) - { - cursor.Begin = end; - m_items.Insert(entry); - } - } - else - { - // [------) - // + [==========) - // = [==========) - cursor.Set(entry); - m_bounds.End = end; - } - } - else if (c1 > 0) - { // entry is to the right - if (c2 >= 0) - { - // [------) - // + [=======) - // = [---|=======) - - cursor.End = begin; - m_items.Insert(entry); - if (c2 > 0) m_bounds.End = end; - } - else - { - // [-----------) - // + [====) - // = [---|====|--) - var tmp = new Entry(end, cursor.End, cursor.Value); - cursor.End = begin; - m_items.InsertItems(entry, tmp); - } - } - else - { // entry is to the left - if (c2 >= 0) - { - cursor.Set(entry); - 
m_bounds.End = end; - } - else - { - cursor.Begin = end; - m_items.Insert(entry); - } - m_bounds.Begin = begin; - } - break; - } - - default: - { - // check with the bounds first - - if (cmp.Compare(begin, m_bounds.End) > 0) - { // completely to the right - m_items.Insert(entry); - m_bounds.End = end; - break; - } - if (cmp.Compare(end, m_bounds.Begin) < 0) - { // completely to the left - m_items.Insert(entry); - m_bounds.Begin = begin; - break; - } - if (cmp.Compare(begin, m_bounds.Begin) <= 0 && cmp.Compare(end, m_bounds.End) >= 0) - { // overlaps with all the ranges - // note :if we are here, there was at least 2 items, so just clear everything - m_items.Clear(); - m_items.Insert(entry); - m_bounds.Begin = entry.Begin; - m_bounds.End = entry.End; - break; - } - - // note: we have already bound checked, so we know that there is at least one overlap ! - - bool inserted = false; - - // => we will try to find the first range and last range in the dictionary that would be impacted, mutate them and delete all ranges in between - - var iterator = m_items.GetIterator(); - // seek to the range that starts before (or at) the new range's begin point - if (!iterator.Seek(entry, true)) - { // the new range will go into first position - // => still need to check if we are overlapping with the next ranges - iterator.SeekFirst(); - //Console.WriteLine(" . new lower bound, but intersects with first range..."); - m_bounds.Begin = begin; - } - - m_bounds.End = Max(m_bounds.End, end); - - cursor = iterator.Current; - - c1 = cmp.Compare(cursor.Begin, begin); - c2 = cmp.Compare(cursor.End, end); - if (c1 >= 0) - { - if (c2 == 0) - { // same end - // [-------).. [-------).. - // + [=======) + [==========) - // = [=======).. = [==========).. - cursor.Set(entry); - return; - } - - if (c2 > 0) - { // truncate begin - // [----------).. [----------).. - // + [=======) + [=======) - // = [=======|--).. = [=======|-----).. 
- cursor.Begin = end; - m_items.Insert(entry); - return; - } - - // replace + propagate - // [-------)???.. [-----)????.. - // + [==========) + [============) - // = [==========).. = [============).. - - cursor.Set(entry); - inserted = true; - //TODO: need to propagate ! - } - else - { - if (c2 == 0) - { // same end - // [------------) - // [========) - // = [---|========) - - cursor.End = begin; - m_items.Insert(entry); - return; - } - - if (c2 > 0) - { - // [------------) - // [=====) - // = [---|=====|--) - - var tmp = new Entry(end, cursor.End, cursor.Value); - cursor.End = begin; - m_items.InsertItems(entry, tmp); - return; - } - - int c3 = cmp.Compare(begin, cursor.End); - if (c3 >= 0) - { - if (c3 == 0 && m_valueComparer.Compare(value, cursor.Value) == 0) - { // touching same value => merge - cursor.End = end; - entry = cursor; - inserted = true; - } - else - { - // [---) - // [=====???? - // = [---) [=====???? - } - } - else - { - // [--------???? - // [====???? - // = [---|====???? 
- cursor.End = begin; - } - } - - // if we end up here, it means that we may be overlapping with following items - // => we need to delete them until we reach the last one, which we need to either delete or mutate - // => also, if we haven't inserted the entry yet, we will reuse the first deleted range to insert the entry, and only insert at the end if we haven't found a spot - - List deleted = null; - - while (true) - { - if (!iterator.Next()) - { // we reached past the end of the db - break; - } - - // cursor: existing range that we need to either delete or mutate - cursor = iterator.Current; - - c1 = cmp.Compare(cursor.Begin, end); - if (c1 == 0) - { // touching the next range - if (m_valueComparer.Compare(value, cursor.Value) == 0) - { // contiguous block with same value => merge - // [===========) - // [=====) - // = [=================) - if (inserted) - { - if (cmp.Compare(cursor.End, entry.End) > 0) - { - entry.End = cursor.End; - } - //note: we can't really delete while iterating with a cursor, so just mark it for deletion - if (deleted == null) deleted = new List(); - deleted.Add(cursor); - } - else - { - cursor.Begin = begin; - entry = cursor; - inserted = true; - } - break; - } - else - { - // [-----------) - // [=====) - // = [=====|-----------) - } - break; - } - else if (c1 > 0) - { // we are past the inserted range, nothing to do any more - // [------------) - // [=====) - // = [=====) [------------) - //Console.WriteLine(" . no overlap => break"); - break; - } - - c1 = cmp.Compare(cursor.End, end); - if (c1 <= 0) - { // we are completely covered => delete - - // [-------) [-------) - // + [...=======) + [...=======...) - // = [...=======) = [...=======...) 
- if (!inserted) - { // use that slot to insert ourselves - cursor.Set(entry); - inserted = true; - } - else - { - //note: we can't really delete while iterating with a cursor, so just mark it for deletion - if (deleted == null) deleted = new List(); - deleted.Add(cursor); - - } - } - else - { // we are only partially overlapped - - // [------------) - // [....========) - // = [....========|---) - - cursor.Begin = end; - break; - } - } - - if (deleted != null && deleted.Count > 0) - { - m_items.RemoveItems(deleted); - } - - if (!inserted) - { // we did not find an existing spot to re-use, so we need to insert the new range - m_items.Insert(entry); - } - break; - } - } - } - finally - { - CheckInvariants(); - } - } - - /// Checks if there is at least one range in the dictionary that intersects with the specified range, and matches the predicate - /// Lower bound of the intersection - /// Higher bound (excluded) of the intersection - /// Value that is passed as the second argument to - /// Predicate called for each intersected range. - /// True if there was at least one intersecting range, and returned true for that range. - public bool Intersect(TKey begin, TKey end, TValue arg, Func predicate) - { - if (m_items.Count == 0) return false; - - var cmp = m_keyComparer; - if (cmp.Compare(m_bounds.Begin, end) >= 0) return false; - if (cmp.Compare(m_bounds.End, begin) <= 0) return false; - - var entry = new Entry(begin, end, default(TValue)); - - var iterator = m_items.GetIterator(); - if (!iterator.Seek(entry, true)) - { // starts before - iterator.SeekFirst(); - } - - do - { - var cursor = iterator.Current; - - // A and B intersects if: CMP(B.end, A.begin) <= 0 .OR. 
CMP(A.end, B.begin) <= 0 - - if (cmp.Compare(end, cursor.Begin) <= 0) - { - return false; - } - - if (cmp.Compare(cursor.End, begin) > 0 && predicate(cursor.Value, arg)) - { - return true; - } - } - while(iterator.Next()); - - return false; - } - - public ColaStore.Enumerator GetEnumerator() - { - return new ColaStore.Enumerator(m_items, reverse: false); - } - - IEnumerator IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - [Conditional("DEBUG")] - //TODO: remove or set to internal ! - public void Debug_Dump() - { - Debug.WriteLine("Dumping ColaRangeDictionary<" + typeof(TKey).Name + "> filled at " + (100.0d * this.Count / this.Capacity).ToString("N2") + "%"); - m_items.Debug_Dump(); - } - - public override string ToString() - { - if (m_items.Count == 0) return "{ }"; - - var sb = new System.Text.StringBuilder(); - Entry previous = null; - foreach(var item in this) - { - if (previous == null) - { - sb.Append('['); - } - else if (m_keyComparer.Compare(previous.End, item.Begin) < 0) - { - sb.Append(previous.End).Append(") ["); - } - else - { - sb.Append(previous.End).Append('|'); - } - - sb.Append(item.Begin).Append("..(").Append(item.Value).Append(").."); - previous = item; - } - if (previous != null) - { - sb.Append(previous.End).Append(")"); - } - - return sb.ToString(); - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Collections/ColaRangeSet.cs b/FoundationDB.Storage.Memory/Collections/ColaRangeSet.cs deleted file mode 100644 index d8492d249..000000000 --- a/FoundationDB.Storage.Memory/Collections/ColaRangeSet.cs +++ /dev/null @@ -1,343 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -// enables consitency checks after each operation to the set -#define ENFORCE_INVARIANTS - -namespace FoundationDB.Storage.Memory.Core -{ - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Globalization; - - /// Represent an ordered list of ranges, stored in a Cache Oblivous Lookup Array - /// Type of keys stored in the set - [DebuggerDisplay("Count={m_items.Count}, Bounds={m_bounds.Begin}..{m_bounds.End}")] - public sealed class ColaRangeSet : IEnumerable.Entry> - { - // We store the ranges in a COLA array that is sorted by the Begin keys - // The range are mutable, which allows for efficient merging - - // INVARIANTS - // * If there is at least on range, the set is not empty - // * The Begin key is INCLUDED in range, but the End key is EXCLUDED from the range (ie: Begin <= K < End) - // * The End key of a range is always GREATER than or EQUAL to the Begin key of a range (ie: ranges are not backwards) - // * The End key of a range is always strictly LESS than the Begin key of the next range (ie: there are gaps between ranges) - - // This should give us a sorted set of disjoint ranges - - /// Mutable range - public sealed class Entry - { - public TKey Begin { get; internal set; } - public TKey End { get; internal set; } - - public Entry(TKey begin, TKey end) - { - this.Begin = begin; - this.End = end; - } - - internal void ReplaceWith(Entry other) - { - this.Begin = other.Begin; - this.End = other.End; - } - - internal void Update(TKey begin, TKey end) - { - this.Begin = begin; - this.End = end; - } - - internal bool Contains(TKey key, IComparer comparer) - { - return comparer.Compare(key, this.Begin) >= 0 && comparer.Compare(key, this.End) < 0; - } - - public override string ToString() - { - return String.Format(CultureInfo.InvariantCulture, "[{0}, {1})", this.Begin, this.End); - } - } - - /// Range comparer that only test the 
Begin key - private sealed class BeginKeyComparer : IComparer - { - private readonly IComparer m_comparer; - - public BeginKeyComparer(IComparer comparer) - { - m_comparer = comparer; - } - - public int Compare(Entry x, Entry y) - { - return m_comparer.Compare(x.Begin, y.Begin); - } - - } - - private readonly ColaStore m_items; - private readonly IComparer m_comparer; - private readonly Entry m_bounds; - - public ColaRangeSet() - : this(0, null) - { } - - public ColaRangeSet(int capacity) - : this(capacity, null) - { } - - public ColaRangeSet(IComparer keyComparer) - : this(0, keyComparer) - { } - - public ColaRangeSet(int capacity, IComparer keyComparer) - { - m_comparer = keyComparer ?? Comparer.Default; - if (capacity == 0) capacity = 15; - m_items = new ColaStore(capacity, new BeginKeyComparer(m_comparer)); - m_bounds = new Entry(default(TKey), default(TKey)); - } - - [Conditional("ENFORCE_INVARIANTS")] - private void CheckInvariants() - { - } - - public int Count { get { return m_items.Count; } } - - public int Capacity { get { return m_items.Capacity; } } - - public IComparer Comparer { get { return m_comparer; } } - - public Entry Bounds { get { return m_bounds; } } - - private bool Resolve(Entry previous, Entry candidate) - { - - int c = m_comparer.Compare(previous.Begin, candidate.Begin); - if (c == 0) - { // they share the same begin key ! 
- - if (m_comparer.Compare(previous.End, candidate.End) < 0) - { // candidate replaces the previous ony - previous.ReplaceWith(candidate); - } - return true; - } - - if (c < 0) - { // b is to the right - if (m_comparer.Compare(previous.End, candidate.Begin) < 0) - { // there is a gap in between - return false; - } - // they touch or overlap - previous.Update(previous.Begin, Max(previous.End, candidate.End)); - return true; - } - else - { // b is to the left - if (m_comparer.Compare(candidate.End, previous.Begin) < 0) - { // there is a gap in between - return false; - } - // they touch or overlap - previous.Update(candidate.Begin, Max(previous.End, candidate.End)); - return true; - } - } - - private TKey Min(TKey a, TKey b) - { - return m_comparer.Compare(a, b) <= 0 ? a : b; - } - - private TKey Max(TKey a, TKey b) - { - return m_comparer.Compare(a, b) >= 0 ? a : b; - } - - public void Clear() - { - m_items.Clear(); - m_bounds.Update(default(TKey), default(TKey)); - } - - public void Mark(TKey begin, TKey end) - { - if (m_comparer.Compare(begin, end) >= 0) throw new InvalidOperationException("End key must be greater than the Begin key."); - - var entry = new Entry(begin, end); - Entry cursor; - - switch (m_items.Count) - { - case 0: - { // the list empty - - // no checks required - m_items.Insert(entry); - m_bounds.ReplaceWith(entry); - break; - } - - case 1: - { // there is only one value - - cursor = m_items[0]; - if (!Resolve(cursor, entry)) - { // no conflict - m_items.Insert(entry); - m_bounds.Update( - Min(entry.Begin, cursor.Begin), - Max(entry.End, cursor.End) - ); - } - else - { // merged with the previous range - m_bounds.ReplaceWith(cursor); - } - break; - } - default: - { - // check with the bounds first - - if (m_comparer.Compare(begin, m_bounds.End) > 0) - { // completely to the right - m_items.Insert(entry); - m_bounds.Update(m_bounds.Begin, end); - break; - } - if (m_comparer.Compare(end, m_bounds.Begin) < 0) - { // completely to the left - 
m_items.Insert(entry); - m_bounds.Update(begin, m_bounds.End); - break; - } - if (m_comparer.Compare(begin, m_bounds.Begin) <= 0 && m_comparer.Compare(end, m_bounds.End) >= 0) - { // overlaps with all the ranges - // note :if we are here, there was at least 2 items, so just clear everything - m_items.Clear(); - m_items.Insert(entry); - m_bounds.ReplaceWith(entry); - break; - } - - - // overlaps with existing ranges, we may need to resolve conflicts - int offset, level; - bool inserted = false; - - // once inserted, will it conflict with the previous entry ? - if ((level = m_items.FindPrevious(entry, true, out offset, out cursor)) >= 0) - { - if (Resolve(cursor, entry)) - { - entry = cursor; - inserted = true; - } - } - - // also check for potential conflicts with the next entries - while (true) - { - level = m_items.FindNext(entry, false, out offset, out cursor); - if (level < 0) break; - - if (inserted) - { // we already have inserted the key so conflicts will remove the next segment - if (Resolve(entry, cursor)) - { // next segment has been removed - //Console.WriteLine(" > folded with previous: " + entry); - m_items.RemoveAt(level, offset); - } - else - { - break; - } - } - else - { // we havent inserted the key yet, so in case of conflict, we will use the next segment's slot - if (Resolve(cursor, entry)) - { - //Console.WriteLine(" > merged in place: " + cursor); - inserted = true; - } - else - { - break; - } - } - } - - if (!inserted) - { // no conflict, we have to insert the new range - m_items.Insert(entry); - } - - m_bounds.Update( - Min(m_bounds.Begin, entry.Begin), - Max(m_bounds.End, entry.End) - ); - - break; - } - } - - //TODO: check constraints ! - } - - /// Checks if there is at least one range that contains the specified key - /// Key to test - /// True if the key is contained by one range; otherwise, false. 
- public bool ContainsKey(TKey key) - { - if (m_bounds.Contains(key, m_comparer)) - { - var entry = new Entry(key, key); - int offset, level = m_items.FindPrevious(entry, true, out offset, out entry); - return level >= 0 && entry.Contains(key, m_comparer); - } - return false; - } - - public ColaStore.Enumerator GetEnumerator() - { - return new ColaStore.Enumerator(m_items, reverse: false); - } - - IEnumerator IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() - { - return this.GetEnumerator(); - } - - [Conditional("DEBUG")] - //TODO: remove or set to internal ! - public void Debug_Dump() - { - Console.WriteLine("Dumping ColaRangeSet<" + typeof(TKey).Name + "> filled at " + (100.0d * this.Count / this.Capacity).ToString("N2") + "%"); - m_items.Debug_Dump(); - } - - public override string ToString() - { - if (m_items.Count == 0) return "{ }"; - return "{ " + String.Join(", ", this) + " }"; - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Collections/ColaStore.cs b/FoundationDB.Storage.Memory/Collections/ColaStore.cs deleted file mode 100644 index 96067d3af..000000000 --- a/FoundationDB.Storage.Memory/Collections/ColaStore.cs +++ /dev/null @@ -1,1195 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Runtime.InteropServices; - - public static class ColaStore - { - private const int NOT_FOUND = -1; - - private static readonly int[] MultiplyDeBruijnLowestBitPosition = new int[32] - { - 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8, - 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9 - }; - - private static readonly int[] MultiplyDeBruijnHighestBitPosition = new int[32] - { - 0, 9, 1, 10, 13, 21, 2, 29, 11, 14, 16, 18, 22, 25, 3, 30, - 8, 12, 20, 28, 15, 17, 24, 7, 19, 27, 23, 6, 26, 5, 4, 31 - }; - - internal static bool IsFree(int level, int count) - { - Contract.Requires(level >= 0 && count >= 0); - return (count & (1 << level)) == 0; - } - - internal static bool IsAllocated(int level, int count) - { - Contract.Requires(level >= 0 && count >= 0); - return (count & (1 << level)) != 0; - } - - /// Finds the level that holds an absolute index - /// Absolute index in a COLA array where 0 is the root, 1 is the first item of level 1, and so on - /// Receive the offset in the level that contains is located - /// Level that contains the specified location. 
- public static int FromIndex(int index, out int offset) - { - Contract.Requires(index >= 0); - - int level = HighestBit(index); - offset = index - (1 << level) + 1; - Contract.Ensures(level >= 0 && level < 31 && offset >= 0 && offset < (1 << level)); - return level; - } - - /// Convert a (level, offset) pair into the corresponding absolute index - /// Level of the location (0 for the root) - /// Offset within the level of the location - /// Absolute index where 0 is the root, 1 is the first item of level 1, and so on - public static int ToIndex(int level, int offset) - { - Contract.Requires(level >= 0 && level < 31 && offset >= 0 && offset < (1 << level)); - int index = (1 << level) - 1 + offset; - Contract.Ensures(index >= 0 && index < 1 << level); - return index; - } - - public static int LowestBit(int value) - { - uint v = (uint)value; - v = (uint)((v & -v) * 0x077CB531U); - - return MultiplyDeBruijnLowestBitPosition[v >> 27]; - } - - public static int HighestBit(int value) - { - // first round down to one less than a power of 2 - uint v = (uint)value; - v |= v >> 1; - v |= v >> 2; - v |= v >> 4; - v |= v >> 8; - v |= v >> 16; - - return MultiplyDeBruijnHighestBitPosition[(int)((v * 0x07C4ACDDU) >> 27)]; - } - - /// Computes the absolute index from a value offset (in the allocated levels) - /// Number of items in the COLA array - /// Offset of the value in the allocated levels of the COLA array, with 0 being the oldest (first item of the last allocated level) - /// Absolute index of the location where that value would be stored in the COLA array (from the top) - public static int MapOffsetToIndex(int count, int arrayIndex) - { - Contract.Requires(count >= 0 && arrayIndex >= 0 && arrayIndex < count); - - int offset; - int level = MapOffsetToLocation(count, arrayIndex, out offset); - return (1 << level) - 1 + offset; - } - - /// Computes the (level, offset) pair from a value offset (in the allocated levels) - /// Number of items in the COLA array - /// Offset of 
the value in the allocated levels of the COLA array, with 0 being the oldest (first item of the last allocated level) - /// Absolute index of the location where that value would be stored in the COLA array (from the top) - public static int MapOffsetToLocation(int count, int arrayIndex, out int offset) - { - if (count < 0) throw new ArgumentOutOfRangeException("count", "Count cannot be less than zero"); - if (arrayIndex < 0 || arrayIndex >= count) throw new ArgumentOutOfRangeException("arrayIndex", "Index is outside the array"); - - if (count == 0) - { // special case for the empty array - offset = 0; - return 0; - } - - // find the highest allocated level (note: 50% of values will be in this segment!) - int level = HighestBit(count); - int k = 1 << level; - int p = k - 1; - do - { - if ((count & k) != 0) - { // this level is allocated - if (arrayIndex < k) - { - offset = arrayIndex; - return level; - } - arrayIndex -= k; - } - k >>= 1; - --level; - p -= k; - } - while (k > 0); - - // should not happen ! 
- throw new InvalidOperationException(); - } - - public static int MapLocationToOffset(int count, int level, int offset) - { - Contract.Assert(count >= 0 && level >= 0 && offset >= 0 && offset < 1 << level); - if (count < 0) throw new ArgumentOutOfRangeException("count", "Count cannot be less than zero"); - - if (count == 0) - { // special case for the empty array - return 0; - } - - // compute the base location of the selected level - int p = 0; - int k = 1; - for (int i = 0; i < level; i++) - { - if ((count & k) != 0) - { - p += k; - } - k <<= 1; - } - - return p + offset; - } - - internal static void ThrowDuplicateKey(T value) - { - throw new InvalidOperationException(String.Format("Cannot insert '{0}' because the key already exists in the set", value)); - } - - internal static int BinarySearch(T[] array, int offset, int count, T value, IComparer comparer) - { - Contract.Assert(array != null && offset >= 0 && count >= 0 && comparer != null); - - // Instead of starting from the midle we will exploit the fact that, since items are usually inserted in order, the value is probably either to the left or the right of the segment. - // Also, since most activity happens in the top levels, the search array is probably very small (size 1, 2 or 4) - - if (count == 0) - { - // note: there should be no array of size 0, this is probably a bug ! - return ~offset; - } - - int end = offset - 1 + count; - int c; - - // compare with the last item - c = comparer.Compare(array[end], value); - if (c == 0) return end; - if (count == 1) - { - return c < 0 ? 
~(offset + 1) : ~offset; - } - if (c < 0) return ~(end + 1); - --end; - - // compare with the first - c = comparer.Compare(array[offset], value); - if (c == 0) return offset; - if (c > 0) return ~offset; - - int cursor = offset + 1; - while (cursor <= end) - { - int center = cursor + ((end - cursor) >> 1); - c = comparer.Compare(array[center], value); - if (c == 0) - { // the value is the center point - return center; - } - if (c < 0) - { // the value is after the center point - cursor = center + 1; - } - else - { // the value is before the center point - end = center - 1; - } - } - return ~cursor; - } - - /// Merge two values into level 1 - /// Segment for level 1 (should be of size 2) - /// Left value - /// Right value - /// Comparer to use - internal static void MergeSimple(T[] segment, T left, T right, IComparer comparer) - { - Contract.Requires(segment != null && segment.Length == 2); - - int c = comparer.Compare(left, right); - if (c == 0) ThrowDuplicateKey(right); - else if (c < 0) - { - segment[0] = left; - segment[1] = right; - } - else - { - segment[0] = right; - segment[1] = left; - } - } - - /// Replace a value in a segment with another value, while keeping it sorted - /// Segment that will received the new value - /// Offset of replaced value in the segment - /// New value to insert into the segment - /// Comparer to use - internal static void MergeInPlace(T[] segment, int offset, T value, IComparer comparer) - { - Contract.Requires(segment != null && offset >= 0 && comparer != null); - - // Find the spot where the new value should be inserted - int p = BinarySearch(segment, 0, segment.Length, value, comparer); - if (p >= 0) - { // this is not supposed to happen! - ThrowDuplicateKey(value); - } - - int index = (~p); - Contract.Assert(index >= 0 && index <= segment.Length); - if (index == offset) - { // merge in place - - // _______ offset == index - // V - // before: [...] X [...] - // after: [...] O [...] 
- - segment[index] = value; - return; - } - if (index < offset) - { // shift right - - // ____________ index - // / _______ offset - // V V - // before: [...] # # # X [...] - // after: [...] O # # # [...] - - Array.Copy(segment, index, segment, index + 1, offset - index); - segment[index] = value; - } - else - { // shift left - - --index; - - // ____________ offset - // / _______ index - // V V - // before: [...] X # # # [...] - // after: [...] # # # O [...] - - Array.Copy(segment, offset + 1, segment, offset, index - offset); - segment[index] = value; - } - } - - /// Spread the content of a level to all the previous levels into pieces, except the first item that is returned - /// Level that should be broken into chunks - /// List of all the levels - /// The last element of the broken level - /// The broken segment will be cleared - internal static T SpreadLevel(int level, T[][] inputs) - { - Contract.Requires(level >= 0 && inputs != null && inputs.Length > level); - - // Spread all items in the target level - except the first - to the previous level (which should ALL be EMPTY) - - var source = inputs[level]; - - int p = 1; - for (int i = level - 1; i >= 0; i--) - { - var segment = inputs[i]; - Contract.Assert(segment != null); - int n = segment.Length; - Array.Copy(source, p, segment, 0, n); - p += n; - } - Contract.Assert(p == source.Length); - T res = source[0]; - Array.Clear(source, 0, source.Length); - return res; - } - - /// Merge two ordered segments of level N into an ordered segment of level N + 1 - /// Destination, level N + 1 (size 2^(N+1)) - /// First level N segment (size 2^N) - /// Second level N segment (taille 2^N) - /// Comparer used for the merge - internal static void MergeSort(T[] output, T[] left, T[] right, IComparer comparer) - { - Contract.Requires(output != null && left != null && right != null && comparer != null); - Contract.Requires(left.Length > 0 && output.Length == left.Length * 2 && right.Length == left.Length); - - int c, n = 
left.Length; - // note: The probality to merge an array of size N is rougly 1/N (with N being a power of 2), - // which means that we will spend roughly half the time merging arrays of size 1 into an array of size 2.. - - if (n == 1) - { // Most frequent case (p=0.5) - var l = left[0]; - var r = right[0]; - if ((c = comparer.Compare(l, r)) < 0) - { - output[0] = l; - output[1] = r; - } - else - { - Contract.Assert(c != 0); - output[0] = r; - output[1] = l; - } - return; - } - - if (n == 2) - { // second most frequent case (p=0.25) - - // We are merging 2 pairs of ordered values into an array of size 4 - if (comparer.Compare(left[1], right[0]) < 0) - { // left << right - output[0] = left[0]; - output[1] = left[1]; - output[2] = right[0]; - output[3] = right[1]; - return; - } - - if (comparer.Compare(right[1], left[0]) < 0) - { // right << left - output[0] = right[0]; - output[1] = right[1]; - output[2] = left[0]; - output[3] = left[1]; - return; - } - - // left and right intersects - // => just use the regular merge sort below - } - - int pLeft = 0; - int pRight = 0; - int pOutput = 0; - - while (true) - { - if ((c = comparer.Compare(left[pLeft], right[pRight])) < 0) - { // left is smaller than right => advance - - output[pOutput++] = left[pLeft++]; - - if (pLeft >= n) - { // the left array is done, copy the remainder of the right array - if (pRight < n) Array.Copy(right, pRight, output, pOutput, n - pRight); - return; - } - } - else - { // right is smaller or equal => advance - Contract.Assert(c != 0); - - output[pOutput++] = right[pRight++]; - - if (pRight >= n) - { // the right array is done, copy the remainder of the left array - if (pLeft < n) Array.Copy(left, pLeft, output, pOutput, n - pLeft); - return; - } - } - } - - } - - internal static int[] CreateCursors(int count, out int min) - { - min = LowestBit(count); - var cursors = new int[HighestBit(count) + 1]; - int k = 1; - for (int i = 0; i < cursors.Length; i++) - { - if (i < min || (count & k) == 0) 
cursors[i] = NOT_FOUND; - k <<= 1; - } - return cursors; - } - - /// Search for the smallest element that is larger than a reference element - /// Reference element - /// If true, return the position of the value itself if it is found. If false, return the position of the closest value that is smaller. - /// Receive the offset within the level of the next element, or 0 if not found - /// Receive the value of the next element, or default(T) if not found - /// Level of the next element, or -1 if was already the largest - public static int FindNext(T[][] levels, int count, T value, bool orEqual, IComparer comparer, out int offset, out T result) - { - int level = NOT_FOUND; - T min = default(T); - int minOffset = 0; - - // scan each segment for a value that would be larger, keep track of the smallest found - for (int i = 0; i < levels.Length; i++) - { - if (ColaStore.IsFree(i, count)) continue; - - var segment = levels[i]; - int pos = ColaStore.BinarySearch(segment, 0, segment.Length, value, comparer); - if (pos >= 0) - { // we found an exact match in this segment - if (orEqual) - { - offset = pos; - result = segment[pos]; - return i; - } - - // the next item in this segment should be larger - ++pos; - } - else - { // we found where it would be stored in this segment - pos = ~pos; - } - - if (pos < segment.Length) - { - if (level == NOT_FOUND || comparer.Compare(segment[pos], min) < 0) - { // we found a better candidate - min = segment[pos]; - level = i; - minOffset = pos; - } - } - } - - offset = minOffset; - result = min; - return level; - } - - /// Search for the largest element that is smaller than a reference element - /// Reference element - /// If true, return the position of the value itself if it is found. If false, return the position of the closest value that is smaller. 
- /// Receive the offset within the level of the previous element, or 0 if not found - /// Receive the value of the previous element, or default(T) if not found - /// Level of the previous element, or -1 if was already the smallest - public static int FindPrevious(T[][] levels, int count, T value, bool orEqual, IComparer comparer, out int offset, out T result) - { - int level = NOT_FOUND; - T max = default(T); - int maxOffset = 0; - - // scan each segment for a value that would be smaller, keep track of the smallest found - for (int i = 0; i < levels.Length; i++) - { - if (ColaStore.IsFree(i, count)) continue; - - var segment = levels[i]; - int pos = ColaStore.BinarySearch(segment, 0, segment.Length, value, comparer); - // the previous item in this segment should be smaller - if (pos < 0) - { // it is not - pos = ~pos; - } - else if (orEqual) - { // we found an exact match in this segment - offset = pos; - result = segment[pos]; - return i; - } - - --pos; - - if (pos >= 0) - { - if (level == NOT_FOUND || comparer.Compare(segment[pos], max) > 0) - { // we found a better candidate - max = segment[pos]; - level = i; - maxOffset = pos; - } - } - } - - offset = maxOffset; - result = max; - return level; - } - - public static IEnumerable FindBetween(T[][] levels, int count, T begin, bool beginOrEqual, T end, bool endOrEqual, int limit, IComparer comparer) - { - if (limit > 0) - { - for (int i = 0; i < levels.Length; i++) - { - if (ColaStore.IsFree(i, count)) continue; - - var segment = levels[i]; - - int to = ColaStore.BinarySearch(segment, 0, segment.Length, end, comparer); - if (to >= 0) - { - if (!endOrEqual) - { - to--; - } - } - else - { - to = ~to; - } - if (to < 0 || to >= segment.Length) continue; - - int from = ColaStore.BinarySearch(segment, 0, segment.Length, begin, comparer); - if (from >= 0) - { - if (!beginOrEqual) - { - ++from; - } - } - else - { - from = ~from; - } - if (from >= segment.Length) continue; - - if (from > to) continue; - - for (int j = from; 
j <= to && limit > 0; j++) - { - yield return segment[j]; - --limit; - } - if (limit <= 0) break; - } - } - } - - /// Find the next smallest key pointed by a list of cursors - /// List of source arrays - /// Lit of cursors in source arrays - /// Key comparer - /// Received the next smallest element if the method returns true; otherwise set to default(T) - /// The index of the level that returned the value, or -1 if all levels are done - internal static int IterateFindNext(T[][] inputs, int[] cursors, int min, int max, IComparer comparer, out T result) - { - Contract.Requires(inputs != null && cursors != null && min >= 0 && max >= min && comparer != null); - - int index = NOT_FOUND; - int pos = NOT_FOUND; - var next = default(T); - - // look for the smallest element - // note: we scan from the bottom up, because older items are usually in the lower levels - for (int i = max; i >= min; i--) - { - int cursor = cursors[i]; - if (cursor < 0) continue; - var segment = inputs[i]; - if (cursor >= segment.Length) continue; - var x = segment[cursor]; - if (index == NOT_FOUND || comparer.Compare(x, next) < 0) - { // found a candidate - index = i; - pos = cursor; - next = x; - } - } - - if (index != NOT_FOUND) - { - ++pos; - if (pos >= (1 << index)) - { // this array is done - pos = NOT_FOUND; - } - cursors[index] = pos; - result = next; - return index; - } - - result = default(T); - return NOT_FOUND; - } - - /// Find the next largest key pointed by a list of cursors - /// List of source arrays - /// Lit of cursors in source arrays - /// Key comparer - /// Received the next largest element if the method returns true; otherwise set to default(T) - /// The index of the level that returned the value, or -1 if all levels are done - internal static int IterateFindPrevious(T[][] inputs, int[] cursors, int min, int max, IComparer comparer, out T result) - { - Contract.Requires(inputs != null && cursors != null && min >= 0 && max >= min && comparer != null); - // NOT TESTED !!!!! 
- // NOT TESTED !!!!! - // NOT TESTED !!!!! - - //Trace.WriteLine("IterateFindPrevious(" + min + ".." + max + ")"); - - int index = NOT_FOUND; - int pos = NOT_FOUND; - var next = default(T); - - // look for the largest element - // note: we scan from the top down, because more recent items are usually in the upper levels - for (int i = min; i >= max; i--) - { - int cursor = cursors[i]; - if (cursor < 0) continue; - var segment = inputs[i]; - if (cursor >= segment.Length) continue; - var x = segment[cursor]; - if (index == NOT_FOUND || comparer.Compare(x, next) < 0) - { // found a candidate - index = i; - pos = cursor; - next = x; - } - } - - if (index != NOT_FOUND) - { - --pos; - if (pos < 0) - { // this array is done - pos = NOT_FOUND; - } - cursors[index] = pos; - result = next; - return index; - } - - result = default(T); - return NOT_FOUND; - } - - /// Iterate over all the values in the set, using their natural order - internal static IEnumerable IterateOrdered(int count, T[][] inputs, IComparer comparer, bool reverse) - { - Contract.Requires(count >= 0 && inputs != null && comparer != null && count < (1 << inputs.Length)); - // NOT TESTED !!!!! - // NOT TESTED !!!!! - // NOT TESTED !!!!! - - Contract.Requires(count >= 0 && inputs != null && comparer != null); - - // We will use a list of N cursors, set to the start of their respective levels. - // A each turn, look for the smallest key referenced by the cursors, return that one, and advance its cursor. 
- // Once a cursor is past the end of its level, it is set to -1 and is ignored for the rest of the operation - - if (count > 0) - { - // setup the cursors, with the empty levels already marked as completed - var cursors = new int[inputs.Length]; - for (int i = 0; i < cursors.Length; i++) - { - if (ColaStore.IsFree(i, count)) - { - cursors[i] = NOT_FOUND; - } - } - - // pre compute the first/last active level - int min = ColaStore.LowestBit(count); - int max = ColaStore.HighestBit(count); - - while (count-- > 0) - { - T item; - int pos; - if (reverse) - { - pos = IterateFindPrevious(inputs, cursors, min, max, comparer, out item); - } - else - { - pos = IterateFindNext(inputs, cursors, min, max, comparer, out item); - } - - if (pos == NOT_FOUND) - { // we unexpectedly ran out of stuff before the end ? - //TODO: should we fail or stop here ? - throw new InvalidOperationException("Not enough data in the source arrays to fill the output array"); - } - yield return item; - - // update the bounds if needed - if (pos == max) - { - if (cursors[max] == NOT_FOUND) --max; - } - else if (pos == min) - { - if (cursors[min] == NOT_FOUND) ++min; - } - } - } - } - - /// Iterate over all the values in the set, without any order guarantee - internal static IEnumerable IterateUnordered(int count, T[][] inputs) - { - Contract.Requires(count >= 0 && inputs != null && count < (1 << inputs.Length)); - - for (int i = 0; i < inputs.Length; i++) - { - if (ColaStore.IsFree(i, count)) continue; - var segment = inputs[i]; - Contract.Assert(segment != null && segment.Length == 1 << i); - for (int j = 0; j < segment.Length; j++) - { - yield return segment[j]; - } - } - } - - internal static void ThrowStoreVersionChanged() - { - throw new InvalidOperationException("The version of the store has changed. 
This usually means that the collection has been modified while it was being enumerated"); - } - - [StructLayout(LayoutKind.Sequential)] - public struct Enumerator : IEnumerator, IDisposable - { - private readonly ColaStore m_items; - private readonly bool m_reverse; - private int[] m_cursors; - private T m_current; - private int m_min; - private int m_max; - - internal Enumerator(ColaStore items, bool reverse) - { - m_items = items; - m_reverse = reverse; - m_cursors = ColaStore.CreateCursors(m_items.Count, out m_min); - m_max = m_cursors.Length - 1; - m_current = default(T); - } - - public bool MoveNext() - { - int pos; - if (m_reverse) - { - pos = ColaStore.IterateFindPrevious(m_items.Levels, m_cursors, m_min, m_max, m_items.Comparer, out m_current); - } - else - { - pos = ColaStore.IterateFindNext(m_items.Levels, m_cursors, m_min, m_max, m_items.Comparer, out m_current); - } - - if (pos == NOT_FOUND) - { // that was the last item! - return false; - } - - // update the bounds if necessary - if (pos == m_max) - { - if (m_cursors[m_max] == NOT_FOUND) --m_max; - } - else if (pos == m_min) - { - if (m_cursors[m_min] == NOT_FOUND) ++m_min; - } - - return true; - } - - public T Current - { - get { return m_current; } - } - - public bool Reverse - { - get { return m_reverse; } - } - - public void Dispose() - { - // we are a struct that can be copied by value, so there is no guarantee that Dispose() will accomplish anything anyway... 
- } - - object System.Collections.IEnumerator.Current - { - get { return m_current; } - } - - void System.Collections.IEnumerator.Reset() - { - m_cursors = ColaStore.CreateCursors(m_items.Count, out m_min); - m_max = m_cursors.Length - 1; - m_current = default(T); - } - - } - - public sealed class Iterator - { - private const int DIRECTION_PREVIOUS = -1; - private const int DIRECTION_SEEK = 0; - private const int DIRECTION_NEXT = +1; - - private readonly T[][] m_levels; - private readonly int m_count; - private readonly IComparer m_comparer; - private readonly int[] m_cursors; - private readonly int m_min; - private T m_current; - private int m_currentLevel; - private int m_direction; - - internal Iterator(T[][] levels, int count, IComparer comparer) - { - Contract.Requires(levels != null && count >= 0 && comparer != null); - m_levels = levels; - m_count = count; - m_comparer = comparer; - - m_cursors = ColaStore.CreateCursors(m_count, out m_min); - } - - [Conditional("FULLDEBUG")] - private void Debug_Dump(string label = null) - { - Trace.WriteLine("* Cursor State: " + label); - for (int i = m_min; i < m_cursors.Length; i++) - { - if (ColaStore.IsFree(i, m_count)) - { - Trace.WriteLine(" - L" + i + ": unallocated"); - continue; - } - - int p = m_cursors[i]; - Trace.WriteLine(" - L" + i + ": " + p + " [" + (1 << i) + "] = " + (p < 0 ? "" : (p >= (1 << i)) ? 
"" : ("" + m_levels[i][p]))); - } - Trace.WriteLine(" > Current at " + m_currentLevel + " : " + m_current); - } - - /// Set the cursor just before the first key in the store - public void SeekBeforeFirst() - { - var cursors = m_cursors; - for (int i = m_min; i < cursors.Length; i++) - { - cursors[i] = -1; - } - m_currentLevel = NOT_FOUND; - m_current = default(T); - m_direction = DIRECTION_SEEK; - } - - /// Set the cursor just before the first key in the store - public void SeekAfterLast() - { - var cursors = m_cursors; - for (int i = m_min; i < cursors.Length; i++) - { - cursors[i] = 1 << i; - } - m_currentLevel = NOT_FOUND; - m_current = default(T); - m_direction = DIRECTION_SEEK; - } - - /// Seek the cursor to the smallest key in the store - public bool SeekFirst() - { - T min = default(T); - int minLevel = NOT_FOUND; - - var cursors = m_cursors; - - for (int i = m_min; i < cursors.Length; i++) - { - if (IsFree(i, m_count)) continue; - - cursors[i] = 0; - var segment = m_levels[i]; - Contract.Assert(segment != null && segment.Length == 1 << i); - if (minLevel < 0 || m_comparer.Compare(segment[0], min) < 0) - { - min = segment[0]; - minLevel = i; - } - } - - m_current = min; - m_currentLevel = minLevel; - m_direction = DIRECTION_SEEK; - - Debug_Dump("SeekFirst"); - - return minLevel >= 0; - } - - /// Seek the cursor to the largest key in the store - public bool SeekLast() - { - T max = default(T); - int maxLevel = NOT_FOUND; - - var cursors = m_cursors; - - for (int i = m_min; i < cursors.Length; i++) - { - if (IsFree(i, m_count)) continue; - var segment = m_levels[i]; - Contract.Assert(segment != null && segment.Length == 1 << i); - int pos = segment.Length - 1; - cursors[i] = pos; - if (maxLevel < 0 || m_comparer.Compare(segment[pos], max) > 0) - { - max = segment[segment.Length - 1]; - maxLevel = i; - } - } - - m_current = max; - m_currentLevel = maxLevel; - m_direction = DIRECTION_SEEK; - - Debug_Dump("SeekLast"); - - return maxLevel >= 0; - } - - - - /// 
Seek the iterator at the smallest value that is closest to the desired item - /// Item to seek to - /// If true, then seek to this item is found. If false, seek to the previous value - /// If true, the cursors are setup for moving backward (by calling Previous). Is false, the cursors are set up for moving forward (by calling Next) - public bool Seek(T item, bool orEqual) - { - // Goal: we want to find the item key itself (if it exists and orEqual==true), or the max key that is stricly less than item - // We can use BinarySearch to look in each segment for where that key would be, but we have to compensate for the fact that BinarySearch looks for the smallest key that is greater than or equal to the search key. - - // Also, the iterator can be used to move: - // - forward: from the current location, find the smallest key that is greater than the current cursor position - // - backward: from the current location, find the largest key that is smaller than the current cursor position - - T max = default(T); - int maxLevel = NOT_FOUND; - bool exact = false; - - var cursors = m_cursors; - var count = m_count; - - for (int i = m_min; i < cursors.Length; i++) - { - if (IsFree(i, count)) continue; - - var segment = m_levels[i]; - - int pos = BinarySearch(segment, 0, segment.Length, item, m_comparer); - - if (pos >= 0) - { // we found a match in this segment - - if (orEqual) - { // the item exist and is allowed - max = segment[pos]; - maxLevel = i; - exact = true; // stop checking for the max in other levels - } - else - { // the previous value is by definition less than 'item' - --pos; - } - } - else - { // not in this segment - - pos = ~pos; // <- position of where item would be place in this segment == position of the first item that is larger than item - // since segment[pos] > item, and item is not in segment, then segment[pos - 1] < item - --pos; - } - - // bound check - - if (pos < 0) - { // the value would be before this segment - cursors[i] = 0; - } - else if (pos 
>= segment.Length) - { // the value would be after this segment - cursors[i] = segment.Length; - } - else - { - cursors[i] = pos; - if (!exact && (maxLevel < 0 || m_comparer.Compare(segment[pos], max) > 0)) - { - max = segment[pos]; - maxLevel = i; - } - } - } - - m_currentLevel = maxLevel; - m_current = max; - m_direction = DIRECTION_SEEK; - Debug_Dump("Seek"); - return maxLevel >= 0; - } - - /// Move the cursor the the smallest value that is greater than the current value - public bool Next() - { - // invalid position, or no more values - if (m_currentLevel < 0) return false; - - var cursors = m_cursors; - var count = m_count; - - T prev = m_current; - T min = default(T); - int minLevel = NOT_FOUND; - int pos; - - if (m_direction >= DIRECTION_SEEK) - { // we know that the current position CANNOT be the next value, so increment that cursor - cursors[m_currentLevel]++; - Debug_Dump("Next:continue"); - } - else - { // previous call was a Previous() - // we know that the current is the largest value of all the current cursors. Since we want even larger than that, we have to increment ALL the cursors - for (int i = m_min; i < cursors.Length; i++) - { - if (!IsFree(i, count) && ((pos = cursors[i]) < m_levels[i].Length)) cursors[i] = pos + 1; - } - Debug_Dump("Next:reverse"); - } - - for (int i = m_min; i < cursors.Length; i++) - { - if (IsFree(i, count)) continue; - - pos = cursors[i]; - if (pos < 0) continue; //?? 
- - var segment = m_levels[i]; - - T x = default(T); - while(pos < segment.Length && m_comparer.Compare((x = segment[pos]), prev) < 0) - { // cannot be less than the previous value - cursors[i] = ++pos; - } - if (pos >= segment.Length) continue; - - if (minLevel < 0 || m_comparer.Compare(x, min) < 0) - { // new minimum - min = x; - minLevel = i; - } - } - - m_current = min; - m_currentLevel = minLevel; - m_direction = DIRECTION_NEXT; - return minLevel >= 0; - } - - /// Move the cursor the the largest value that is smaller than the current value - public bool Previous() - { - // invalid position, or no more values - if (m_currentLevel < 0) return false; - - var cursors = m_cursors; - var count = m_count; - - T prev = m_current; - T max = default(T); - int pos; - int maxLevel = NOT_FOUND; - - if (m_direction <= DIRECTION_SEEK) - { // we know that the current position CANNOT be the next value, so decrement that cursor - cursors[m_currentLevel]--; - Debug_Dump("Previous:continue"); - } - else - { // previous call was a call to Seek(), or Next() - // we know that the current is the smallest value of all the current cursors. Since we want even smaller than that, we have to decrement ALL the cursors - for (int i = m_min; i < cursors.Length; i++) - { - if (!IsFree(i, count) && ((pos = cursors[i]) >= 0)) cursors[i] = pos - 1; - } - Debug_Dump("Previous:reverse"); - } - - for (int i = m_min; i < cursors.Length; i++) - { - if (IsFree(i, count)) continue; - - pos = cursors[i]; - var segment = m_levels[i]; - if (pos >= segment.Length) continue; //?? 
- - T x = default(T); - while (pos >= 0 && m_comparer.Compare((x = segment[pos]), prev) > 0) - { // cannot be more than the previous value - cursors[i] = --pos; - } - if (pos < 0) continue; - - if (maxLevel < 0 || m_comparer.Compare(x, max) > 0) - { // new maximum - max = x; - maxLevel = i; - } - } - - m_current = max; - m_currentLevel = maxLevel; - m_direction = DIRECTION_PREVIOUS; - return maxLevel >= 0; - } - - /// Value of the current entry - public T Current - { - get { return m_current; } - } - - /// Checks if the current position of the iterator is valid - public bool Valid - { - get { return m_currentLevel >= 0; } - } - - /// Direction of the last operation - public int Direction - { - get { return m_direction; } - } - - } - - } -} \ No newline at end of file diff --git a/FoundationDB.Storage.Memory/Collections/ColaStore`1.cs b/FoundationDB.Storage.Memory/Collections/ColaStore`1.cs deleted file mode 100644 index 596e0df74..000000000 --- a/FoundationDB.Storage.Memory/Collections/ColaStore`1.cs +++ /dev/null @@ -1,1115 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -// enables consitency checks after each operation to the set -#undef ENFORCE_INVARIANTS - -namespace FoundationDB.Storage.Memory.Core -{ - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Globalization; - using System.Linq; - using System.Runtime.CompilerServices; - - /// Store elements in a list of ordered levels - /// Type of elements stored in the set - public sealed class ColaStore - { - - #region Documentation - - // Based on http://supertech.csail.mit.edu/papers/sbtree.pdf (COLA) - - /* - The cache-oblivious lookahead array (COLA) is similar to the binomial list structure [9] of Bentley and Saxe. It consists of ⌈log2 N⌉ arrays, - or levels, each of which is either completely full or completely empty. 
The kth array is of size 2^k and the arrays are stored contiguously in memory. - - The COLA maintains the following invariants: - 1. The kth array contains items if and only if the kth least significant bit of the binary representation of N is a 1. - 2. Each array contains its items in ascending order by key - */ - - // DEFINITIONS - // - // "Level" is the index in the list of segments with level 0 being the top (or root) - // "Segment" is an array whose length is equal to 2^i (where i is the "level" of the segment). - // "Doubling Array" means that each segment has double the length of its predecessor - // "Cache Oblivious" means that the algorithm is not tuned for a specific CPU cache size (L1, L2, ou block size on disk), and amortize the cost of insertion over the lifespan of the set. - // - // INVARIANTS: - // - // * Each segment is twice the size of the previous segment, i.e.: m_levels[i].Length == 1 << i - // 0 [ ] 1 - // 1 [ , ] 2 - // 2 [ , , , ] 4 - // 3 [ , , , , , , , ] 8 - // 4 [ , , , , , , , , , , , , , , , ] 16 - // ... - // * A segment is either EMPTY, or completely FULL - // legal: [ , , , ] or [1,2,3,4] - // illegal: [1,2,3, ] - // * A segment has all its elements sorted - // legal: [3,12,42,66] - // illegal: [12,66,42,3] - // - // NOTES: - // - // - 50% of all inserts will always be done on the root (level 0), so will be O(1) - // - 87.5% of all inserts will only touch levels 0, 1 and 2, which should be contiguous in memory - // - For random insertions, it is difficult to predict in which level a specific value will be found, except that older values are towards the bottom, and younger values are towards the top. 
- // - A range of values (ex: "from 10 to 20") can have its elements scattered in multiple segments - // - If all inserts are ordered, then all items of level N will be sorted after all the items of level N + 1 - // - Most inserts are usually pretty fast, but every times the count goes to the next power of 2, the duration will be more and more noticeable (ie : the (2^N)th INSERT will have to merge (2^N) values) - // - // COST - // - // The cost for inserting N values is about N.Log2(N) comparisons - // - This is amortized to Log2(N) per insert, which means that insertion is O(log(N)) - // - This means that N should stay relatively low (ideally under 2^10 items) - - #endregion - - private const int INITIAL_LEVELS = 5; // 5 initial levels will pre-allocate space for 31 items - private const int MAX_SPARE_ORDER = 6; // 6 levels of spares will satisfy ~98.4% of all insertions, while only allocating the space for 63 items (~500 bytes for reference types) - private const int NOT_FOUND = -1; - - /// Number of elements in the store - private volatile int m_count; - - /// Array of all the segments making up the levels - private T[][] m_levels; - - /// Shortcut to level 0 (of size 1) - private T[] m_root; - - /// List of spare temporary buffers, used during merging - private T[][] m_spares; -#if ENFORCE_INVARIANTS - private bool[] m_spareUsed; -#endif - - /// Key comparer - private readonly IComparer m_comparer; - - #region Constructors... 
- - /// Allocates a new store - /// Initial capacity, or 0 for the default capacity - /// Comparer used to order the elements - public ColaStore(int capacity, IComparer comparer) - { - if (capacity < 0) throw new ArgumentOutOfRangeException("capacity", "Capacity cannot be less than zero."); - if (comparer == null) throw new ArgumentNullException("comparer"); - Contract.EndContractBlock(); - - int levels; - if (capacity == 0) - { // use the default capacity - levels = INITIAL_LEVELS; - } - else - { // L levels will only store (2^L - 1) - // note: there is no real penalty if the capacity was not correctly estimated, appart from the fact that all levels will not be contiguous in memory - // 1 => 1 - // 2..3 => 2 - // 4..7 => 3 - levels = ColaStore.HighestBit(capacity) + 1; - } - // allocating more than 31 levels would mean having an array of length 2^31, which is not possible - if (levels >= 31) throw new ArgumentOutOfRangeException("capacity", "Cannot allocate more than 30 levels"); - - // pre-allocate the segments and spares at the same time, so that they are always at the same memory location - var segments = new T[levels][]; - var spares = new T[MAX_SPARE_ORDER][]; - for (int i = 0; i < segments.Length; i++) - { - segments[i] = new T[1 << i]; - if (i < spares.Length) spares[i] = new T[1 << i]; - } - - m_levels = segments; - m_root = segments[0]; - m_spares = spares; -#if ENFORCE_INVARIANTS - m_spareUsed = new bool[spares.Length]; -#endif - m_comparer = comparer; - } - - [Conditional("ENFORCE_INVARIANTS")] - private void CheckInvariants() - { - Contract.Assert(m_count >= 0, "Count cannot be less than zero"); - Contract.Assert(m_levels != null, "Storage array should not be null"); - Contract.Assert(m_levels.Length > 0, "Storage array should always at least contain one level"); - Contract.Assert(object.ReferenceEquals(m_root, m_levels[0]), "The root should always be the first level"); - Contract.Assert(m_count < 1 << m_levels.Length, "Count should not exceed the 
current capacity"); - - for (int i = 0; i < m_levels.Length; i++) - { - var segment = m_levels[i]; - Contract.Assert(segment != null, "All segments should be allocated in memory"); - Contract.Assert(segment.Length == 1 << i, "The size of a segment should be 2^LEVEL"); - - if (IsFree(i)) - { // All unallocated segments SHOULD be filled with default(T) - for (int j = 0; j < segment.Length; j++) - { - if (!EqualityComparer.Default.Equals(segment[j], default(T))) - { - if (Debugger.IsAttached) { Debug_Dump(); Debugger.Break(); } - Contract.Assert(false, String.Format("Non-zero value at offset {0} of unused level {1} : {2}", j, i, String.Join(", ", segment))); - } - } - } - else - { // All allocated segments SHOULD be sorted - T previous = segment[0]; - for (int j = 1; j < segment.Length; j++) - { - T x = segment[j]; - if (m_comparer.Compare(previous, x) >= 0) - { - if (Debugger.IsAttached) { Debug_Dump(); Debugger.Break(); } - Contract.Assert(false, String.Format("Unsorted value {3} at offset {0} of allocated level {1} : {2}", j, i, String.Join(", ", segment), segment[j])); - } - previous = segment[j]; - } - } - - if (i < m_spares.Length) - { -#if ENFORCE_INVARIANTS - Contract.Assert(!m_spareUsed[i], "A spare level wasn't returned after being used!"); -#endif - var spare = m_spares[i]; - if (spare == null) continue; - // All spare segments SHOULD be filled with default(T) - for (int j = 0; j < spare.Length; j++) - { - if (!EqualityComparer.Default.Equals(spare[j], default(T))) - { - if (Debugger.IsAttached) { Debug_Dump(); Debugger.Break(); } - Contract.Assert(false, String.Format("Non-zero value at offset {0} of spare level {1} : {2}", j, i, String.Join(", ", spare))); - } - } - - } - } - } - - #endregion - - #region Public Properties... - - /// Gets the number of elements in the store. - public int Count - { - get { return m_count; } - } - - /// Gets the current capacity of the store. 
- public int Capacity - { - // note: the capacity is always 2^L - 1 where L is the number of levels - get { return m_levels == null ? 0 : (1 << m_levels.Length) - 1; } - } - - /// Gets the comparer used to sort the elements in the store - public IComparer Comparer - { - get { return m_comparer; } - } - - /// Gets the current number of levels - /// Note that the last level may not be currently used! - public int Depth - { - get { return m_levels.Length; } - } - - /// Gets the index of the first currently allocated level - public int MinLevel - { - get { return ColaStore.HighestBit(m_count); } - } - - /// Gets the index of the last currently allocated level - public int MaxLevel - { - get { return ColaStore.HighestBit(m_count); } - } - - /// Gets the list of all levels - public T[][] Levels - { - get { return m_levels; } - } - - /// Returns the content of a level - /// Index of the level (0-based) - /// Segment that contains all the elements of that level - public T[] GetLevel(int level) - { - Contract.Assert(level >= 0 && level < m_levels.Length); - return m_levels[level]; - } - - /// Gets of sets the value store at the specified index - /// Absolute index in the vector-array - /// Value stored at that location, or default(T) if the location is in an unallocated level - public T this[int arrayIndex] - { - get - { - if (m_count == 1 && arrayIndex == 0) return m_root[0]; - return GetAt(arrayIndex); - } - set - { - SetAt(arrayIndex, value); - } - } - - #endregion - - #region Public Methods... - - /// Finds the location of an element in the array - /// Value of the element to search for. - /// Receives the offset of the element inside the level if found; otherwise, 0. - /// Level that contains the element if found; otherwise, -1. 
- public int Find(T value, out int offset, out T actualValue) - { - if ((m_count & 1) != 0) - { - // If someone gets the last inserted key, there is a 50% change that it is in the root - // (if not, it will the the last one of the first non-empty level) - if (m_comparer.Compare(value, m_root[0]) == 0) - { - offset = 0; - actualValue = m_root[0]; - return 0; - } - } - - var levels = m_levels; - for (int i = 1; i < levels.Length; i++) - { - if (IsFree(i)) - { // this segment is not allocated - continue; - } - - int p = ColaStore.BinarySearch(levels[i], 0, 1 << i, value, m_comparer); - if (p >= 0) - { - offset = p; - actualValue = levels[i][p]; - return i; - } - } - offset = 0; - actualValue = default(T); - return NOT_FOUND; - } - - /// Search for the smallest element that is larger than a reference element - /// Reference element - /// If true, return the position of the value itself if it is found. If false, return the position of the closest value that is smaller. - /// Receive the offset within the level of the next element, or 0 if not found - /// Receive the value of the next element, or default(T) if not found - /// Level of the next element, or -1 if was already the largest - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public int FindNext(T value, bool orEqual, out int offset, out T result) - { - return ColaStore.FindNext(m_levels, m_count, value, orEqual, m_comparer, out offset, out result); - } - - /// Search for the smallest element that is larger than a reference element - /// Reference element - /// If true, return the position of the value itself if it is found. If false, return the position of the closest value that is smaller. 
- /// Receive the offset within the level of the next element, or 0 if not found - /// Receive the value of the next element, or default(T) if not found - /// Level of the next element, or -1 if was already the largest - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public int FindNext(T value, bool orEqual, IComparer comparer, out int offset, out T result) - { - return ColaStore.FindNext(m_levels, m_count, value, orEqual, comparer ?? m_comparer, out offset, out result); - } - - /// Search for the largest element that is smaller than a reference element - /// Reference element - /// If true, return the position of the value itself if it is found. If false, return the position of the closest value that is smaller. - /// Receive the offset within the level of the previous element, or 0 if not found - /// Receive the value of the previous element, or default(T) if not found - /// Level of the previous element, or -1 if was already the smallest - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public int FindPrevious(T value, bool orEqual, out int offset, out T result) - { - return ColaStore.FindPrevious(m_levels, m_count, value, orEqual, m_comparer, out offset, out result); - } - - /// Search for the largest element that is smaller than a reference element - /// Reference element - /// If true, return the position of the value itself if it is found. If false, return the position of the closest value that is smaller. - /// Receive the offset within the level of the previous element, or 0 if not found - /// Receive the value of the previous element, or default(T) if not found - /// Level of the previous element, or -1 if was already the smallest - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public int FindPrevious(T value, bool orEqual, IComparer comparer, out int offset, out T result) - { - return ColaStore.FindPrevious(m_levels, m_count, value, orEqual, comparer ?? 
m_comparer, out offset, out result); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public IEnumerable FindBetween(T begin, bool beginOrEqual, T end, bool endOrEqual, int limit) - { - return ColaStore.FindBetween(m_levels, m_count, begin, beginOrEqual, end, endOrEqual, limit, m_comparer); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public IEnumerable FindBetween(T begin, bool beginOrEqual, T end, bool endOrEqual, int limit, IComparer comparer) - { - return ColaStore.FindBetween(m_levels, m_count, begin, beginOrEqual, end, endOrEqual, limit, comparer ?? m_comparer); - } - - /// Return the value stored at a specific location in the array - /// Absolute index in the vector-array - /// Value stored at this location, or default(T) if the level is not allocated - public T GetAt(int arrayIndex) - { - Contract.Assert(arrayIndex >= 0 && arrayIndex <= this.Capacity); - - int offset; - int level = ColaStore.FromIndex(arrayIndex, out offset); - - return GetAt(level, offset); - } - - /// Returns the value at a specific location in the array - /// Index of the level (0-based) - /// Offset in the level (0-based) - /// Returns the value at this location, or default(T) if the level is not allocated - public T GetAt(int level, int offset) - { - Contract.Assert(level >= 0 && level < m_levels.Length && offset >= 0 && offset < 1 << level); - //TODO: check if level is allocated ? 
- - var segment = m_levels[level]; - Contract.Assert(segment != null && segment.Length == 1 << level); - return segment[offset]; - } - - /// Store a value at a specific location in the arrayh - /// Absolute index in the vector-array - /// Value to store - /// Previous value at that location - public T SetAt(int arrayIndex, T value) - { - Contract.Assert(arrayIndex >= 0 && arrayIndex <= this.Capacity); - - int offset; - int level = ColaStore.FromIndex(arrayIndex, out offset); - - return SetAt(level, offset, value); - } - - /// Overwrites a specific location in the array with a new value, and returns its previous value - /// Index of the level (0-based) - /// Offset in the level (0-based) - /// New value for this location - /// Previous value at this location - public T SetAt(int level, int offset, T value) - { - Contract.Assert(level >= 0 && level < m_levels.Length && offset >= 0 && offset < 1 << level); - //TODO: check if level is allocated ? - - var segment = m_levels[level]; - Contract.Assert(segment != null && segment.Length == 1 << level); - T previous = segment[offset]; - segment[offset] = value; - return previous; - } - - /// Clear the array - public void Clear() - { - for (int i = 0; i < m_levels.Length; i++) - { - if (i < MAX_SPARE_ORDER) - { - Array.Clear(m_levels[i], 0, 1 << i); - } - else - { - m_levels[i] = null; - } - } - m_count = 0; - if (m_levels.Length > MAX_SPARE_ORDER) - { - Array.Resize(ref m_levels, MAX_SPARE_ORDER); - } - - CheckInvariants(); - } - - /// Add a value to the array - /// Value to add to the array - /// If already exists in the array and is true, it will be overwritten with - /// If the value did not if the value was been added to the array, or false if it was already there. 
- public bool SetOrAdd(T value, bool overwriteExistingValue) - { - T _; - int offset, level = Find(value, out offset, out _); - if (level >= 0) - { - if (overwriteExistingValue) - { - m_levels[level][offset] = value; - } - return false; - } - - Insert(value); - return true; - } - - /// Insert a new element in the set, and returns its index. - /// Value to insert. Warning: if the value already exists, the store will be corrupted ! - /// The index is the absolute index, as if all the levels where a single, contiguous, array (0 = root, 7 = first element of level 3) - public void Insert(T value) - { - if (IsFree(0)) - { // half the inserts (when the count is even) can be done in the root - m_root[0] = value; - } - else if (IsFree(1)) - { // a quarter of the inserts only need to move the root and the value to level 1 - ColaStore.MergeSimple(m_levels[1], m_root[0], value, m_comparer); - m_root[0] = default(T); - } - else - { // we need to merge one or more levels - - var spare = GetSpare(0); - if (object.ReferenceEquals(spare, m_root)) Debugger.Break(); - Contract.Assert(spare != null && spare.Length == 1); - spare[0] = value; - MergeCascade(1, m_root, spare); - PutSpare(0, spare); - m_root[0] = default(T); - } - ++m_count; - - CheckInvariants(); - } - - /// Insert two elements in the set. - public void InsertItems(T first, T second) - { - Contract.Requires(m_comparer.Compare(first, second) != 0, "Cannot insert the same value twice"); - - if (IsFree(1)) - { - ColaStore.MergeSimple(m_levels[1], first, second, m_comparer); - } - else - { - //Console.WriteLine("InsertItems([2]) Cascade"); - var spare = GetSpare(1); - spare[0] = first; - spare[1] = second; - var segment = m_levels[1]; - MergeCascade(2, segment, spare); - segment[0] = default(T); - segment[1] = default(T); - PutSpare(1, spare); - } - m_count += 2; - - CheckInvariants(); - } - - /// Insert one or more new elements in the set. - /// Array of elements to insert. 
Warning: if a value already exist, the store will be corrupted ! - /// If true, the entries in are guaranteed to already be sorted (using the store default comparer). - /// The best performances are achieved when inserting a number of items that is a power of 2. The worst performances are when doubling the size of a store that is full. - /// Warning: if is true but is not sorted, or is sorted using a different comparer, then the store will become corrupted ! - /// - public void InsertItems(List values, bool ordered = false) - { - if (values == null) throw new ArgumentNullException("values"); - - int count = values.Count; - T[] segment, spare; - - if (count < 2) - { - if (count == 1) - { - Insert(values[0]); - } - return; - } - - if (count == 2) - { - if (IsFree(1)) - { - segment = m_levels[1]; - if (ordered) - { - segment[0] = values[0]; - segment[1] = values[1]; - } - else - { - ColaStore.MergeSimple(segment, values[0], values[1], m_comparer); - } - } - else - { - spare = GetSpare(1); - spare[0] = values[0]; - spare[1] = values[1]; - segment = m_levels[1]; - MergeCascade(2, segment, spare); - segment[0] = default(T); - segment[1] = default(T); - PutSpare(1, spare); - } - } - else - { - // Inserting a size that is a power of 2 is very simple: - // * either the corresponding level is empty, in that case we just copy the items and do a quicksort - // * or it is full, then we just need to do a cascade merge - // For non-power of 2s, we can split decompose them into a suite of power of 2s and insert them one by one - - int min = ColaStore.LowestBit(count); - int max = ColaStore.HighestBit(count); - - if (max >= m_levels.Length) - { // we need to allocate new levels - Grow(max); - } - - int p = 0; - for (int i = min; i <= max; i++) - { - if (ColaStore.IsFree(i, count)) continue; - - segment = m_levels[i]; - if (IsFree(i)) - { // the target level is free, we can copy and sort in place - values.CopyTo(p, segment, 0, segment.Length); - if (!ordered) Array.Sort(segment, 0, 
segment.Length, m_comparer); - p += segment.Length; - m_count += segment.Length; - } - else - { // the target level is used, we will have to do a cascade merge, using a spare - spare = GetSpare(i); - values.CopyTo(p, spare, 0, spare.Length); - if (!ordered) Array.Sort(spare, 0, spare.Length, m_comparer); - p += segment.Length; - MergeCascade(i + 1, segment, spare); - Array.Clear(segment, 0, segment.Length); - PutSpare(i, spare); - m_count += segment.Length; - } - } - Contract.Assert(p == count); - } - - CheckInvariants(); - } - - /// Remove the value at the specified location - /// Absolute index in the vector-array - /// Value that was removed - public T RemoveAt(int arrayIndex) - { - Contract.Requires(arrayIndex >= 0 && arrayIndex <= this.Capacity); - int offset, level = ColaStore.FromIndex(arrayIndex, out offset); - return RemoveAt(level, offset); - } - - /// Remove the value at the specified location - /// Index of the level (0-based) - /// Offset in the level (0-based) - /// Value that was removed - public T RemoveAt(int level, int offset) - { - Contract.Assert(level >= 0 && offset >= 0 && offset < 1 << level); - //TODO: check if level is allocated ? 
- - var segment = m_levels[level]; - Contract.Assert(segment != null && segment.Length == 1 << level); - T removed = segment[offset]; - - if (level == 0) - { // removing the last inserted value - segment[0] = default(T); - } - else if (level == 1) - { // split the first level in two - if (IsFree(0)) - { // move up to root - - // ex: remove 'b' at (1,1) and move the 'a' back to the root - // 0 [_] => [a] - // 1 [a,b] => [_,_] - - m_root[0] = segment[1 - offset]; - segment[0] = default(T); - segment[1] = default(T); - } - else - { // merge the root in missing spot - - // ex: remove 'b' at (1,1) and move the 'c' down a level - // N = 3 N = 2 - // 0 [c] => 0 [_] - // 1 [a,b] => 1 [a,c] - - ColaStore.MergeSimple(segment, m_root[0], segment[1 - offset], m_comparer); - m_root[0] = default(T); - } - } - else if ((m_count & 1) == 1) - { // Remove an item from an odd-numbered set - - // Since the new count will be even, we only need to merge the root in place with the level that is missing a spot - - // ex: replace the 'b' at (2,1) with the 'e' in the root - // N = 5 N = 4 - // 0 [e] => 0 [_] - // 1 [_,_] 1 [_,_] - // 2 [a,b,c,d] => 2 [a,c,d,e] - - ColaStore.MergeInPlace(segment, offset, m_root[0], m_comparer); - m_root[0] = default(T); - } - else - { - // we are missing a spot in out modified segment, that need to fill - // > we will take the first non empty segment, and break it in pieces - // > its last item will be used to fill the empty spot - // > the rest of its items will be spread to all the previous empty segments - - // find the first non empty segment that can be broken - int firstNonEmptyLevel = ColaStore.LowestBit(m_count); - - if (firstNonEmptyLevel == level) - { // we are the first level, this is easy ! - - // move the empty spot at the start - if (offset > 0) Array.Copy(segment, 0, segment, 1, offset); - - // and spread the rest to all the previous levels - ColaStore.SpreadLevel(level, m_levels); - //TODO: modify SpreadLevel(..) 
to take the offset of the value to skip ? - } - else - { // break that level, and merge its last item with the level that is missing one spot - - // break down this level - T tmp = ColaStore.SpreadLevel(firstNonEmptyLevel, m_levels); - - // merge its last item with the empty spot in the modified level - ColaStore.MergeInPlace(m_levels[level], offset, tmp, m_comparer); - } - } - - --m_count; - - if (m_levels.Length > MAX_SPARE_ORDER) - { // maybe release the last level if it is empty - ShrinkIfRequired(); - } - - CheckInvariants(); - - return removed; - } - - public bool RemoveItem(T item) - { - T _; - int offset, level = Find(item, out offset, out _); - if (level < 0) return false; - _ = RemoveAt(level, offset); - CheckInvariants(); - return true; - } - - public int RemoveItems(IEnumerable items) - { - if (items == null) throw new ArgumentNullException("items"); - - T _; - int count = 0; - - //TODO: optimize this !!!! - foreach(var item in items) - { - int offset, level = Find(item, out offset, out _); - if (level >= 0) - { - RemoveAt(level, offset); - ++count; - } - - } - CheckInvariants(); - return count; - } - - public void CopyTo(T[] array, int arrayIndex, int count) - { - if (array == null) throw new ArgumentNullException("array"); - if (arrayIndex < 0) throw new ArgumentOutOfRangeException("Index cannot be less than zero."); - if (count < 0) throw new ArgumentOutOfRangeException("Count cannot be less than zero."); - if (arrayIndex > array.Length || count > (array.Length - arrayIndex)) throw new ArgumentException("Destination array is too small"); - Contract.EndContractBlock(); - - int p = arrayIndex; - count = Math.Min(count, m_count); - foreach (var item in ColaStore.IterateOrdered(count, m_levels, m_comparer, false)) - { - array[p++] = item; - } - Contract.Assert(p == arrayIndex + count); - } - - /// Checks if a level is currently not allocated - /// Index of the level (0-based) - /// True is the level is unallocated and does not store any elements; 
otherwise, false. - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public bool IsFree(int level) - { - Contract.Requires(level >= 0); - return (m_count & (1 << level)) == 0; - } - - /// Gets a temporary buffer with the length corresponding to the specified level - /// Level of this spare buffer - /// Temporary buffer whose size is 2^level - /// The buffer should be returned after use by calling - public T[] GetSpare(int level) - { - Contract.Requires(level >= 0 && m_spares != null); - - if (level < m_spares.Length) - { // this level is kept in the spare list - -#if ENFORCE_INVARIANTS - Contract.Assert(!m_spareUsed[level], "this spare is already in use!"); -#endif - - var t = m_spares[level]; - if (t == null) - { // allocate a new one - t = new T[1 << level]; - m_spares[level] = t; - } -#if ENFORCE_INVARIANTS - m_spareUsed[level] = true; -#endif - return t; - } - else - { // this level is always allocated - return new T[1 << level]; - } - } - - /// Return a temporary buffer after use - /// Level of the temporary buffer - /// True if the buffer has been cleared and returned to the spare list, false if it was discarded - /// Kept buffers are cleared to prevent values from being kept alive and not garbage collected. 
- public bool PutSpare(int level, T[] spare) - { - Contract.Assert(level >= 0 && spare != null); - -#if ENFORCE_INVARIANTS - // make sure that we do not mix levels and spares - for (int i = 0; i < m_levels.Length; i++) - { - if (object.ReferenceEquals(m_levels[i], spare)) Debugger.Break(); - } -#endif - - // only clear spares that are kept alive - if (level < m_spares.Length) - { -#if ENFORCE_INVARIANTS - Contract.Assert(m_spareUsed[level], "this spare wasn't used"); -#endif - - // clear it in case it holds onto dead values that could be garbage collected - spare[0] = default(T); - if (level > 0) - { - spare[1] = default(T); - if (level > 1) Array.Clear(spare, 2, spare.Length - 2); - } -#if ENFORCE_INVARIANTS - m_spareUsed[level] = false; -#endif - return true; - } - return false; - } - - /// Find the smallest element in the store - /// Smallest element found, or default(T) if the store is empty - public T Min() - { - switch (m_count) - { - case 0: return default(T); - case 1: return m_root[0]; - case 2: return m_levels[1][0]; - default: - { - - int level = ColaStore.LowestBit(m_count); - int end = ColaStore.HighestBit(m_count); - T min = m_levels[level][0]; - while (level <= end) - { - if (!IsFree(level) && m_comparer.Compare(min, m_levels[level][0]) > 0) - { - min = m_levels[level][0]; - } - ++level; - } - return min; - } - } - } - - /// Find the largest element in the store - /// Largest element found, or default(T) if the store is empty - public T Max() - { - switch (m_count) - { - case 0: return default(T); - case 1: return m_root[0]; - case 2: return m_levels[1][1]; - default: - { - int level = ColaStore.LowestBit(m_count); - int end = ColaStore.HighestBit(m_count); - T max = m_levels[level][0]; - while (level <= end) - { - if (!IsFree(level) && m_comparer.Compare(max, m_levels[level][0]) < 0) - { - max = m_levels[level][0]; - } - ++level; - } - return max; - } - } - - } - - /// Returns the smallest and largest element in the store - /// Receives the value of 
the smallest element (or default(T) is the store is Empty) - /// Receives the value of the largest element (or default(T) is the store is Empty) - /// If the store contains only one element, than min and max will be equal - public void GetBounds(out T min, out T max) - { - switch (m_count) - { - case 0: - { - min = default(T); - max = default(T); - break; - } - case 1: - { - min = m_root[0]; - max = min; - break; - } - case 2: - { - min = m_levels[1][0]; - max = m_levels[1][1]; - break; - } - default: - { - - int level = ColaStore.LowestBit(m_count); - int end = ColaStore.HighestBit(m_count); - var segment = m_levels[level]; - min = segment[0]; - max = segment[segment.Length - 1]; - while (level <= end) - { - if (IsFree(level)) continue; - segment = m_levels[level]; - if (m_comparer.Compare(min, segment[0]) > 0) min = segment[0]; - if (m_comparer.Compare(max, segment[segment.Length - 1]) < 0) min = segment[segment.Length - 1]; - ++level; - } - break; - } - } - } - - public ColaStore.Iterator GetIterator() - { - return new ColaStore.Iterator(m_levels, m_count, m_comparer); - } - - /// Pre-allocate memory in the store so that it can store a specified amount of items - /// Number of items that will be inserted in the store - public void EnsureCapacity(int minimumRequired) - { - int level = ColaStore.HighestBit(minimumRequired); - if ((1 << level) < minimumRequired) ++level; - - if (level >= m_levels.Length) - { - Grow(level); - } - } - - #endregion - - private void MergeCascade(int level, T[] left, T[] right) - { - Contract.Requires(level > 0, "level"); - Contract.Requires(left != null && left.Length == (1 << (level - 1)), "left"); - Contract.Requires(right != null && right.Length == (1 << (level - 1)), "right"); - - if (IsFree(level)) - { // target level is empty - - if (level >= m_levels.Length) Grow(level); - Contract.Assert(level < m_levels.Length); - - ColaStore.MergeSort(m_levels[level], left, right, m_comparer); - } - else if (IsFree(level + 1)) - { // the next 
level is empty - - if (level + 1 >= m_levels.Length) Grow(level + 1); - Contract.Assert(level + 1 < m_levels.Length); - - var spare = GetSpare(level); - ColaStore.MergeSort(spare, left, right, m_comparer); - var next = m_levels[level]; - ColaStore.MergeSort(m_levels[level + 1], next, spare, m_comparer); - Array.Clear(next, 0, next.Length); - PutSpare(level, spare); - } - else - { // both are full, need to do a cascade merge - - Contract.Assert(level < m_levels.Length); - - // merge N and N +1 - var spare = GetSpare(level); - ColaStore.MergeSort(spare, left, right, m_comparer); - - // and cascade to N + 2 ... - var next = m_levels[level]; - MergeCascade(level + 1, next, spare); - Array.Clear(next, 0, next.Length); - PutSpare(level, spare); - } - } - - /// Grow the capacity of the level array - /// Minimum level required - private void Grow(int level) - { - Contract.Requires(level >= 0); - - // note: we want m_segments[level] to not be empty, which means there must be at least (level + 1) entries in the level array - int current = m_levels.Length; - int required = level + 1; - Contract.Assert(current < required); - - var tmpSegments = m_levels; - Array.Resize(ref tmpSegments, required); - for (int i = current; i < required; i++) - { - tmpSegments[i] = new T[1 << i]; - } - m_levels = tmpSegments; - - Contract.Ensures(m_levels != null && m_levels.Length > level); - } - - private void ShrinkIfRequired() - { - int n = m_levels.Length - 1; - if (n <= MAX_SPARE_ORDER) return; - if (IsFree(n)) - { // less than 50% full - - // to avoid the degenerate case of constantly Adding/Removing when at the threshold of a new level, - // we will only remove the last level if the previous level is also empty - - if (IsFree(n - 1)) - { // less than 25% full - - // remove the last level - var tmpSegments = new T[n][]; - Array.Copy(m_levels, tmpSegments, n); - m_levels = tmpSegments; - } - } - } - - internal IEnumerable IterateOrdered(bool reverse = false) - { - return 
ColaStore.IterateOrdered(m_count, m_levels, m_comparer, reverse); - } - - internal IEnumerable IterateUnordered() - { - return ColaStore.IterateUnordered(m_count, m_levels); - } - - //TODO: remove or set to internal ! - [Conditional("DEBUG")] - public void Debug_Dump(Func dump = null) - { - Trace.WriteLine("> " + m_levels.Length + " levels:"); - for(int i = 0; i < m_levels.Length; i++) - { - string s = dump == null ? String.Join(", ", m_levels[i]) : String.Join(", ", m_levels[i].Select(dump)); - Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, " - {0,2}|{1}: {2}", i, IsFree(i) ? "_" : "#", s)); - } -#if false - Trace.WriteLine("> " + m_spares.Length + " spares:"); - for (int i = 0; i < m_spares.Length; i++) - { - var spare = m_spares[i]; - Trace.WriteLine(String.Format(CultureInfo.InvariantCulture, "> {0,2}: {1}", i, spare == null ? "" : String.Join(", ", spare))); - } -#endif - Trace.WriteLine("> " + m_count + " items"); - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Core/Entry.cs b/FoundationDB.Storage.Memory/Core/Entry.cs deleted file mode 100644 index 041e44c21..000000000 --- a/FoundationDB.Storage.Memory/Core/Entry.cs +++ /dev/null @@ -1,107 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using System; - using System.Diagnostics; - using System.Runtime.InteropServices; - - public enum EntryType : ushort - { - Free = 0, - Key = 1, - Value = 2, - Search = 3 - } - - [DebuggerDisplay("Header={Header}, Size={Size}")] - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal unsafe struct Entry - { - /// Default alignement for objects (8 by default) - public const int ALIGNMENT = 8; // MUST BE A POWER OF 2 ! 
- public const int ALIGNMENT_MASK = ~(ALIGNMENT - 1); - - /// A read lock has been taken on this entry - public const ushort FLAGS_READ_LOCK = 1 << 8; - - /// A write lock has been taken on this entry - public const ushort FLAGS_WRITE_LOCK = 1 << 9; - - /// A GC lock has been taken on this entry - public const ushort FLAGS_GC_LOCK = 1 << 10; - - /// This entry has been moved to another page by the last GC - public const ushort FLAGS_MOVED = 1 << 11; - - /// This key has been flaged as being unreachable by current of future transaction (won't survive the next GC) - public const ushort FLAGS_UNREACHABLE = 1 << 12; - - /// The entry has been disposed and should be access anymore - public const ushort FLAGS_DISPOSED = 1 << 15; - - public const int TYPE_SHIFT = 13; - public const ushort TYPE_MASK_AFTER_SHIFT = 0x3; - - // Object Layout - // ============== - - // Offset Field Type Desc - // - // 0 HEADER ushort type, Flags, ... - // 2 HASH ushort 16-bit hashcode - // 4 SIZE uint Size of the data - // ... object fields ... - // x DATA byte[] Value of the object, size in the SIZE field - // y (pad) 0..7 padding bytes (set to 00 or FF ?) - // - // HEADER: bit flags - // - bit 8: READ LOCK - // - bit 9: WRITE LOCK - // - bit 10: GC LOCK - // - bit 11: MOVED - // - bit 12: UNREACHABLE - // - bit 13-14: TYPE - // - bit 15: DISPOSED, set if object is disposed - - /// Various flags - public ushort Header; - - public ushort Hash; - - /// Size of the key (in bytes) - public uint Size; - - /// Return the type of the object - public static unsafe EntryType GetObjectType(void* item) - { - return item == null ? 
EntryType.Free : (EntryType)((((Entry*)item)->Header >> TYPE_SHIFT) & TYPE_MASK_AFTER_SHIFT); - } - - /// Checks if the object is disposed - public static unsafe bool IsDisposed(void* item) - { - return item == null || (((Entry*)item)->Header & FLAGS_DISPOSED) != 0; - } - - internal static byte* Align(byte* ptr) - { - long r = ((long)ptr) & (ALIGNMENT - 1); - if (r > 0) ptr += ALIGNMENT - r; - return ptr; - } - - internal static bool IsAligned(void* ptr) - { - return (((long)ptr) & (ALIGNMENT - 1)) == 0; - } - - internal static int Padding(void* ptr) - { - return (int)(((long)ptr) & (ALIGNMENT - 1)); - } - } - -} diff --git a/FoundationDB.Storage.Memory/Core/Key.cs b/FoundationDB.Storage.Memory/Core/Key.cs deleted file mode 100644 index ce75d2dd2..000000000 --- a/FoundationDB.Storage.Memory/Core/Key.cs +++ /dev/null @@ -1,93 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Diagnostics.Contracts; - using System.Runtime.InteropServices; - - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal unsafe struct Key - { - // A Key contains the key's bytes, an hashcode, and a pointer to the most current Value for this key, or null if the key is currently deleted - - // Field Offset Bits Type Desc - // HEADER 0 16 flags Type, status flags, deletion or mutation flags, .... - // SIZE 2 16 uint16 Size of the DATA field (from 0 to 10,000). Note: bit 14 and 15 are usually 0 and could be used for something?) - // HASHCODE 4 32 uint32 Hashcode (note: size only need 2 bytes, so maybe we could extand this to 24 bits?) - // VALUEPTR 8 64 Value* Pointer to the most current value of this key (or null if the DELETION bit is set in the header) - // DATA 16 .. 
byte[] First byte of the key - - // The HEADER flags are as follow: - // - bit 0: NEW If set, this key has been inserted after the last GC - // - bit 1: MUTATED If set, this key has changed aster the last GC - // - bit 2-5: unused - // - bit 7: HAS_WATCH If set, this key is currently being watched - // - bit 8-15: ENTRY_FLAGS (inherited from Entry) - - public static readonly uint SizeOf = (uint)Marshal.OffsetOf(typeof(Key), "Data").ToInt32(); - - /// The key has been inserted after the last GC - public const ushort FLAGS_NEW = 1 << 0; - /// The key has been created/mutated since the last GC - public const ushort FLAGS_MUTATED = 1 << 1; - /// There is a watch listening on this key - public const ushort FLAGS_HAS_WATCH = 1 << 7; - - /// Various flags (TODO: enum?) - public ushort Header; - /// Size of the key (in bytes) - public ushort Size; - /// Hashcode of the key - public int HashCode; - /// Pointer to the head of the value chain for this key (should not be null) - public Value* Values; - /// Offset to the first byte of the key - public byte Data; - - public static USlice GetData(Key* self) - { - if (self == null) return default(USlice); - Contract.Assert((self->Header & Entry.FLAGS_DISPOSED) == 0, "Attempt to read a key that was disposed"); - return new USlice(&(self->Data), self->Size); - } - - public static bool StillAlive(Key* self, ulong sequence) - { - if (self == null) return false; - - if ((self->Header & Entry.FLAGS_UNREACHABLE) != 0) - { // we have been marked as dead - - var value = self->Values; - if (value == null) return false; - - // check if the last value is a deletion? 
- if (value->Sequence <= sequence && (value->Header & Value.FLAGS_DELETION) != 0) - { // it is deleted - return false; - } - } - - return true; - } - - public static bool IsDisposed(Key* self) - { - return (self->Header & Entry.FLAGS_DISPOSED) != 0; - } - - /// Return the address of the following value in the heap - internal static Key* WalkNext(Key* self) - { - Contract.Requires(self != null && Entry.GetObjectType(self) == EntryType.Key); - - return (Key*)Entry.Align((byte*)self + Key.SizeOf + self->Size); - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Core/Memory/ElasticHeap`1.cs b/FoundationDB.Storage.Memory/Core/Memory/ElasticHeap`1.cs deleted file mode 100644 index 336ab1922..000000000 --- a/FoundationDB.Storage.Memory/Core/Memory/ElasticHeap`1.cs +++ /dev/null @@ -1,169 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Linq; - using System.Runtime.InteropServices; - - /// Generic implementation of an elastic heap that uses one or more page to store objects of the same type, using multiple buckets for different page sizes - /// Type of the pages in the elastic heap - internal abstract class ElasticHeap : IDisposable - where TPage : EntryPage - { - private const uint MinAllowedPageSize = 4096; // ~= memory mapped page - private const uint MaxAllowedPageSize = 1 << 30; // 1GB - - protected readonly TPage[] m_currents; - protected readonly PageBucket[] m_buckets; - protected Func m_allocator; - private volatile bool m_disposed; - - protected struct PageBucket - { - public readonly uint PageSize; - public readonly List Pages; - public readonly List FreeList; - - public PageBucket(uint size) - { - this.PageSize = size; - this.Pages = new List(); - 
this.FreeList = new List(); - } - } - - protected ElasticHeap(uint[] sizes, Func allocator) - { - if (sizes == null) throw new ArgumentNullException("sizes"); - if (allocator == null) throw new ArgumentNullException("allocator"); - if (sizes.Length == 0) throw new ArgumentException("There must be at least one allocation size"); - - var buckets = new PageBucket[sizes.Length]; - for (int i = 0; i < buckets.Length; i++) - { - if (sizes[i] < MinAllowedPageSize || sizes[i] > MaxAllowedPageSize) throw new ArgumentException(String.Format("Page size {0} too small or not a power of two", sizes[i]), "sizes"); - if (sizes[i] % Entry.ALIGNMENT != 0) throw new ArgumentException(String.Format("Page size {0} must be aligned to {1} bytes", sizes[i], Entry.ALIGNMENT)); - buckets[i] = new PageBucket(sizes[i]); - } - m_buckets = buckets; - m_currents = new TPage[sizes.Length]; - m_allocator = allocator; - } - - /// Allocate a new page for a specific bucket - /// Bucet index - protected TPage CreateNewPage(int bucket) - { - uint size = m_buckets[bucket].PageSize; - - UnmanagedHelpers.SafeLocalAllocHandle handle = null; - try - { - handle = UnmanagedHelpers.AllocMemory(size); - return m_allocator(handle, size); - } - catch (Exception e) - { - if (handle != null) - { - if (!handle.IsClosed) handle.Dispose(); - handle = null; - } - if (e is OutOfMemoryException) - { - throw new OutOfMemoryException(String.Format("Failed to allocate new memory for new page of size {0}", size), e); - } - throw; - } - finally - { - if (handle != null) GC.AddMemoryPressure(size); - } - } - - /// Returns the estimated allocated size in all the buckets - public ulong GetAllocatedSize() - { - ulong sum = 0; - foreach (var bucket in m_buckets) - { - if (bucket.PageSize > 0 && bucket.Pages != null) - { - sum += (ulong)bucket.PageSize * (uint)bucket.Pages.Count; - } - } - return sum; - } - - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } - - protected virtual void Dispose(bool 
disposing) - { - if (!m_disposed) - { - m_disposed = true; - if (disposing) - { - for (int i = 0; i < m_buckets.Length; i++) - { - foreach (var page in m_buckets[i].Pages) - { - if (page != null) page.Dispose(); - } - foreach (var page in m_buckets[i].FreeList) - { - if (page != null) page.Dispose(); - } - } - Array.Clear(m_buckets, 0, m_buckets.Length); - Array.Clear(m_currents, 0, m_currents.Length); - } - m_allocator = null; - } - } - - [Conditional("DEBUG")] - public void Debug_Dump(bool detailed) - { - Debug.WriteLine("# Dumping {0} heap ({1:N0} pages in {2:N0} buckets)", this.GetType().Name, m_buckets.Sum(b => (long)b.Pages.Count), m_buckets.Length); - //TODO: needs locking but should only be called from unit tests anyway... - ulong entries = 0; - ulong allocated = 0; - ulong used = 0; - for (int i = 0; i < m_buckets.Length; i++) - { - var bucket = m_buckets[i]; - if (bucket.Pages == null) continue; - if (bucket.Pages.Count == 0) - { - Debug.WriteLine(" # Bucket #{0}: {1:N0} bytes is empty", i, bucket.PageSize); - } - else - { - Debug.WriteLine(" # Bucket #{0}: {1:N0} bytes (allocated: {2:N0} pages, free: {3:N0} pages)", i, bucket.PageSize, bucket.Pages.Count, bucket.FreeList.Count); - foreach (var page in bucket.Pages) - { - if (page == null) continue; - page.Debug_Dump(detailed); - allocated += bucket.PageSize; - entries += (uint)page.Count; - used += page.MemoryUsage; - } - } - } - Debug.WriteLine("# Found a total of {0:N0} entries using {1:N0} bytes out of {2:N0} bytes allocated", entries, used, allocated); - } - } - -} diff --git a/FoundationDB.Storage.Memory/Core/Memory/EntryPage.cs b/FoundationDB.Storage.Memory/Core/Memory/EntryPage.cs deleted file mode 100644 index b68edc69e..000000000 --- a/FoundationDB.Storage.Memory/Core/Memory/EntryPage.cs +++ /dev/null @@ -1,172 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using System; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Runtime.InteropServices; - using System.Threading; - - /// Base implementation of a page of memory that can store items of the same type - [DebuggerDisplay("Start={m_start}, Current={m_current}, Entries={m_count}, Usage={(m_current-m_start)} / {m_capacity}")] - internal unsafe abstract class EntryPage : IDisposable - { - /// Pointer to the next free slot in the page - protected byte* m_current; - /// Pointer to the first byte of the page - protected byte* m_start; - /// Pointer to the next byte after the last byte of the page - protected byte* m_end; - /// Size of the page - protected uint m_capacity; - /// Number of entries stored in this page - protected int m_count; - /// Handle to the allocated memory - protected SafeHandle m_handle; - - protected EntryPage(SafeHandle handle, uint capacity) - { - Contract.Requires(handle != null && !handle.IsInvalid && !handle.IsClosed); - - m_handle = handle; - m_capacity = capacity; - m_start = (byte*) handle.DangerousGetHandle(); - m_end = m_start + capacity; - m_current = m_start; - CheckInvariants(); - } - - ~EntryPage() - { - Dispose(false); - } - - [Conditional("DEBUG")] - protected void CheckInvariants() - { - Contract.Assert(!m_handle.IsInvalid, "Memory handle should not be invalid"); - Contract.Assert(!m_handle.IsClosed, "Memory handle should not be closed"); - Contract.Ensures(Entry.IsAligned(m_current), "Current pointer should always be aligned"); - Contract.Assert(m_current <= m_start + m_capacity, "Current pointer should never be outside the page"); - } - - /// Number of entries store in this page - public int Count { get { return m_count; } } - - /// Number of bytes allocated inside this page - public ulong MemoryUsage { get { return (ulong)(m_current - m_start); } } - - /// Type of the entries stored in this page - 
public abstract EntryType Type { get; } - - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } - - protected virtual void Dispose(bool disposing) - { - try - { - if (disposing) - { - var handle = m_handle; - if (handle != null && !handle.IsClosed) - { - m_handle.Close(); - GC.RemoveMemoryPressure(m_capacity); - } - } - } - finally - { - m_handle = null; - m_start = null; - m_current = null; - } - } - - private void ThrowDisposed() - { - throw new ObjectDisposedException(this.GetType().Name); - } - - /// Align a pointer in this page - /// Unaligned location in the page - /// Aligned pointer cannot be greater than or equal to this address - /// New pointer that is aligned, and is guaranteed to be less than the - internal static byte* Align(byte* ptr, byte* end) - { - long r = ((long)ptr) & (Entry.ALIGNMENT - 1); - if (r > 0) ptr += Entry.ALIGNMENT - r; - if (ptr > end) return end; - return ptr; - } - - /// Try to allocate a segment in this page - /// Minimum size of the segment - /// Pointer to the start of the allocated segment, or null if this page cannot satisfy the allocation - /// The pointer will be aligned before being returned. The method may return null even if there was enough space remaining, if the aligment padding causes the segment to overshoot the end of the page. 
- protected byte* TryAllocate(uint size) - { - // try to allocate an amount of memory - // - returns null if the page is full, or too small - // - returns a pointer to the allocated space - - byte* ptr = m_current; - if (ptr == null) ThrowDisposed(); - byte* end = m_end; - byte* next = ptr + size; - if (next > m_end) - { // does not fit in this page - return null; - } - - // update the cursor for the next value - next = (byte*) (((long)next + Entry.ALIGNMENT - 1) & Entry.ALIGNMENT_MASK); - if (next > end) next = end; - m_current = next; - ++m_count; - - CheckInvariants(); - return ptr; - } - - /// Update this instance to use another memory location, and release the previously allocated memory - /// Page that will be absorbed - /// The content of the current page will be deleted, and will be disposed - public void Swap(EntryPage target) - { - if (target == null) throw new ArgumentNullException("target"); - Contract.Requires(target.m_handle != null); - - if (m_current == null) ThrowDisposed(); - if (target.m_current == null) target.ThrowDisposed(); - - try - { } - finally - { - var old = m_handle; - m_handle = Interlocked.Exchange(ref target.m_handle, null); - m_count = target.m_count; - m_capacity = target.m_capacity; - m_start = target.m_start; - m_end = target.m_end; - m_current = target.m_current; - - old.Dispose(); - target.Dispose(); - } - CheckInvariants(); - } - - [Conditional("DEBUG")] - public abstract void Debug_Dump(bool detailed); - } - -} diff --git a/FoundationDB.Storage.Memory/Core/Memory/KeyHeap.cs b/FoundationDB.Storage.Memory/Core/Memory/KeyHeap.cs deleted file mode 100644 index beff5aabe..000000000 --- a/FoundationDB.Storage.Memory/Core/Memory/KeyHeap.cs +++ /dev/null @@ -1,235 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using FoundationDB.Client; - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Runtime.InteropServices; - - internal unsafe class KeyHeap : ElasticHeap - { - - // Some facts about keys: - // - The overhead per key is 16 bytes on x64 - // - The maximum allowed size for a key is 10,000 bytes. - // - Well designed layers will tend to use small keys. - // - Text-based indexes may need longer keys. - // - Very large keys should be rare and will already be slow due to longer memcmps anyway. - // - The smallest possible entry will be 16 bytes (empty key) which can occur only once per database - // - A typical small key "(42, small_int)" will be ~24 bytes - // - Page size of 4 KB can fit 170 keys with waste ~ 0.4% - // - Page size of 16 KB can fit 682 keys with waste ~ 0.1% - // - Page size of 64 KB can fit 2730 keys with waste negligible - // - A typical index composite key "(42, GUID, TimeStamp, int16)" will be ~48 bytes - // - Page size of 4 KB can fit 85 keys with waste ~ 0.4% - // - Page size of 16 KB can fit 341 keys with waste ~ 0.1% - // - Page size of 64 KB can fit 1365 keys with waste negligible - // - A somewhat longer key "(42, 1, GUID, GUID, TimeStamp, int16)" will be ~64 bytes - // - Page size of 4 KB can fit 64 keys with no waste - // - Page size of 16 KB can fit 256 keys with no waste - // - Page size of 64 KB can fit 1024 keys with no waste - // - A "big" key will be ~1000 bytes and should be pretty rare (either very specific scenario, or badly designed Layer) - // - Page size of 4 KB can fit 4 keys with waste ~ 2.3% - // - Page size of 16 KB can fit 16 keys with waste ~ 2.3% - // - Page size of 64 KB can fit 64 keys with waste ~ 0.8% - // - Page size of 128 KB can fit 128 keys with waste negligible - // - The largest possible entry size is 10,016 bytes and should 
never happen in well designed Layers - // - Page size smaller than 16KB are not possible (too small) - // - Page size of 16 KB can fit 1 key with a waste of 6368 bytes (38.8%) - // - Page size of 32 KB can fit 3 keys with a waste of 2720 bytes ( 8.3%) - // - Page size of 64 KB can fit 6 keys with a waste of 5440 bytes ( 8.3%) - // - Page size of 128 KB can fit 13 keys with a waste of 864 bytes ( 0.6%) - // - Page size of 256 KB can fit 26 keys with a waste of 1728 bytes ( 0.6%) - // - Page size of 1 MB can fit 104 keys with a waste of 6912 bytes ( 0.6%) - - // We should probably optimize for keys up to ~100 bytes, and try our best for longer keys. - // => We will use 4 buckets for the pages, and try to have at least 256 entries per page - // - SMALL : keys up to 64 bytes, with page size of 16 KB - // - MEDIUM: keys up to 256 bytes, with page size of 64 KB - // - LARGE : keys up to 1,024 bytes, with page size of 256 KB - // - HUGE : keys up to 10,016 bytes, with page size of 1 MB (fit up to 104 entries) - - /// Page of memory used to store Keys - public sealed unsafe class Page : EntryPage - { - - public Page(SafeHandle handle, uint capacity) - : base(handle, capacity) - { } - - public override EntryType Type - { - get { return EntryType.Key; } - } - - /// Copy an existing value to this page, and return the pointer to the copy - /// Value that must be copied to this page - /// Pointer to the copy in this page - public Key* TryAppend(Key* value) - { - Contract.Requires(value != null && Entry.GetObjectType(value) == EntryType.Value); - - uint rawSize = Key.SizeOf + value->Size; - var entry = (Key*)TryAllocate(rawSize); - if (entry == null) return null; // this page is full - - UnmanagedHelpers.CopyUnsafe((byte*)entry, (byte*)value, rawSize); - - return entry; - } - - public Key* TryAppend(USlice buffer) - { - Contract.Requires(buffer.Data != null - && buffer.Count >= Key.SizeOf - && ((Key*)buffer.Data)->Size == buffer.Count - Key.SizeOf); - - var entry = 
(Key*)TryAllocate(buffer.Count); - if (entry == null) return null; // this page is full - - UnmanagedHelpers.CopyUnsafe((byte*)entry, buffer.Data, buffer.Count); - entry->Header = ((ushort)EntryType.Key) << Entry.TYPE_SHIFT; - - return entry; - } - - public void Collect(KeyHeap.Page target, ulong sequence) - { - var current = (Key*)m_start; - var end = (Key*)m_current; - - while (current < end) - { - bool keep = Key.StillAlive(current, sequence); - - if (keep) - { // copy to the target page - - var moved = target.TryAppend(current); - if (moved == null) throw new InvalidOperationException("The target page was too small"); - - var values = current->Values; - if (values != null) - { - values->Parent = moved; - } - - current->Header |= Entry.FLAGS_MOVED | Entry.FLAGS_DISPOSED; - } - else - { - current->Header |= Entry.FLAGS_DISPOSED; - } - - current = Key.WalkNext(current); - } - - - } - - public override void Debug_Dump(bool detailed) - { - Contract.Requires(m_start != null && m_current != null); - Key* current = (Key*)m_start; - Key* end = (Key*)m_current; - - Trace.WriteLine(" # KeyPage: count=" + m_count.ToString("N0") + ", used=" + this.MemoryUsage.ToString("N0") + ", capacity=" + m_capacity.ToString("N0") + ", start=0x" + new IntPtr(m_start).ToString("X8") + ", end=0x" + new IntPtr(m_current).ToString("X8")); - if (detailed) - { - while (current < end) - { - Trace.WriteLine(" - [" + Entry.GetObjectType(current).ToString() + "] 0x" + new IntPtr(current).ToString("X8") + " : " + current->Header.ToString("X8") + ", size=" + current->Size + ", h=0x" + current->HashCode.ToString("X4") + " : " + FdbKey.Dump(Key.GetData(current).ToSlice())); - var value = current->Values; - while (value != null) - { - Trace.WriteLine(" -> [" + Entry.GetObjectType(value) + "] 0x" + new IntPtr(value).ToString("X8") + " @ " + value->Sequence + " : " + Value.GetData(value).ToSlice().ToAsciiOrHexaString()); - value = value->Previous; - } - current = Key.WalkNext(current); - } - } - } - } - 
- private const int NUM_BUCKETS = 4; - private const uint SMALL_KEYS = 64; - private const uint MEDIUM_KEYS = 256; - private const uint LARGE_KEYS = 1024; - private const uint HUGE_KEYS = uint.MaxValue; // should nether be larger than 10,016 bytes - - private static readonly uint[] KeySizes = new uint[NUM_BUCKETS] { - SMALL_KEYS, - MEDIUM_KEYS, - LARGE_KEYS, - HUGE_KEYS - }; - - private static readonly uint[] PageSizes = new uint[NUM_BUCKETS] - { - /* SMALL */ 16 * 1024, - /* MEDIUM */ 64 * 1024, - /* LARGE */ 256 * 1024, - /* HUGE */ 1024 * 1024 - }; - - public KeyHeap() - : base(PageSizes, (handle, size) => new KeyHeap.Page(handle, size)) - { } - - private static int GetBucket(uint size) - { - if (size <= SMALL_KEYS) return 0; - if (size <= MEDIUM_KEYS) return 1; - if (size <= LARGE_KEYS) return 2; - return 3; - } - - public Key* Append(USlice buffer) - { - int bucket = GetBucket(buffer.Count + Key.SizeOf); - - var page = m_currents[bucket]; - var entry = page != null ? page.TryAppend(buffer) : null; - if (entry == null) - { // allocate a new page and try again - entry = AppendSlow(bucket, buffer); - } - return entry; - } - - private Key* AppendSlow(int bucket, USlice buffer) - { - var page = CreateNewPage(bucket); - Contract.Assert(page != null); - m_currents[bucket] = page; - m_buckets[bucket].Pages.Add(page); - - var entry = page.TryAppend(buffer); - if (entry == null) throw new OutOfMemoryException(String.Format("Failed to allocate memory from the key heap ({0})", m_buckets[bucket].PageSize)); - return entry; - } - - public void Collect(ulong sequence) - { - for (int bucket = 0; bucket < m_buckets.Length; bucket++) - { - if (m_buckets[bucket].Pages.Count > 0) - { - //TODO:!!! - //- allocate a scratch page - //- for all pages in bucket that have more than x% of free space - // - copy as many surviving keys into scratch page - // - if scratch page is too small, add it to the list, allocate new scratch page (note: from the free list?) 
- // - put page into "free list" - } - } - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Core/Memory/ValueHeap.cs b/FoundationDB.Storage.Memory/Core/Memory/ValueHeap.cs deleted file mode 100644 index f16eb971a..000000000 --- a/FoundationDB.Storage.Memory/Core/Memory/ValueHeap.cs +++ /dev/null @@ -1,310 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Runtime.InteropServices; - - internal unsafe class ValueHeap : ElasticHeap - { - - // Some facts about values: - // - The overhead per value is 32 bytes on x64 - // - The largest possible value size is 100,000 bytes - // - A lot of layers (indexes, ...) use empty keys which could be optimized away and not take any space. - // - Document layers that split documents into a field per key will use values from 1 or 2 bytes (bool, ints) to ~64 bytes (strings, labels, text GUIDs, ...) - // - Some layers may pack complete documents in keys, or pack arrays, which will occupy a couple of KB - // - Blob-type layers will need to split very large documents (files, pictures, logs, ...) 
into as few chunks as possible, and be pegged at 10,000 or 100,000 bytes - // - A typical small value is an 32-bit or 64-bit integer or counter, which will be padded to 40 bytes - // - Page size of 4 KB can fit 170 keys with waste ~ 0.4% - // - Page size of 16 KB can fit 682 keys with waste ~ 0.1% - // - Page size of 64 KB can fit 2730 keys with waste negligible - // - A GUID will be 48 bytes - // - Page size of 4 KB can fit 85 keys with waste ~ 0.4% - // - Page size of 16 KB can fit 341 keys with waste ~ 0.1% - // - Page size of 64 KB can fit 1365 keys with waste negligible - // - A very small JSON doc {Id:"..",Value:"...",Tag:".."} will be less than ~128 bytes - // - Page size of 4 KB can fit 85 keys with waste ~ 0.4% - // - An array of 60 doubles will be 512 bytes - // - Page size of 4 KB can fit 85 keys with waste ~ 0.4% - // - A "small" chunk of a blob layer will be ~16K - // - Page size of 16 KB can fit 1 key with no waste - // - Page size of 64 KB can fit 64 keys with waste ~ 0.8% - // - Page size of 128 KB can fit 128 keys with waste negligible - // - The largest possible key is 10,032 bytes (header + pointer + 10,000 bytes) and should never happen in well designed Layers - // - Page size smaller than 16KB are not possible (too small) - // - Page size of 16 KB can fit 1 key with a waste of 6368 bytes (38.8%) - // - Page size of 32 KB can fit 3 keys with a waste of 2720 bytes ( 8.3%) - // - Page size of 64 KB can fit 6 keys with a waste of 5440 bytes ( 8.3%) - // - Page size of 128 KB can fit 13 keys with a waste of 864 bytes ( 0.6%) - // - Page size of 256 KB can fit 26 keys with a waste of 1728 bytes ( 0.6%) - // - Page size of 1 MB can fit 104 keys with a waste of 6912 bytes ( 0.6%) - - // pb: layers wanting to target a size that is a power of two (1K, 2K, 16K, ...) 
will always be misaligned due to the 32 bytes overhead and may create waste in pages (especially small pages) - - // We should probably optimize for keys up to ~100 bytes, and try our best for longer keys. - // => We will use 4 buckets for the pages, and try to have at least 256 entries per page - // - SMALL : keys up to 64 bytes, with page size of 16 KB - // - MEDIUM: keys up to 256 bytes, with page size of 64 KB - // - LARGE : keys up to 1,024 bytes, with page size of 256 KB - // - HUGE : keys up to 10,016 bytes, with page size of 1 MB (fit up to 104 entries) - - /// Page of memory used to store Values - public sealed class Page : EntryPage - { - - public Page(SafeHandle handle, uint capacity) - : base(handle, capacity) - { } - - public override EntryType Type - { - get { return EntryType.Value; } - } - - /// Copy an existing value to this page, and return the pointer to the copy - /// Value that must be copied to this page - /// Pointer to the copy in this page - public Value* TryAppend(Value* value) - { - Contract.Requires(value != null && Entry.GetObjectType(value) == EntryType.Value); - - uint rawSize = Value.SizeOf + value->Size; - Value* entry = (Value*)TryAllocate(rawSize); - if (entry == null) return null; // the page is full - - UnmanagedHelpers.CopyUnsafe((byte*)entry, (byte*)value, rawSize); - - return entry; - } - - public Value* TryAppend(USlice buffer) - { - Contract.Requires(buffer.Data != null - && buffer.Count >= Value.SizeOf - && ((Key*)buffer.Data)->Size == buffer.Count - Value.SizeOf); - - var entry = (Value*)TryAllocate(buffer.Count); - if (entry == null) return null; // the page is full - UnmanagedHelpers.CopyUnsafe((byte*)entry, buffer.Data, buffer.Count); - - return entry; - } - - public Value* TryAllocate(uint dataSize, ulong sequence, Value* previous, void* parent) - { - Value* entry = (Value*)TryAllocate(Value.SizeOf + dataSize); - if (entry == null) return null; // the page is full - - entry->Header = ((ushort)EntryType.Value) << 
Entry.TYPE_SHIFT; - entry->Size = dataSize; - entry->Sequence = sequence; - entry->Previous = previous; - entry->Parent = parent; - - return entry; - } - - public void Collect(Page target, ulong sequence) - { - var current = (Value*)m_start; - var end = (Value*)m_current; - - while (current < end) - { - bool keep = Value.StillAlive(current, sequence); - - void* parent = current->Parent; - - if (keep) - { // copy to the target page - - var moved = target.TryAppend(current); - if (moved == null) throw new InvalidOperationException(); // ?? - - // update the parent - switch (Entry.GetObjectType(parent)) - { - case EntryType.Key: - { - ((Key*)parent)->Values = moved; - break; - } - case EntryType.Value: - { - ((Value*)parent)->Previous = moved; - break; - } - case EntryType.Free: - { - //NO-OP - break; - } - default: - { - throw new InvalidOperationException("Unexpected parent while moving value"); - } - } - current->Header |= Entry.FLAGS_MOVED | Entry.FLAGS_DISPOSED; - } - else - { - // we need to kill the link from the parent - switch (Entry.GetObjectType(parent)) - { - case EntryType.Key: - { - ((Key*)parent)->Values = null; - break; - } - case EntryType.Value: - { - ((Value*)parent)->Previous = null; - break; - } - case EntryType.Free: - { - //NO-OP - break; - } - default: - { - throw new InvalidOperationException("Unexpected parent while destroying value"); - } - } - - current->Header |= Entry.FLAGS_DISPOSED; - } - - current = Value.WalkNext(current); - } - } - - public override void Debug_Dump(bool detailed) - { - Contract.Requires(m_start != null && m_current != null); - Value* current = (Value*)m_start; - Value* end = (Value*)m_current; - - Trace.WriteLine(" # ValuePage: count=" + m_count.ToString("N0") + ", used=" + this.MemoryUsage.ToString("N0") + ", capacity=" + m_capacity.ToString("N0") + ", start=0x" + new IntPtr(m_start).ToString("X8") + ", end=0x" + new IntPtr(m_current).ToString("X8")); - if (detailed) - { - while (current < end) - { - 
Trace.WriteLine(" - [" + Entry.GetObjectType(current).ToString() + "] 0x" + new IntPtr(current).ToString("X8") + " : " + current->Header.ToString("X8") + ", seq=" + current->Sequence + ", size=" + current->Size + " : " + Value.GetData(current).ToSlice().ToAsciiOrHexaString()); - if (current->Previous != null) Trace.WriteLine(" -> Previous: [" + Entry.GetObjectType(current->Previous) + "] 0x" + new IntPtr(current->Previous).ToString("X8")); - if (current->Parent != null) Trace.WriteLine(" <- Parent: [" + Entry.GetObjectType(current->Parent) + "] 0x" + new IntPtr(current->Parent).ToString("X8")); - - current = Value.WalkNext(current); - } - } - } - - } - - private const int NUM_BUCKETS = 5; - - //note: we try to target more than 100 entries per page to reduce overhead and possible waste - - private const int TINY_VALUES = 16 + 32; // note (GUIDs or smaller) - private const uint SMALL_VALUES = 128 + 32; // a tiny JSON doc should fit without problem - private const uint MEDIUM_VALUES = 60 * 8 + 32; // an array of 60 doubles - private const uint LARGE_VALUES = 4096 + 32; // a small size JSON doc (possibly compressed) - private const uint HUGE_VALUES = uint.MaxValue; // > 2KB would be "large documents", chunks of very large documents, or binary blobs - - private static readonly uint[] KeySizes = new uint[NUM_BUCKETS] { - TINY_VALUES, - SMALL_VALUES, - MEDIUM_VALUES, - LARGE_VALUES, - HUGE_VALUES - }; - - private static readonly uint[] PageSizes = new uint[NUM_BUCKETS] - { - /* TINY */ 16 * 1024, // from 341 to 512 per page - /* SMALL */ 64 * 1024, // from 409 to 1337 per page - /* MEDIUM */ 128 * 1024, // from 256 to 814 per page - /* LARGE */ 256 * 1024, // from 63 to 511 per page - /* HUGE */ 1024 * 1024, // from 10 to 253 per page - }; - - public ValueHeap() - : base(PageSizes, (handle, size) => new ValueHeap.Page(handle, size)) - { } - - private static int GetBucket(uint size) - { - if (size <= TINY_VALUES) return 0; - if (size <= SMALL_VALUES) return 1; - if (size 
<= MEDIUM_VALUES) return 2; - if (size <= LARGE_VALUES) return 3; - return 4; - } - - public Value* Allocate(uint dataSize, ulong sequence, Value* previous, void* parent) - { - int bucket = GetBucket(dataSize + Value.SizeOf); - - var page = m_currents[bucket]; - var entry = page != null ? page.TryAllocate(dataSize, sequence, previous, parent) : null; - if (entry == null) - { - entry = AllocateSlow(bucket, dataSize, sequence, previous, parent); - } - return entry; - } - - private Value* AllocateSlow(int bucket, uint dataSize, ulong sequence, Value* previous, void* parent) - { - var page = CreateNewPage(bucket); - Contract.Assert(page != null); - m_currents[bucket] = page; - m_buckets[bucket].Pages.Add(page); - - var entry = page.TryAllocate(dataSize, sequence, previous, parent); - if (entry == null) throw new OutOfMemoryException(String.Format("Failed to allocate memory from the the value heap ({0})", m_buckets[bucket].PageSize)); - return entry; - } - - public void Collect(ulong sequence) - { - for (int bucket = 0; bucket < m_buckets.Length; bucket++) - { - if (m_buckets[bucket].Pages.Count > 0) - { - //TODO:!!! - //- allocate a scratch page - //- for all pages in bucket that have more than x% of free space - // - copy as many surviving keys into scratch page - // - if scratch page is too small, add it to the list, allocate new scratch page (note: from the free list?) 
- // - put page into "free list" - } - } - -#if REFACTORED - foreach (var page in m_pages) - { - var target = CreateNewPage(m_pageSize, Entry.ALIGNMENT); - if (page.Count == 1) - { // this is a standalone page - page.Collect(target, sequence); - page.Swap(target); - } - else - { - page.Collect(target, sequence); - page.Swap(target); - } - } -#endif - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Core/NativeKeyComparer.cs b/FoundationDB.Storage.Memory/Core/NativeKeyComparer.cs deleted file mode 100644 index 4ee2eaf9c..000000000 --- a/FoundationDB.Storage.Memory/Core/NativeKeyComparer.cs +++ /dev/null @@ -1,109 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -#undef INSTRUMENT - -namespace FoundationDB.Storage.Memory.Core -{ - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Collections.Generic; - using System.Diagnostics.Contracts; - - internal unsafe sealed class NativeKeyComparer : IComparer, IEqualityComparer - { - - public int Compare(IntPtr left, IntPtr right) - { -#if INSTRUMENT - System.Threading.Interlocked.Increment(ref s_compareCalls); -#endif - // this method will be called A LOT, so it should be as fast as possible... 
- // We know that: - // - caller should never compare nulls (it's a bug) - // - empty keys can exist - // - number of calls with left == right will be very small so may not be worth it to optimize (will slow down everything else) - // - for db using the DirectoryLayer, almost all keys will start with 0x15 (prefix for an int in a tuple) so checking the first couple of bytes will not help much (long runs of keys starting with the same 2 or 3 bytes) - Contract.Assert(left != IntPtr.Zero && right != IntPtr.Zero); - - // unwrap as pointers to the Key struct - var leftKey = (Key*)left; - var rightKey = (Key*)right; - - // these will probably cause a cache miss - uint leftCount = leftKey->Size; - uint rightCount = rightKey->Size; - - // but then memcmp will probably have the data in the cpu cache... - int c = UnmanagedHelpers.NativeMethods.memcmp( - &(leftKey->Data), - &(rightKey->Data), - new UIntPtr(leftCount < rightCount ? leftCount : rightCount) - ); - return c != 0 ? c : (int)leftCount - (int)rightCount; - } - - public bool Equals(IntPtr left, IntPtr right) - { -#if INSTRUMENT - System.Threading.Interlocked.Increment(ref s_equalsCalls); -#endif - // unwrap as pointers to the Key struct - var leftKey = (Key*)left; - var rightKey = (Key*)right; - - if (leftKey->HashCode != rightKey->HashCode) - { - return false; - } - - uint leftCount, rightCount; - - if (leftKey == null || (leftCount = leftKey->Size) == 0) return rightKey == null || rightKey->Size == 0; - if (rightKey == null || (rightCount = rightKey->Size) == 0) return false; - - return leftCount == rightCount && 0 == UnmanagedHelpers.NativeMethods.memcmp(&(leftKey->Data), &(rightKey->Data), new UIntPtr(leftCount)); - } - - public int GetHashCode(IntPtr value) - { -#if INSTRUMENT - System.Threading.Interlocked.Increment(ref s_getHashCodeCalls); -#endif - var key = (Key*)value; - if (key == null) return -1; - return key->HashCode; - } - -#if INSTRUMENT - private static long s_compareCalls; - private static long 
s_equalsCalls; - private static long s_getHashCodeCalls; -#endif - - public static void GetCounters(out long compare, out long equals, out long getHashCode) - { -#if INSTRUMENT - compare = System.Threading.Interlocked.Read(ref s_compareCalls); - equals = System.Threading.Interlocked.Read(ref s_equalsCalls); - getHashCode = System.Threading.Interlocked.Read(ref s_getHashCodeCalls); -#else - compare = 0; - equals = 0; - getHashCode = 0; -#endif - } - - public static void ResetCounters() - { -#if INSTRUMENT - System.Threading.Interlocked.Exchange(ref s_compareCalls, 0); - System.Threading.Interlocked.Exchange(ref s_equalsCalls, 0); - System.Threading.Interlocked.Exchange(ref s_getHashCodeCalls, 0); -#endif - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Core/SequenceComparer.cs b/FoundationDB.Storage.Memory/Core/SequenceComparer.cs deleted file mode 100644 index a1a931407..000000000 --- a/FoundationDB.Storage.Memory/Core/SequenceComparer.cs +++ /dev/null @@ -1,35 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using System; - using System.Collections.Generic; - - internal sealed class SequenceComparer : IComparer, IEqualityComparer - { - public static readonly SequenceComparer Default = new SequenceComparer(); - - private SequenceComparer() - { } - - public int Compare(ulong x, ulong y) - { - if (x < y) return -1; - if (x > y) return +1; - return 0; - } - - public bool Equals(ulong x, ulong y) - { - return x == y; - } - - public int GetHashCode(ulong x) - { - return (((int)x) ^ ((int)(x >> 32))); - } - } - -} diff --git a/FoundationDB.Storage.Memory/Core/TransactionWindow.cs b/FoundationDB.Storage.Memory/Core/TransactionWindow.cs deleted file mode 100644 index 237e69998..000000000 --- a/FoundationDB.Storage.Memory/Core/TransactionWindow.cs +++ /dev/null @@ -1,169 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. 
All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using FoundationDB.Client; - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Diagnostics; - using System.Diagnostics.Contracts; - - [DebuggerDisplay("Sarted={m_startedUtc}, Min={m_minVersion}, Max={m_maxVersion}, Closed={m_closed}, Disposed={m_disposed}")] - internal sealed class TransactionWindow : IDisposable - { - /// Creation date of this transaction window - private readonly DateTime m_startedUtc; - /// First commit version for this transaction window - private readonly ulong m_minVersion; - /// Sequence of the last commited transaction from this window - private ulong m_maxVersion; - /// Counter for committed write transactions - private int m_commitCount; - /// If true, the transaction is closed (no more transaction can write to it) - private bool m_closed; - /// If true, the transaction has been disposed - private volatile bool m_disposed; - - /// Heap used to store the write conflict keys - private readonly UnmanagedMemoryHeap m_keys = new UnmanagedMemoryHeap(65536); - - /// List of all the writes made by transactions committed in this window - private readonly ColaRangeDictionary m_writeConflicts = new ColaRangeDictionary(USliceComparer.Default, SequenceComparer.Default); - - public TransactionWindow(DateTime startedUtc, ulong version) - { - m_startedUtc = startedUtc; - m_minVersion = version; - } - - public bool Closed { get { return m_closed; } } - - public ulong FirstVersion { get { return m_minVersion; } } - - public ulong LastVersion { get { return m_maxVersion; } } - - public DateTime StartedUtc { get { return m_startedUtc; } } - - /// Number of write transaction that committed during this window - public int CommitCount { get { return m_commitCount; } } - - public ColaRangeDictionary Writes { get { return m_writeConflicts; } } - - public void Close() - { - Contract.Requires(!m_closed && !m_disposed); 
- - if (m_disposed) ThrowDisposed(); - - m_closed = true; - } - - private unsafe USlice Store(Slice data) - { - uint size = checked((uint)data.Count); - var buffer = m_keys.AllocateAligned(size); - UnmanagedHelpers.CopyUnsafe(buffer, data); - return new USlice(buffer, size); - } - - public void MergeWrites(ColaRangeSet writes, ulong version) - { - Contract.Requires(!m_closed && writes != null && version >= m_minVersion && (!m_closed || version <= m_maxVersion)); - - if (m_disposed) ThrowDisposed(); - if (m_closed) throw new InvalidOperationException("This transaction has already been closed"); - - //Debug.WriteLine("* Merging writes conflicts for version " + version + ": " + String.Join(", ", writes)); - - foreach (var range in writes) - { - var begin = range.Begin; - var end = range.End; - - USlice beginKey, endKey; - if (begin.Offset == end.Offset && object.ReferenceEquals(begin.Array, end.Array) && end.Count >= begin.Count) - { // overlapping keys - endKey = Store(end); - beginKey = endKey.Substring(0, (uint)begin.Count); - } - else - { - beginKey = Store(begin); - endKey = Store(end); - } - - m_writeConflicts.Mark(beginKey, endKey, version); - } - - ++m_commitCount; - if (version > m_maxVersion) - { - m_maxVersion = version; - } - } - - /// Checks if a list of reads conflicts with at least one write performed in this transaction window - /// List of reads to check for conflicts - /// Sequence number of the transaction that performed the reads - /// True if at least one read is conflicting with a write with a higher sequence number; otherwise, false. - public bool Conflicts(ColaRangeSet reads, ulong version) - { - Contract.Requires(reads != null); - - //Debug.WriteLine("* Testing for conflicts for: " + String.Join(", ", reads)); - - if (version > m_maxVersion) - { // all the writes are before the reads, so no possible conflict! 
- //Debug.WriteLine(" > cannot conflict"); - return false; - } - - using (var scratch = new UnmanagedSliceBuilder()) - { - //TODO: do a single-pass version of intersection checking ! - foreach (var read in reads) - { - scratch.Clear(); - scratch.Append(read.Begin); - var p = scratch.Count; - scratch.Append(read.End); - var begin = scratch.ToUSlice(p); - var end = scratch.ToUSlice(p, scratch.Count - p); - - if (m_writeConflicts.Intersect(begin, end, version, (v, min) => v > min)) - { - Debug.WriteLine(" > Conflicting read: " + read); - return true; - } - } - } - - //Debug.WriteLine(" > No conflicts found"); - return false; - } - - private void ThrowDisposed() - { - throw new ObjectDisposedException(this.GetType().Name); - } - - public void Dispose() - { - if (!m_disposed) - { - m_disposed = true; - m_keys.Dispose(); - - } - GC.SuppressFinalize(this); - } - - public override string ToString() - { - return String.Format(System.Globalization.CultureInfo.InvariantCulture, "#{0} [{1}~{2}]", m_startedUtc.Ticks / TimeSpan.TicksPerMillisecond, m_minVersion, m_maxVersion); - } - } - -} diff --git a/FoundationDB.Storage.Memory/Core/Value.cs b/FoundationDB.Storage.Memory/Core/Value.cs deleted file mode 100644 index c24b38dfa..000000000 --- a/FoundationDB.Storage.Memory/Core/Value.cs +++ /dev/null @@ -1,91 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Core -{ - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Diagnostics.Contracts; - using System.Runtime.InteropServices; - - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal unsafe struct Value - { - - // A Value contains the pointer to the key's bytes, and a pointer to the most current Value for this key, or null if the key is currently deleted - - // Field Offset Bits Type Desc - // HEADER 0 16 flags Type, status flags, deletion or mutation flags, .... 
- // reserved 2 16 uint16 unused - // SIZE 4 32 uint Size of the DATA field (can be 0, should only use 24 bits at most) - // SEQUENCE 8 64 ulong Sequence version of this value - // PREVIOUS 16 64 Value* Pointer to the previous value that was supersed by this entry (or null if we are the oldest one in the chain) - // PARENT 24 64 void* Pointer to the parent of this value - // DATA 32 .. byte[] First byte of the key - - // The HEADER flags are as follow: - // - bit 0: DELETION If set, this value is a deletion marker (and its size must be zero) - // - bit 1: MUTATED If set, this value is not the last one for this key - // - bit 2-5: unused - // - bit 7: HAS_WATCH If set, this key is currently being watched - // - bit 8-15: ENTRY_FLAGS (inherited from Entry) - // - bit 8-15: ENTRY_FLAGS (inherited from Entry) - - public static readonly uint SizeOf = (uint)Marshal.OffsetOf(typeof(Value), "Data").ToInt32(); - - /// This value is a deletion marker - public const ushort FLAGS_DELETION = 1 << 0; - - /// This value has been mutated and is not up to date - public const ushort FLAGS_MUTATED = 1 << 1; - - /// Various flags (TDB) - public ushort Header; - /// Not used - public uint Reseved; - /// Size of the value - public uint Size; - /// Version where this version of the key first appeared - public ulong Sequence; - /// Pointer to the previous version of this key, or NULL if this is the earliest known - public Value* Previous; - /// Pointer to the parent node (can be a Key or a Value) - public void* Parent; - /// Offset to the first byte of the value - public byte Data; - - public static USlice GetData(Value* value) - { - if (value == null) return default(USlice); - - Contract.Assert((value->Header & Entry.FLAGS_DISPOSED) == 0, "Attempt to read a value that was disposed"); - return new USlice(&(value->Data), value->Size); - } - - public static bool StillAlive(Value* value, ulong sequence) - { - if (value == null) return false; - if ((value->Header & Value.FLAGS_MUTATED) != 0) 
- { - return value->Sequence >= sequence; - } - return true; - } - - public static bool IsDisposed(Value* value) - { - return (value->Header & Entry.FLAGS_DISPOSED) != 0; - } - - /// Return the address of the following value in the heap - internal static Value* WalkNext(Value* self) - { - Contract.Requires(self != null && Entry.GetObjectType(self) == EntryType.Value); - - return (Value*)Entry.Align((byte*)self + Value.SizeOf + self->Size); - } - - } - -} diff --git a/FoundationDB.Storage.Memory/FoundationDB.Storage.Memory.csproj b/FoundationDB.Storage.Memory/FoundationDB.Storage.Memory.csproj deleted file mode 100644 index e335a0c73..000000000 --- a/FoundationDB.Storage.Memory/FoundationDB.Storage.Memory.csproj +++ /dev/null @@ -1,110 +0,0 @@ - - - - - Debug - AnyCPU - {CC98DB39-31A1-4642-B4FC-9CB0AB26BF2E} - Library - Properties - FoundationDB.Storage.Memory - FoundationDB.Storage.Memory - v4.5 - 512 - ..\ - true - - - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - true - - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - true - - - true - - - FoundationDB.Storage.Memory.snk - - - - - True - ..\packages\System.Collections.Immutable.1.1.32-beta\lib\portable-net45+win8+wp8+wpa81\System.Collections.Immutable.dll - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - {773166b7-de74-4fcc-845c-84080cc89533} - FoundationDB.Client - - - - - - \ No newline at end of file diff --git a/FoundationDB.Storage.Memory/FoundationDB.Storage.Memory.snk b/FoundationDB.Storage.Memory/FoundationDB.Storage.Memory.snk deleted file mode 100644 index 5b29927c3..000000000 Binary files a/FoundationDB.Storage.Memory/FoundationDB.Storage.Memory.snk and /dev/null differ diff --git a/FoundationDB.Storage.Memory/IO/SnapshotFormat.cs b/FoundationDB.Storage.Memory/IO/SnapshotFormat.cs deleted file mode 100644 index 6a59025a4..000000000 --- a/FoundationDB.Storage.Memory/IO/SnapshotFormat.cs +++ /dev/null @@ -1,86 +0,0 @@ -#region 
Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.IO -{ - using FoundationDB.Client; - using FoundationDB.Storage.Memory.Utils; - using System; - - internal static class SnapshotFormat - { - - [Flags] - public enum Flags : ulong - { - None = 0, - - TYPE_SNAPSHOT_VERSIONNED = 0, - TYPE_SNAPSHOT_COMPACT = 1, - - COMPRESSED = 0x100, - SIGNED = 0x200, - ENCRYPTED = 0x400, - } - - // Size of the in-memory buffer while writing a snapshot (optimized for SSD?) - public const int FLUSH_SIZE_BITS = 20; // 1MB - public const int FLUSH_SIZE = 1 << FLUSH_SIZE_BITS; - - // For convenience, some variable-size sections (header, ...) will be padded to a 'page' size. - // => note: the Jump Table must fit in a single page so could probably not be smaller than 512 ... - public const int PAGE_SIZE_BITS = 10; // 1KB - public const int PAGE_SIZE = 1 << PAGE_SIZE_BITS; - - public const uint HEADER_MAGIC_NUMBER = 0x42444E50; // "PNDB" - public const uint JUMP_TABLE_MAGIC_NUMBER = 0x54504D4A; // "JMPT" - public const uint LEVEL_MAGIC_NUMBER = 0x204C564C; // "LVL "; - - // Size of the header CRC (in bytes) - public const int HEADER_METADATA_BYTES = 64; - public const int HEADER_CRC_SIZE = 4; - public const int LEVEL_HEADER_BYTES = 16; - - // The maximum size for key + value is 10,000 + 100,000 with 2 + 3 additional bytes to encode the variable-length size - // The buffer size should be multiple of the pageSize value AND a power of two for convenience. 
- // Also, it would help if the buffer is x2 that to simplify buffering - // The worst case scenario would be where the first byte of the key starts on the last byte of a page, and last byte of the value cross into a new page, added 2 pages to the total - // Minimum size will be 2 + 10,000 + 3 + 100,000 + 2 * 1,024 = 112,053 and the next power of two is 2 ^ 17, so use 2 ^ 18 for double buffering - public const int MAX_KEYVALUE_BITS = 18; - public const int BUFFER_SIZE = 1 << MAX_KEYVALUE_BITS; - - public static uint ComputeChecksum(Slice data) - { - if (data.Offset < 0 || data.Count < 0 || (data.Array == null && data.Count > 0)) throw new ArgumentException("Data is invalid"); - - unsafe - { - fixed (byte* ptr = data.Array) - { - return ComputeChecksum(ptr + data.Offset, (ulong)data.Count); - } - } - } - - public static unsafe uint ComputeChecksum(byte* start, ulong count) - { - if (start == null && count != 0) throw new ArgumentException("Invalid address"); - - byte* ptr = start; - byte* end = checked(ptr + count); - - // : unoptimized 32 bits FNV-1a implementation - uint h = 2166136261; // FNV1 32 bits offset basis - while (ptr < end) - { - h = (h ^ *ptr) * 16777619; // FNV1 32 prime - ++ptr; - } - return h; - // - } - - } - -} diff --git a/FoundationDB.Storage.Memory/IO/SnapshotReader.cs b/FoundationDB.Storage.Memory/IO/SnapshotReader.cs deleted file mode 100644 index be450ed3c..000000000 --- a/FoundationDB.Storage.Memory/IO/SnapshotReader.cs +++ /dev/null @@ -1,353 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.IO -{ - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using FoundationDB.Layers.Tuples; - using FoundationDB.Storage.Memory.API; - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Collections.Generic; - using System.Diagnostics.Contracts; - using System.Threading; - using System.Threading.Tasks; - - internal unsafe sealed class SnapshotReader - { - private struct LevelAddress - { - public ulong Offset; - public ulong Size; - public ulong PaddedSize; - } - - private readonly Win32MemoryMappedFile m_file; - - private bool m_hasHeader; - private bool m_hasJumpTable; - private LevelAddress[] m_jumpTable; - - private Version m_version; - private SnapshotFormat.Flags m_dbFlags; - private Uuid128 m_uid; - private ulong m_sequence; - private long m_itemCount; - private ulong m_timestamp; - private uint m_headerChecksum; - private Dictionary m_attributes; - - private uint m_pageSize; - private uint m_headerSize; - - private ulong m_dataStart; - private ulong m_dataEnd; - - private int m_levels; - - public SnapshotReader(Win32MemoryMappedFile file) - { - Contract.Requires(file != null); //TODO: && file.CanRead ? 
- m_file = file; - } - - public int Depth - { - get { return m_levels; } - } - - public ulong Sequence { get { return m_sequence; } } - public ulong TimeStamp { get { return m_timestamp; } } - public Version Version { get { return m_version; } } - public Uuid128 Id { get { return m_uid; } } - - private Exception ParseError(string message) - { - message = "Database snapshot is invalid or corrupted: " + message; -#if DEBUG - if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break(); -#endif - return new InvalidOperationException(message); - } - - private Exception ParseError(string message, params object[] args) - { - return ParseError(String.Format(message, args)); - } - - private static uint RoundDown(uint size, uint pageSize) - { - return size & ~(pageSize - 1U); - } - - private static uint RoundUp(uint size, uint pageSize) - { - return checked(size + pageSize - 1U) & ~(pageSize - 1U); - } - - private static ulong RoundDown(ulong size, uint pageSize) - { - return size & ~((ulong)pageSize - 1UL); - } - - private static ulong RoundUp(ulong size, uint pageSize) - { - return checked(size + pageSize - 1UL) & ~((ulong)pageSize - 1UL); - } - - public void ReadHeader(CancellationToken ct) - { - ct.ThrowIfCancellationRequested(); - - // minimum header prolog size is 64 but most will only a single page - // we can preallocate a full page, and we will resize it later if needed - - var reader = m_file.CreateReader(0, SnapshotFormat.HEADER_METADATA_BYTES); - - // "PNDB" - var signature = reader.ReadFixed32(); - // v1.0 - uint major = reader.ReadFixed16(); - uint minor = reader.ReadFixed16(); - m_version = new Version((int)major, (int)minor); - // FLAGS - m_dbFlags = (SnapshotFormat.Flags) reader.ReadFixed64(); - // Database ID - m_uid = new Uuid128(reader.ReadBytes(16).GetBytes()); - // Database Version - m_sequence = reader.ReadFixed64(); - // Number of items in the database - m_itemCount = checked((long)reader.ReadFixed64()); - // Database Timestamp - 
m_timestamp = reader.ReadFixed64(); - // Page Size - m_pageSize = reader.ReadFixed32(); - // Header Size - m_headerSize = reader.ReadFixed32(); - - Contract.Assert(!reader.HasMore); - - #region Sanity checks - - // Signature - if (signature != SnapshotFormat.HEADER_MAGIC_NUMBER) throw ParseError("Invalid magic number"); - - // Version - if (m_version.Major != 1) throw ParseError("Unsupported file version (major)"); - if (m_version.Minor > 0) throw ParseError("Unsupported file version (minor)"); - - // Flags - - // Page Size - if (m_pageSize != UnmanagedHelpers.NextPowerOfTwo(m_pageSize)) throw ParseError("Page size ({0}) is not a power of two", m_pageSize); - if (m_pageSize < SnapshotFormat.HEADER_METADATA_BYTES) throw ParseError("Page size ({0}) is too small", m_pageSize); - if (m_pageSize > 1 << 20) throw ParseError("Page size ({0}) is too big", m_pageSize); - - // Header Size - if (m_headerSize < 64 + 4 + 4) throw ParseError("Header size ({0}) is too small", m_headerSize); - if (m_headerSize > m_file.Length) throw ParseError("Header size is bigger than the file itself ({0} < {1})", m_headerSize, m_file.Length); - if (m_headerSize > 1 << 10) throw ParseError("Header size ({0}) exceeds the maximum allowed size", m_headerSize); - - #endregion - - // we know the page size and header size, read the rest... - - // read the rest - reader = m_file.CreateReader(0, m_headerSize); - reader.Skip(SnapshotFormat.HEADER_METADATA_BYTES); - - // parse the attributes - Contract.Assert(reader.Offset == SnapshotFormat.HEADER_METADATA_BYTES); - var attributeCount = checked((int)reader.ReadFixed32()); - if (attributeCount < 0 || attributeCount > 1024) throw ParseError("Attributes count is invalid"); - - var attributes = new Dictionary(attributeCount); - for (int i = 0; i < attributeCount; i++) - { - var name = reader.ReadVarbytes().ToSlice(); //TODO: max size ? 
- if (name.IsNullOrEmpty) throw ParseError("Header attribute name is empty"); - - var data = reader.ReadVarbytes().ToSlice(); //TODO: max size + have a small scratch pad buffer for these ? - var value = FdbTuple.Unpack(data); - attributes.Add(name.ToUnicode(), value); - } - m_attributes = attributes; - - // read the header en marker - var marker = reader.ReadFixed32(); - if (marker != uint.MaxValue) throw ParseError("Header end marker is invalid"); - - // verify the header checksum - uint actualHeaderChecksum = SnapshotFormat.ComputeChecksum(reader.Base, reader.Offset); - uint headerChecksum = reader.ReadFixed32(); - m_headerChecksum = headerChecksum; - - if (headerChecksum != actualHeaderChecksum) - { - throw ParseError("The header checksum does not match ({0} != {1}). This may be an indication of data corruption", headerChecksum, actualHeaderChecksum); - } - - m_dataStart = RoundUp(m_headerSize, m_pageSize); - m_hasHeader = true; - } - - public bool HasLevel(int level) - { - return m_hasJumpTable && level >= 0 && level < m_jumpTable.Length && m_jumpTable[level].Size != 0; - } - - public void ReadJumpTable(CancellationToken ct) - { - ct.ThrowIfCancellationRequested(); - - if (!m_hasHeader) - { - throw new InvalidOperationException("Cannot read the Jump Table without reading the Header first!"); - } - - // an empty database will have at least 2 pages: the header and the JT - if (m_file.Length < checked(m_pageSize << 1)) - { - throw ParseError("File size ({0}) is too small to be a valid snapshot", m_file.Length); - } - - // the jumptable is always in the last page of the file and is expected to fit nicely - // > file size MUST be evenly divible by page size - // > then JT offset will be file.Length - pageSize - if (m_file.Length % m_pageSize != 0) - { - throw ParseError("The file size ({0}) is not a multiple of the page size ({1}), which may be a symptom of truncation", m_file.Length, m_pageSize); - } - - var jumpTableStart = m_file.Length - m_pageSize; - 
Contract.Assert(jumpTableStart % m_pageSize == 0); - m_dataEnd = jumpTableStart; - - var reader = m_file.CreateReader(jumpTableStart, m_pageSize); - - // "JMPT" - var signature = reader.ReadFixed32(); - // Page Size (repeated) - var pageSizeRepeated = (int)reader.ReadFixed32(); - // Sequence Number (repeated) - var sequenceRepeated = reader.ReadFixed64(); - // Database ID (repeated) - var uidRepeated = new Uuid128(reader.ReadBytes(16).GetBytes()); - // Header CRC (repeated) - var headerChecksumRepeated = reader.ReadFixed32(); - - // Sanity checks - - if (signature != SnapshotFormat.JUMP_TABLE_MAGIC_NUMBER) throw ParseError("Last page does not appear to be the Jump Table"); - if (pageSizeRepeated != m_pageSize) throw ParseError("Page size in Jump Table does not match the header value"); - if (sequenceRepeated != m_sequence) throw ParseError("Sequence in Jump Table does not match the header value"); - if (uidRepeated != m_uid) throw ParseError("Database ID in Jump Table does not match the header value"); - if (headerChecksumRepeated != m_headerChecksum) throw ParseError("Database ID in Jump Table does not match the header value"); - - // read the table itself - int levels = (int)reader.ReadFixed32(); - if (levels < 0 || levels > 32) throw ParseError("The number of levels in the snapshot does not appear to be valid"); - - var table = new LevelAddress[levels]; - for (int level = 0; level < levels; level++) - { - ulong offset = reader.ReadFixed64(); - ulong size = reader.ReadFixed64(); - - // Offset and Size cannot be negative - // Empty levels (size == 0) must have a zero offset - // Non empty levels (size > 0) must have a non zero offset that is greater than the headerSize - if ((size == 0 && offset != 0) || (size > 0 && offset < m_dataStart)) throw ParseError("Level in Jump Table has invalid size ({0}) or offset ({1})", size, offset); - if (checked(offset + size) > m_dataEnd) throw ParseError("Level in Jump Table would end after the end of the file"); - - 
table[level].Offset = offset; - table[level].Size = size; - table[level].PaddedSize = RoundUp(size, m_pageSize); - } - - // end attributes - uint attributeCount = reader.ReadFixed32(); - if (attributeCount != 0) throw new NotImplementedException("Footer attributes not yet implemented!"); - - // end marker - if (reader.ReadFixed32() != uint.MaxValue) throw ParseError("Jump Table end marker not found"); - - // checksum - uint actualChecksum = SnapshotFormat.ComputeChecksum(reader.Base, reader.Offset); - uint checksum = reader.ReadFixed32(); - if (actualChecksum != checksum) throw ParseError("Jump Table checksum does not match ({0} != {1}). This may be an indication of data corruption", checksum, actualChecksum); - - m_jumpTable = table; - m_levels = levels; - m_hasJumpTable = true; - } - - public void ReadLevel(int level, LevelWriter writer, CancellationToken ct) - { - Contract.Requires(level >= 0 && writer != null); - ct.ThrowIfCancellationRequested(); - - if (!m_hasJumpTable) - { - throw new InvalidOperationException("Cannot read a level without reading the Jump Table first!"); - } - - int itemCount = checked(1 << level); - - var address = m_jumpTable[level]; - - if (address.Offset < m_dataStart || address.Offset > m_dataEnd) - { - throw ParseError("Level {0} offset ({1}) is invalid", level, address.Offset); - } - if (checked(address.Offset + address.PaddedSize) > m_dataEnd) - { - throw ParseError("Level {0} size ({1}) is invalid", level, address.PaddedSize); - } - - var reader = m_file.CreateReader(address.Offset, address.PaddedSize); - - // "LVL_" - var signature = reader.ReadFixed32(); - // Level Flags - var flags = reader.ReadFixed32(); - // Level ID - int levelId = (int)reader.ReadFixed32(); - // Item count (always 2^level) - int levelCount = (int)reader.ReadFixed32(); - - if (signature != SnapshotFormat.LEVEL_MAGIC_NUMBER) throw ParseError("Page does not appear to be a valid Level header"); - //TODO: check flags - if (levelId != level) throw ParseError("Page 
contains the header of a different Level ({0} != {1})", levelId, level); - if (levelCount != itemCount) throw ParseError("Item count ({0}) in level {1} header is not valid", levelCount, level); - - for (int i = 0; i < levelCount;i++) - { - // read the key - uint keySize = reader.ReadVarint32(); - if (keySize > MemoryDatabaseHandler.MAX_KEY_SIZE) throw ParseError("Key size ({0}) is too big", keySize); - USlice key = keySize == 0 ? USlice.Nil : reader.ReadBytes(keySize); - - // read the sequence - ulong sequence = reader.ReadVarint64(); - - // read the value - uint valueSize = reader.ReadVarint32(); - if (valueSize > MemoryDatabaseHandler.MAX_VALUE_SIZE) throw ParseError("Value size ({0) is too big", valueSize); - USlice value = valueSize == 0 ? USlice.Nil : reader.ReadBytes(valueSize); - - writer.Add(sequence, key, value); - } - - if (reader.ReadFixed32() != uint.MaxValue) throw ParseError("Invalid end marker in level"); - //TODO: check end marker, CRC, ... ? - uint checksum = reader.ReadFixed32(); - //TODO: verify checksum! - } - - } - - -} diff --git a/FoundationDB.Storage.Memory/IO/SnapshotWriter.cs b/FoundationDB.Storage.Memory/IO/SnapshotWriter.cs deleted file mode 100644 index ae5ba8c61..000000000 --- a/FoundationDB.Storage.Memory/IO/SnapshotWriter.cs +++ /dev/null @@ -1,324 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.IO -{ - using FoundationDB.Client; - using FoundationDB.Layers.Tuples; - using FoundationDB.Storage.Memory.API; - using FoundationDB.Storage.Memory.Core; - using FoundationDB.Storage.Memory.Utils; - using System; - using System.Collections.Generic; - using System.Diagnostics.Contracts; - using System.Threading; - using System.Threading.Tasks; - using System.Runtime.InteropServices; - - internal class SnapshotWriter - { - private readonly Win32SnapshotFile m_file; - private SliceWriter m_writer; - - private readonly int m_levels; - private readonly int m_pageSize; - private readonly int m_bufferSize; - - private Uuid128 m_uid; - private ulong m_sequence; - private long m_itemCount; - private long m_timestamp; - private uint m_headerChecksum; - - private readonly KeyValuePair[] m_jumpTable; - - public SnapshotWriter(Win32SnapshotFile file, int levels, int pageSize, int bufferSize) - { - Contract.Requires(file != null && levels >= 0 && pageSize >= 0 && bufferSize >= pageSize); //TODO: && file.CanRead ? - m_file = file; - m_pageSize = pageSize; - m_bufferSize = bufferSize; - //TODO: verify pageSize is a power of two, and bufferSize is a multiple of pageSize! 
- Contract.Assert(bufferSize % pageSize == 0); - - m_writer = new SliceWriter(bufferSize); - m_levels = levels; - - m_jumpTable = new KeyValuePair[levels]; - for (int i = 0; i < levels; i++) - { - m_jumpTable[i] = new KeyValuePair(0, 0); - } - } - - /// Write the header to the file - /// - /// - /// - /// - /// - /// - /// This needs to be called before writing any level to the file - public Task WriteHeaderAsync(SnapshotFormat.Flags headerFlags, Uuid128 uid, ulong sequence, long count, long timestamp, IDictionary attributes) - { - // The header will be use on ore more "pages", to simplify the job of loading / peeking at a stream content (no need for fancy buffering, just need to read 4K pages) - // > The last page is padded with 0xAAs to detect corruption. - - m_uid = uid; - m_sequence = sequence; - m_itemCount = count; - m_timestamp = timestamp; - - // HEADER - // - DB_HEADER (64 bytes) - // - DB ATTRIBUTES (variable size list of k/v) - // - END_MARKER + HEADER_CRC - // - PADDING (to fill last page) - - // DB Header - - // "PNDB" - m_writer.WriteFixed32(SnapshotFormat.HEADER_MAGIC_NUMBER); - // v1.0 - m_writer.WriteFixed16(1); // major - m_writer.WriteFixed16(0); // minor - // FLAGS - m_writer.WriteFixed64((ulong)headerFlags); - // Database ID - m_writer.WriteBytes(uid.ToSlice()); - // Database Version - m_writer.WriteFixed64(sequence); - // Number of items in the database - m_writer.WriteFixed64((ulong)count); - // Database Timestamp - m_writer.WriteFixed64((ulong)timestamp); - // Page Size - m_writer.WriteFixed32(SnapshotFormat.PAGE_SIZE); - // Header Size (not known yet and will be filled in later) - int offsetToHeaderSize = m_writer.Skip(4); - - // we should be at the 64 byte mark - Contract.Assert(m_writer.Position == SnapshotFormat.HEADER_METADATA_BYTES); - - // DB Attributes - m_writer.WriteFixed32((uint)attributes.Count); - foreach (var kvp in attributes) - { - // Name - m_writer.WriteVarbytes(Slice.FromString(kvp.Key)); - - // Value - 
m_writer.WriteVarbytes(kvp.Value.ToSlice()); - } - - // Mark the end of the header - m_writer.WriteFixed32(uint.MaxValue); - - // we now have the size of the header, and can fill in the blank - var headerEnd = m_writer.Position; - m_writer.Position = offsetToHeaderSize; - // write the header size (includes the CRC) - m_writer.WriteFixed32((uint)checked(headerEnd + SnapshotFormat.HEADER_CRC_SIZE)); - m_writer.Position = headerEnd; - - // now we can compute the actual CRC - uint headerChecksum = SnapshotFormat.ComputeChecksum(m_writer.ToSlice()); - m_writer.WriteFixed32(headerChecksum); - m_headerChecksum = headerChecksum; - - // optional padding to fill the rest of the page - PadPageIfNeeded(SnapshotFormat.PAGE_SIZE, 0xFD); - - return TaskHelpers.CompletedTask; - } - - public async Task WriteLevelAsync(int level, IntPtr[] segment, CancellationToken ct) - { - ct.ThrowIfCancellationRequested(); - - if (m_jumpTable[level].Value > 0) - { - throw new InvalidOperationException("The level has already be written to this snapshot"); - } - - var levelStart = checked(m_file.Length + (uint)m_writer.Position); - //Console.WriteLine("## level " + level + " starts at " + levelStart); - - //TODO: ensure that we start on a PAGE? - - //Console.WriteLine("> Writing level " + level); - - // "LVL_" - m_writer.WriteFixed32(SnapshotFormat.LEVEL_MAGIC_NUMBER); - // Level Flags - m_writer.WriteFixed32(0); //TODO: flags! 
- // Level ID - m_writer.WriteFixed32((uint)level); - // Item count (always 2^level) - m_writer.WriteFixed32((uint)segment.Length); - - for (int i = 0; i < segment.Length; i++) - { - unsafe - { -#if __MonoCS__ - var valuePointer =new IntPtr((void*) MemoryDatabaseHandler.ResolveValueAtVersion(segment[i], m_sequence)); - - if (valuePointer == IntPtr.Zero) - continue; - - Value value = new Value(); - Marshal.PtrToStructure(valuePointer, value); - - var keyPointer = new IntPtr((void*)segment[i]); - - Key key = new Key(); - Marshal.PtrToStructure(keyPointer, key); - - Contract.Assert(key.Size <= MemoryDatabaseHandler.MAX_KEY_SIZE); - - // Key Size - uint size = key.Size; - m_writer.WriteVarint32(size); - m_writer.WriteBytesUnsafe(&(key.Data), (int)size); - - // Value - m_writer.WriteVarint64(value.Sequence); // sequence - size = value.Size; - if (size == 0) - { // empty key - m_writer.WriteByte(0); - } - else - { - m_writer.WriteVarint32(size); // value size - m_writer.WriteBytesUnsafe(&(value.Data), (int)size); // value data - } -#else - - Value* value = MemoryDatabaseHandler.ResolveValueAtVersion(segment[i], m_sequence); - if (value == null) - { - continue; - } - Key* key = (Key*)segment[i]; //.ToPointer(); - - Contract.Assert(key != null && key->Size <= MemoryDatabaseHandler.MAX_KEY_SIZE); - - // Key Size - uint size = key->Size; - m_writer.WriteVarint32(size); - m_writer.WriteBytesUnsafe(&(key->Data), (int)size); - - // Value - - m_writer.WriteVarint64(value->Sequence); // sequence - size = value->Size; - if (size == 0) - { // empty key - m_writer.WriteByte(0); - } - else - { - m_writer.WriteVarint32(size); // value size - m_writer.WriteBytesUnsafe(&(value->Data), (int)size); // value data - } -#endif - } - - if (m_writer.Position >= SnapshotFormat.FLUSH_SIZE) - { - //Console.WriteLine("> partial flush (" + writer.Position + ")"); - int written = await m_file.WriteCompletePagesAsync(m_writer.Buffer, m_writer.Position, ct).ConfigureAwait(false); - if (written > 0) 
m_writer.Flush(written); - } - } - - m_writer.WriteFixed32(uint.MaxValue); - - //TODO: CRC? (would need to be computed on the fly, because we don't have the full slice in memory probably) - m_writer.WriteFixed32(0); - - var levelEnd = checked(m_file.Length + (uint)m_writer.Position); - m_jumpTable[level] = new KeyValuePair(levelStart, levelEnd - levelStart); - //Console.WriteLine("## level " + level + " ends at " + levelEnd); - - // optional padding to fill the rest of the page - PadPageIfNeeded(SnapshotFormat.PAGE_SIZE, (byte)(0xFC - level)); - - } - - public Task WriteJumpTableAsync(CancellationToken ct) - { - ct.ThrowIfCancellationRequested(); - - // The jump table is the last page of the file - // - it contains the list of (offset, size) of all the levels that are in the file - // - it contains any additional attributes (that were only known after writing all the data) - // - it repeats a few important values (sequence, header crc, ...) - // - it would contain any optional signature or data that is only know after writing the data to disk, and are needed to decode the rest - - // marks the start of the JT because we will need to compute the checksum later on - int startOffset = m_writer.Position; - - // "JMPT" - m_writer.WriteFixed32(SnapshotFormat.JUMP_TABLE_MAGIC_NUMBER); - // Page Size (repeated) - m_writer.WriteFixed32((uint)m_pageSize); - // Sequence Number (repeated) - m_writer.WriteFixed64(m_sequence); - // Database ID (repeated) - m_writer.WriteBytes(m_uid.ToSlice()); - // Header CRC (repeated) - m_writer.WriteFixed32(m_headerChecksum); - - int levels = m_levels; - m_writer.WriteFixed32((uint)levels); // Level Count - for (int level = 0; level < levels; level++) - { - // Level Offset (from start of file) - m_writer.WriteFixed64((ulong)m_jumpTable[level].Key); - // Level Size (in bytes) - m_writer.WriteFixed64((ulong)m_jumpTable[level].Value); - } - - //TODO: additional attributes! 
- m_writer.WriteFixed32(0); // 0 for now - - // End Marker - m_writer.WriteFixed32(uint.MaxValue); - - // Checksum - int endOffset = m_writer.Position; - uint jumpTableChecksum = SnapshotFormat.ComputeChecksum(m_writer[startOffset, endOffset]); - m_writer.WriteFixed32(jumpTableChecksum); - - // optional padding to fill the rest of the page - PadPageIfNeeded(SnapshotFormat.PAGE_SIZE, 0xFE); - - // we are done ! - return TaskHelpers.CompletedTask; - } - - public Task FlushAsync(CancellationToken ct) - { - //Console.WriteLine("> final flush (" + writer.Position + ")"); - return m_file.FlushAsync(m_writer.Buffer, m_writer.Position, ct); - } - - private void PadPageIfNeeded(int pageSize, byte padByte) - { - // Ensure the page is full - int pageOffset = m_writer.Position & (SnapshotFormat.PAGE_SIZE - 1); - if (pageOffset != 0) - { // Pad the remainder of the page - int pad = SnapshotFormat.PAGE_SIZE - pageOffset; - m_writer.Skip(pad, padByte); - //Console.WriteLine("@@@ added " + pad + " pad bytes => " + m_writer.Position); - } - } - - } - -} diff --git a/FoundationDB.Storage.Memory/IO/Win32MemoryMappedFile.cs b/FoundationDB.Storage.Memory/IO/Win32MemoryMappedFile.cs deleted file mode 100644 index 077d669fc..000000000 --- a/FoundationDB.Storage.Memory/IO/Win32MemoryMappedFile.cs +++ /dev/null @@ -1,254 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.IO -{ - using FoundationDB.Storage.Memory.Utils; - using Microsoft.Win32.SafeHandles; - using System; - using System.Diagnostics.Contracts; - using System.IO; - using System.Runtime.InteropServices; - using System.Security; - using System.Security.AccessControl; - - [SuppressUnmanagedCodeSecurity] - internal static class UnsafeNativeMethods - { - - [StructLayout(LayoutKind.Sequential)] - public sealed class SECURITY_ATTRIBUTES - { - public int nLength; - public IntPtr lpSecurityDescriptor; - public int bInheritHandle; - } - - [StructLayout(LayoutKind.Sequential)] - public struct SYSTEM_INFO - { - internal int dwOemId; - internal int dwPageSize; - internal IntPtr lpMinimumApplicationAddress; - internal IntPtr lpMaximumApplicationAddress; - internal IntPtr dwActiveProcessorMask; - internal int dwNumberOfProcessors; - internal int dwProcessorType; - internal uint dwAllocationGranularity; - internal short wProcessorLevel; - internal short wProcessorRevision; - } - - [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)] - internal class MEMORYSTATUSEX - { - internal uint dwLength = ((uint)Marshal.SizeOf(typeof(UnsafeNativeMethods.MEMORYSTATUSEX))); - internal uint dwMemoryLoad; - internal ulong ullTotalPhys; - internal ulong ullAvailPhys; - internal ulong ullTotalPageFile; - internal ulong ullAvailPageFile; - internal ulong ullTotalVirtual; - internal ulong ullAvailVirtual; - internal ulong ullAvailExtendedVirtual; - } - - [Flags] - public enum FileMapProtection : uint - { - PageReadonly = 0x02, - PageReadWrite = 0x04, - PageWriteCopy = 0x08, - PageExecuteRead = 0x20, - PageExecuteReadWrite = 0x40, - SectionCommit = 0x8000000, - SectionImage = 0x1000000, - SectionNoCache = 0x10000000, - SectionReserve = 0x4000000, - } - - [Flags] - public enum FileMapAccess : uint - { - FileMapCopy = 0x0001, - FileMapWrite = 0x0002, - FileMapRead = 0x0004, - FileMapAllAccess = 0x001f, 
- FileMapExecute = 0x0020, - } - - [SecurityCritical, DllImport("kernel32.dll", SetLastError = true)] - private static extern void GetSystemInfo(ref SYSTEM_INFO lpSystemInfo); - - [SecurityCritical, DllImport("kernel32.dll", SetLastError = true)] - [return: MarshalAs(UnmanagedType.Bool)] - public static extern bool GlobalMemoryStatusEx([In, Out] MEMORYSTATUSEX lpBuffer); - - [SecurityCritical, DllImport("kernel32.dll", CharSet = CharSet.Unicode, SetLastError = true)] - public static extern SafeMemoryMappedFileHandle CreateFileMapping(SafeFileHandle hFile, SECURITY_ATTRIBUTES lpAttributes, FileMapProtection fProtect, uint dwMaximumSizeHigh, uint dwMaximumSizeLow, string lpName); - - [SecurityCritical, DllImport("kernel32.dll", SetLastError = true, ExactSpelling = true)] - public static extern SafeMemoryMappedViewHandle MapViewOfFile(SafeMemoryMappedFileHandle handle, FileMapAccess dwDesiredAccess, uint dwFileOffsetHigh, uint dwFileOffsetLow, UIntPtr dwNumberOfBytesToMap); - - /// Gets the granularity for the starting address at which virtual memory can be allocated. - [SecurityCritical] - public static uint GetSystemPageAllocationGranularity() - { - var sysInfo = new SYSTEM_INFO(); - GetSystemInfo(ref sysInfo); - return sysInfo.dwAllocationGranularity; - } - - /// Gets the total size of the user mode portion of the virtual address space of the calling process, in bytes. 
- [SecurityCritical] - public static ulong GetTotalVirtualAddressSpaceSize() - { - var memStatusEx = new MEMORYSTATUSEX(); - GlobalMemoryStatusEx(memStatusEx); - return memStatusEx.ullTotalVirtual; - } - } - - internal unsafe sealed class Win32MemoryMappedFile : IDisposable - { - private readonly SafeMemoryMappedFileHandle m_mapHandle; - private readonly SafeMemoryMappedViewHandle m_viewHandle; - private readonly FileStream m_file; - private readonly ulong m_size; - private readonly byte* m_baseAddress; - private bool m_disposed; - - private Win32MemoryMappedFile(FileStream fs, SafeMemoryMappedFileHandle handle, ulong size) - { - Contract.Requires(fs != null && handle != null && !handle.IsInvalid && !handle.IsClosed); - m_mapHandle = handle; - m_file = fs; - m_size = size; - - // verify that it fits on 32 bit OS... - if (IntPtr.Size == 4 && size > uint.MaxValue) - { // won't work with 32-bit pointers - throw new InvalidOperationException("Memory mapped file size is too big to be opened on a 32-bit system."); - } - - // verifiy that it will fit in the virtual address space of the process - var totalVirtual = UnsafeNativeMethods.GetTotalVirtualAddressSpaceSize(); - if (size > totalVirtual) - { - throw new InvalidOperationException("Memory mapped file size is too big to fit in the current process virtual address space"); - } - - SafeMemoryMappedViewHandle view = null; - byte* baseAddress = null; - try - { - view = UnsafeNativeMethods.MapViewOfFile(m_mapHandle, UnsafeNativeMethods.FileMapAccess.FileMapRead, 0, 0, new UIntPtr(size)); - if (view.IsInvalid) throw Marshal.GetExceptionForHR(Marshal.GetHRForLastWin32Error()); - view.Initialize(size); - m_viewHandle = view; - - view.AcquirePointer(ref baseAddress); - m_baseAddress = baseAddress; - } - catch - { - if (baseAddress != null) view.ReleasePointer(); - if (view != null) view.Dispose(); - m_file = null; - m_viewHandle = null; - m_mapHandle = null; - m_baseAddress = null; - throw; - } - } - - [SecurityCritical] - 
public static Win32MemoryMappedFile OpenRead(string path) - { - Contract.Requires(!string.IsNullOrEmpty(path)); - - - if (!File.Exists(path)) - { - throw new FileNotFoundException("Memory mapped file not found", path); - } - - FileStream fs = null; - SafeMemoryMappedFileHandle handle = null; - try - { - // Open the file - fs = new FileStream(path, FileMode.Open, FileSystemRights.ListDirectory, FileShare.None, 0x1000, FileOptions.SequentialScan); - Contract.Assert(fs != null); - ulong capacity = checked((ulong)fs.Length); - if (capacity == 0) throw new ArgumentException("Cannot memory map an empty file"); - - // Create the memory mapping - uint dwMaximumSizeLow = (uint)(capacity & 0xffffffffL); - uint dwMaximumSizeHigh = (uint)(capacity >> 32); - handle = UnsafeNativeMethods.CreateFileMapping(fs.SafeFileHandle, null /*TODO?*/, UnsafeNativeMethods.FileMapProtection.PageReadonly, dwMaximumSizeHigh, dwMaximumSizeLow, null); - int errorCode = Marshal.GetLastWin32Error(); - if (handle.IsInvalid || errorCode == 183) - { - throw Marshal.GetExceptionForHR(errorCode); - } - - return new Win32MemoryMappedFile(fs, handle, capacity); - } - catch - { - if (handle != null) handle.Dispose(); - if (fs != null) fs.Dispose(); - throw; - } - } - - public string Name - { - get { return m_file.Name; } - } - - public ulong Length - { - get { return m_size; } - } - - private void EnsureReadable(ulong offset, ulong size) - { - if (m_disposed) throw new ObjectDisposedException(this.GetType().Name, "Memory mapped file has already been closed"); - if (offset > m_size) throw new ArgumentException("Offset is outside the bounds of the memory mapped file"); - if (checked(offset + size) > m_size) throw new ArgumentException("Size is outside the bounds of the memory mapped file"); - } - - public unsafe UnmanagedSliceReader CreateReader(ulong offset, ulong size) - { - EnsureReadable(offset, size); - - byte* start = m_baseAddress + offset; - - return UnmanagedSliceReader.FromAddress(start, size); - } 
- - #region IDisposable... - - public void Dispose() - { - if (!m_disposed) - { - m_disposed = true; - - if (m_viewHandle != null) - { - if (m_baseAddress != null) m_viewHandle.ReleasePointer(); - m_viewHandle.Dispose(); - } - if (m_mapHandle != null) m_mapHandle.Dispose(); - if (m_file != null) m_file.Dispose(); - } - } - - #endregion - } - -} diff --git a/FoundationDB.Storage.Memory/IO/Win32SnapshotFile.cs b/FoundationDB.Storage.Memory/IO/Win32SnapshotFile.cs deleted file mode 100644 index f26eb8be9..000000000 --- a/FoundationDB.Storage.Memory/IO/Win32SnapshotFile.cs +++ /dev/null @@ -1,187 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.IO -{ - using System; - using System.Diagnostics.Contracts; - using System.IO; - using System.Threading; - using System.Threading.Tasks; - - internal sealed class Win32SnapshotFile : IDisposable - { - private readonly string m_path; - private readonly int m_pageSize; - private FileStream m_fs; - - public const int SECTOR_SIZE = 4096; - - public Win32SnapshotFile(string path, bool read = false) - : this(path, SECTOR_SIZE, read) - { } - - public Win32SnapshotFile(string path, int pageSize, bool read = false) - { - if (string.IsNullOrEmpty(path)) throw new ArgumentNullException("path"); - if (pageSize < 512) throw new ArgumentException("Page size must be at least 512.", "pageSize"); - if (pageSize == 0) pageSize = SECTOR_SIZE; - //TODO: check that pageSize is a power of two ?? 
- - path = Path.GetFullPath(path); - m_path = path; - m_pageSize = pageSize; - - FileStream fs = null; - try - { - if (read) - { - fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, pageSize, FileOptions.Asynchronous | FileOptions.SequentialScan | (FileOptions)0x20000000/* NO_BUFFERING */); - } - else - { - fs = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.Read, pageSize, FileOptions.Asynchronous | FileOptions.WriteThrough | (FileOptions)0x20000000/* NO_BUFFERING */); - } - } - catch(Exception) - { - if (fs != null) - { - fs.Dispose(); - fs = null; - } - throw; - } - finally - { - m_fs = fs; - } - - Contract.Ensures(m_fs != null && m_fs.IsAsync); - } - - public ulong Length - { - get - { - var fs = m_fs; - return fs != null ? (ulong)fs.Length : 0UL; - } - } - - public void Seek(ulong position) - { - Contract.Requires(position <= this.Length); - - long fpos = checked((long)position); - long apos = m_fs.Seek(fpos, SeekOrigin.Begin); - if (apos != fpos) throw new IOException("Failed to seek to the desired position"); - } - - /// Read a certain number of bytes into a buffer - /// Buffer where to store the data - /// Offset in the buffer where the data will be written - /// Number of bytes to read - /// - /// Number of bytes read. If it is less than , it means the file was truncated. - /// May execute more than one read operation if the first one did not return enough data (reading from a network stream or NFS share??) 
- public async Task ReadExactlyAsync(byte[] buffer, uint offset, ulong count, CancellationToken cancellationToken) - { - if (m_fs == null) throw new ObjectDisposedException(this.GetType().Name); - - if (count > int.MaxValue) throw new OverflowException("Count is too big"); - - int remaining = (int)count; - uint read = 0; - if (remaining > 0) - { - int p = (int)offset; - while (remaining > 0) - { - Contract.Assert(p >= 0 && p < buffer.Length && remaining > 0 && p + remaining <= buffer.Length, "Read buffer overflow"); - try - { - int n = await m_fs.ReadAsync(buffer, p, remaining, cancellationToken).ConfigureAwait(false); - if (n <= 0) break; - p += n; - remaining -= n; - read += (uint)n; - } - catch(IOException) - { - throw; - } - } - } - return read; - } - - /// Write as many full pages to the file - /// Buffer that contains the data to write - /// Number of bytes in the buffer (that may or may not be aligned to a page size) - /// Optional cancellation token - /// Number of bytes written to the disk (always a multiple of 4K), or 0 if the buffer did not contain enough data. 
- public async Task WriteCompletePagesAsync(byte[] buffer, int count, CancellationToken cancellationToken) - { - if (m_fs == null) throw new ObjectDisposedException(this.GetType().Name); - - int complete = (count / m_pageSize) * m_pageSize; - if (complete > 0) - { - await m_fs.WriteAsync(buffer, 0, complete, cancellationToken).ConfigureAwait(false); - } - - return complete; - } - - /// Flush the remaining of the buffer to the disk, and ensures that the content has been fsync'ed - /// Buffer that may contains data (can be null if is equal to 0) - /// Number of bytes remaining in the buffer (or 0 if there is no more data to written) - /// - /// - public async Task FlushAsync(byte[] buffer, int count, CancellationToken cancellationToken) - { - Contract.Assert(count == 0 || buffer != null); - - if (count > 0) - { - int complete = (count / m_pageSize) * m_pageSize; - if (complete > 0) - { - await m_fs.WriteAsync(buffer, 0, complete, cancellationToken).ConfigureAwait(false); - count -= complete; - } - if (count > 0) - { // we have to write full 4K sectors, so we'll need to copy the rest to a temp 4K buffer (padded with 0s) - var tmp = new byte[m_pageSize]; - Buffer.BlockCopy(buffer, complete, tmp, 0, count); - await m_fs.WriteAsync(tmp, 0, count, cancellationToken).ConfigureAwait(false); - } - } - //REVIEW: since we are using WRITE_THROUGH + NO_BUFFERING, the OS is *supposed* to write directly to the disk ... - // need to verify that this is actually the case! 
- await m_fs.FlushAsync(cancellationToken); - } - - public override string ToString() - { - return "Snapshot:" + m_path; - } - - public void Dispose() - { - try - { - var fs = m_fs; - if (fs != null) fs.Close(); - } - finally - { - m_fs = null; - } - } - } - -} diff --git a/FoundationDB.Storage.Memory/Properties/AssemblyInfo.cs b/FoundationDB.Storage.Memory/Properties/AssemblyInfo.cs deleted file mode 100644 index 9ddda036a..000000000 --- a/FoundationDB.Storage.Memory/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,17 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -/* -TODO: CHOOSE A LICENSE! -*/ -#endregion - -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -[assembly: AssemblyTitle("FoundationDB.Storage.Memory")] -[assembly: AssemblyDescription("In-Memory Storage Engine for FoundationDB")] -[assembly: AssemblyConfiguration("")] - -[assembly: ComVisible(false)] - -[assembly: Guid("6eaef97f-bc30-4dcf-b8d4-c22f749907e3")] \ No newline at end of file diff --git a/FoundationDB.Storage.Memory/Properties/VersionInfo.cs b/FoundationDB.Storage.Memory/Properties/VersionInfo.cs deleted file mode 100644 index c479cd88f..000000000 --- a/FoundationDB.Storage.Memory/Properties/VersionInfo.cs +++ /dev/null @@ -1,19 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -/* -TODO: CHOOSE A LICENSE! -*/ -#endregion - -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -[assembly: AssemblyCompany("Doxense")] -[assembly: AssemblyProduct("FoundationDB.Storage.Memory")] -[assembly: AssemblyCopyright("Copyright © Doxense 2013-2014")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -[assembly: AssemblyVersion("0.1.0.0")] //note: Only change this when doing breaking API changes! -[assembly: AssemblyFileVersion("0.1.0.0")] //note: change this to "x.y.z.build_number" in a build step of your favorite C.I. 
build server -[assembly: AssemblyInformationalVersion("0.1.0-alpha")] //note: this is the version seen by NuGet, add "-alpha"/"-beta"/"-rc1" at the end to create pre-release packages diff --git a/FoundationDB.Storage.Memory/README.md b/FoundationDB.Storage.Memory/README.md deleted file mode 100644 index c477ea5c3..000000000 --- a/FoundationDB.Storage.Memory/README.md +++ /dev/null @@ -1,3 +0,0 @@ -This project contains an experimental In-Memory database that is compatible with the FoundationDB API - -**WARNING**: This doesn't work yet! It is only a very early prototype that is not intended for production use! \ No newline at end of file diff --git a/FoundationDB.Storage.Memory/Utils/TaskHelpers.cs b/FoundationDB.Storage.Memory/Utils/TaskHelpers.cs deleted file mode 100644 index e9b47433c..000000000 --- a/FoundationDB.Storage.Memory/Utils/TaskHelpers.cs +++ /dev/null @@ -1,60 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Utils -{ - using System; - using System.Threading; - using System.Threading.Tasks; - - /// Helper methods to work on tasks - internal static class TaskHelpers - { - - /// Return a task that is already completed - // README: There is a Task.CompletedTask object in the BCL that is internal, and one 'easy' way to get access to it is via Task.Delay(0) that returns it if param is equal to 0... 
- public static readonly Task CompletedTask = Task.Delay(0); - - /// Returns a failed Task that wraps an exception - /// Type of the result of the task - /// Exception that will be wrapped in the task - /// Task that is already completed, and that will rethrow the exception once observed - public static Task FromException(Exception e) - { - // There is a Task.FromException() method in the BCL, but unfortunately it is internal :( - // We can only emulate it by calling TrySetException on a dummy TaskCompletionSource - // Also, we should flattent AggregateException so as not to create huge chain of aggEx - - var tcs = new TaskCompletionSource(); - - var aggEx = e as AggregateException; - if (aggEx == null) - tcs.TrySetException(e); - else - tcs.TrySetException(aggEx.InnerExceptions); - - //note: also, to avoid blowing up the process if nobody observes the task, we observe it once - var _ = tcs.Task.Exception; - - return tcs.Task; - } - - /// Returns a cancelled Task that is linked with a specific token - /// Type of the result of the task - /// Cancellation token that should already be cancelled - /// Task in the cancelled state that is linked with this cancellation token - public static Task FromCancellation(CancellationToken cancellationToken) - { - // There is a Task.FromCancellation() method in the BCL, but unfortunately it is internal :( - // The "best" way I've seen to emulate the same behavior, is creating a fake task (with a dummy action) with the same alread-cancelled CancellationToken - // This should throw the correct TaskCanceledException that is linked with this token - - // ensure that it is actually cancelled, so that we don't deadlock - if (!cancellationToken.IsCancellationRequested) throw new InvalidOperationException(); - - return new Task(() => default(T), cancellationToken); - } - - } -} diff --git a/FoundationDB.Storage.Memory/Utils/USlice.cs b/FoundationDB.Storage.Memory/Utils/USlice.cs deleted file mode 100644 index d8057a82b..000000000 --- 
a/FoundationDB.Storage.Memory/Utils/USlice.cs +++ /dev/null @@ -1,198 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Utils -{ - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Runtime.ConstrainedExecution; - using System.Runtime.InteropServices; - using System.Security; - - /// Slice of unmanaged memory - [DebuggerDisplay("({Data}, {Count})"), DebuggerTypeProxy(typeof(USliceDebugView))] - public unsafe struct USlice : IEquatable, IComparable - { - public readonly byte* Data; - public readonly uint Count; - - /// Gets an empty slice (equivalent to the NULL pointer) - public static USlice Nil - { - get { return default(USlice); } - } - - public USlice(byte* data, uint count) - { - Contract.Requires(data != null || count == 0); - this.Data = data; - this.Count = count; - } - - /// Checks if this is the empty slice (NULL pointer) - public bool IsNull - { - get { return this.Data == null; } - } - - public byte* AtOffset(uint offset) - { - if (this.Data == null || offset >= this.Count) ThrowInvalidAccess(this.Data); - return this.Data + offset; - } - - public byte this[uint offset] - { - get { return *(AtOffset(offset)); } - } - - public USlice Substring(uint startIndex, uint count) - { - if (count == 0) return default(USlice); - Contract.Requires(this.Data != null && startIndex <= this.Count && count <= this.Count && startIndex + count <= this.Count); - - if (this.Data == null) ThrowNullReference(); - if (startIndex > this.Count) ThrowIndexOutsideTheSlice(); - if (count > this.Count || startIndex + count > this.Count) ThrowSliceTooSmall(); - - return new USlice(this.Data + startIndex, count); - } - - private static void ThrowIndexOutsideTheSlice() - { - throw new ArgumentOutOfRangeException("Start index must be inside the slice", "startIndex"); - } - - private 
static void ThrowSliceTooSmall() - { - throw new ArgumentOutOfRangeException("Slice is too small", "count"); - } - - public IntPtr GetPointer() - { - return new IntPtr(this.Data); - } - - public IntPtr GetPointer(uint offset) - { - return new IntPtr(AtOffset(offset)); - } - - public byte* Successor - { - get - { - if (this.Data == null) ThrowNullReference(); - return this.Data + this.Count; - } - } - - public byte[] GetBytes() - { - Contract.Requires(this.Count >= 0); - var tmp = new byte[this.Count]; - if (this.Count > 0) - { - Contract.Assert(this.Data != null); - fixed (byte* ptr = tmp) - { - UnmanagedHelpers.CopyUnsafe(ptr, this.Data, this.Count); - } - } - return tmp; - } - - public byte[] GetBytes(uint offset, uint count) - { - Contract.Requires(this.Count >= 0); - - if (offset > this.Count) throw new ArgumentOutOfRangeException("offset"); - if (offset + count >= this.Count) throw new ArgumentOutOfRangeException("count"); - - var tmp = new byte[count]; - if (count > 0) - { - Contract.Assert(this.Data != null); - fixed (byte* ptr = tmp) - { - UnmanagedHelpers.CopyUnsafe(ptr, this.Data + offset, count); - } - } - return tmp; - } - - public FoundationDB.Client.Slice ToSlice() - { - return FoundationDB.Client.Slice.Create(GetBytes()); - } - - public bool Equals(USlice other) - { - if (this.Count != other.Count) return false; - if (this.Data == other.Data) return true; - if (this.Data == null || other.Data == null) return false; - - //TODO: optimize! 
- return 0 == UnmanagedHelpers.CompareUnsafe(this.Data, this.Count, other.Data, other.Count); - } - - public int CompareTo(USlice other) - { - return UnmanagedHelpers.CompareUnsafe(this.Data, this.Count, other.Data, other.Count); - } - - public override bool Equals(object obj) - { - if (obj == null) return this.Data == null && this.Count == 0; - return obj is USlice && Equals((USlice)obj); - } - - public override int GetHashCode() - { - return UnmanagedHelpers.ComputeHashCode(ref this); - } - - public override string ToString() - { - return "{" + (long)this.Data + ", " + this.Count + "}"; - } - - private static void ThrowNullReference() - { - throw new InvalidOperationException("Cannot access NULL pointer"); - } - - private static void ThrowInvalidAccess(byte* ptr) - { - if (ptr == null) ThrowNullReference(); - throw new IndexOutOfRangeException(); - - } - - private sealed class USliceDebugView - { - private readonly USlice m_slice; - - public USliceDebugView(USlice slice) - { - m_slice = slice; - } - - public uint Size - { - get { return m_slice.Count; } - } - - public byte[] Data - { - get { return m_slice.GetBytes(); } - } - - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Utils/USliceComparer.cs b/FoundationDB.Storage.Memory/Utils/USliceComparer.cs deleted file mode 100644 index 6761d5e29..000000000 --- a/FoundationDB.Storage.Memory/Utils/USliceComparer.cs +++ /dev/null @@ -1,54 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Utils -{ - using System; - using System.Collections.Generic; - - /// Performs optimized equality and comparison checks on Slices - public unsafe sealed class USliceComparer : IComparer, IEqualityComparer, IComparer> - { - /// Default instance of the slice comparator - public static readonly USliceComparer Default = new USliceComparer(); - - private USliceComparer() - { } - - /// Lexicographically compare two slices and returns an indication of their relative sort order - /// Slice compared with - /// Slice compared with - /// Returns a NEGATIVE value if is LESS THAN , ZERO if is EQUAL TO , and a POSITIVE value if is GREATER THAN . - /// If both and are nil or empty, the comparison will return ZERO. If only is nil or empty, it will return a NEGATIVE value. If only is nil or empty, it will return a POSITIVE value. - public int Compare(USlice x, USlice y) - { - return UnmanagedHelpers.CompareUnsafe(x.Data, x.Count, y.Data, y.Count); - } - - /// Checks if two slices are equal. - /// Slice compared with - /// Slice compared with - /// true if and have the same size and contain the same sequence of bytes; otherwise, false. - public bool Equals(USlice x, USlice y) - { - return x.Count == y.Count && 0 == UnmanagedHelpers.CompareUnsafe(x.Data, x.Count, y.Data, y.Count); - } - - /// Computes the hash code of a slice - /// A slice - /// A 32-bit signed hash coded calculated from all the bytes in the slice - public int GetHashCode(USlice obj) - { - if (obj.Data == null) return 0; - //return ComputeHashCode(obj.Array, obj.Offset, obj.Count); - return 123; //TODO! 
- } - - int IComparer>.Compare(KeyValuePair x, KeyValuePair y) - { - return UnmanagedHelpers.CompareUnsafe(x.Key.Data, x.Key.Count, y.Key.Data, y.Key.Count); - } - } - -} diff --git a/FoundationDB.Storage.Memory/Utils/UnmanagedHelpers.cs b/FoundationDB.Storage.Memory/Utils/UnmanagedHelpers.cs deleted file mode 100644 index 7e90d4656..000000000 --- a/FoundationDB.Storage.Memory/Utils/UnmanagedHelpers.cs +++ /dev/null @@ -1,382 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -#define USE_NATIVE_MEMORY_OPERATORS - -namespace FoundationDB.Storage.Memory.Utils -{ - using FoundationDB.Client; - using System; - using System.Diagnostics.Contracts; - using System.Runtime.CompilerServices; - using System.Runtime.ConstrainedExecution; - using System.Runtime.InteropServices; - using System.Security; - - internal static unsafe class UnmanagedHelpers - { - - /// Round a number to the next power of 2 - /// Positive integer that will be rounded up (if not already a power of 2) - /// Smallest power of 2 that is greater than or equal to - /// Will return 1 for = 0 (because 0 is not a power of 2 !), and will throw for < 0 - /// If is a negative number - public static uint NextPowerOfTwo(uint x) - { - // cf http://en.wikipedia.org/wiki/Power_of_two#Algorithm_to_round_up_to_power_of_two - - // special case - if (x == 0) return 1; - - --x; - x |= (x >> 1); - x |= (x >> 2); - x |= (x >> 4); - x |= (x >> 8); - x |= (x >> 16); - return x + 1; - } - - public static SafeLocalAllocHandle AllocMemory(uint size) - { - var handle = NativeMethods.LocalAlloc(0, new UIntPtr(size)); - if (handle.IsInvalid) throw new OutOfMemoryException(String.Format("Failed to allocate from unmanaged memory ({0} bytes)", size)); - return handle; - } - - /// Copy a managed slice to the specified memory location - /// Where to copy the bytes - /// Slice of managed memory that will be copied to the destination - [SecurityCritical, 
ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] - public static void CopyUnsafe(byte* dest, Slice src) - { - if (src.Count > 0) - { - Contract.Requires(dest != null && src.Array != null && src.Offset >= 0 && src.Count >= 0); - fixed (byte* ptr = src.Array) - { - CopyUnsafe(dest, ptr + src.Offset, (uint)src.Count); - } - } - } - - public static void CopyUnsafe(Slice dest, byte* src, uint count) - { - if (count > 0) - { - Contract.Requires(dest.Array != null && dest.Offset >= 0 && dest.Count >= 0 && src != null); - fixed (byte* ptr = dest.Array) - { - NativeMethods.memmove(ptr + dest.Offset, src, new UIntPtr(count)); - } - } - } - - /// Copy an unmanaged slice to the specified memory location - /// Where to copy the bytes - /// Slice un unmanaged memory that will be copied to the destination - [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] - public static void CopyUnsafe(byte* dest, USlice src) - { - if (src.Count > 0) - { - Contract.Requires(dest != null && src.Data != null); - CopyUnsafe(dest, src.Data, src.Count); - } - } - - public static void CopyUnsafe(USlice dest, byte* src, uint count) - { - if (count > 0) - { - Contract.Requires(dest.Data != null && src != null); - CopyUnsafe(dest.Data, src, count); - } - } - - /// Dangerously copy native memory from one location to another - /// Where to copy the bytes - /// Where to read the bytes - /// Number of bytes to copy - [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] - public static void CopyUnsafe(byte* dest, byte* src, uint count) - { - Contract.Requires(dest != null && src != null); - -#if USE_NATIVE_MEMORY_OPERATORS - NativeMethods.memmove(dest, src, new UIntPtr(count)); -#else - if (count >= 16) - { - do - { - *((int*)(dest + 0)) = *((int*)(src + 0)); - *((int*)(dest + 4)) = *((int*)(src + 4)); - *((int*)(dest + 8)) = *((int*)(src + 8)); - *((int*)(dest + 12)) = *((int*)(src + 12)); - dest += 16; - src += 16; - } - 
while ((count -= 16) >= 16); - } - if (count > 0) - { - if ((count & 8) != 0) - { - *((int*)(dest + 0)) = *((int*)(src + 0)); - *((int*)(dest + 4)) = *((int*)(src + 4)); - dest += 8; - src += 8; - } - if ((count & 4) != 0) - { - *((int*)dest) = *((int*)src); - dest += 4; - src += 4; - } - if ((count & 2) != 0) - { - *((short*)dest) = *((short*)src); - dest += 2; - src += 2; - } - if ((count & 1) != 0) - { - *dest = *src; - } - } -#endif - } - - /// Retourne l'offset de la première différence trouvée entre deux buffers de même taille - /// Pointeur sur le premier buffer (de taille égale à 'count') - /// Taille du premier buffer (en octets) - /// Pointeur sur le deuxième buffer (de taille égale à 'count') - /// Taille du deuxième buffer (en octets) - /// Offset vers le premier élément qui diffère, ou -1 si les deux buffers sont identiques - [SecurityCritical, ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static int CompareUnsafe(byte* left, uint leftCount, byte* right, uint rightCount) - { - if (leftCount == 0) return rightCount == 0 ? 0 : -1; - if (rightCount == 0) return +1; - - Contract.Requires(left != null && right != null); - -#if USE_NATIVE_MEMORY_OPERATORS - int c = NativeMethods.memcmp(left, right, new UIntPtr(leftCount < rightCount ? leftCount : rightCount)); - if (c != 0) return c; - return (int)leftCount - (int)rightCount; -#else - - // On va scanner par segments de 8, en continuant tant qu'ils sont identiques. - // Dés qu'on tombe sur un segment de 8 différent, on backtrack au début du segment, et on poursuit en mode octet par octet - // Recherche la première position où les octets diffèrent, et retourne left[POS] - right[POS]. - // Si tous les octets sont identiques, retourne 0 - - byte* start = left; - - - // OPTIMISATION DE LA MORT QUI TUE - // Si on calcul le XOR entre les blocs de 8 bytes, chaque byte identique deviendra 0. 
- // Si le XOR total n'est pas 0, on regarde a quel endroit se trouve le premier byte non-0, et cela nous donne l'offset de la différence - - // Données identiques: - // left : "11 22 33 44 55 66 77 88" => 0x8877665544332211 - // right: "11 22 33 44 55 66 77 88" => 0x8877665544332211 - // left XOR right => 0x8877665544332211 ^ 0x8877665544332211 = 0 - - // Différence - // left : "11 22 33 44 55 66 77 88" => 0x8877665544332211 - // right: "11 22 33 44 55 AA BB CC" => 0xCCBBAA5544332211 - // left XOR right =0x8877665544332211 ^ 0xCCBBAA5544332211 = 0x44CCCC0000000000 - // le premier byte différent de 0 est le byte 5 (note: on part de la fin, offset 0 !) qui est 0xCC - - // d'abord, on compare 8 bytes par 8 bytes - while (count >= 8) - { - // XOR les deux segments - // => s'il sont identiques, on obtient 0 - // => sinon, le premier byte non 0 (big-endian) indiquera l'offset de la différence - ulong k = *((ulong*)left) ^ *((ulong*)right); - - if (k != 0) - { // on a trouvé une différence, mais cela pourrait être n'importe quel byte - //System.Diagnostics.Trace.WriteLine("Found mistmatch\n\t\t0x" + k.ToString("x16") + " between\n\t\t0x" + ((ulong*)left)[0].ToString("x16") + " and\n\t\t0x" + ((ulong*)right)[0].ToString("x16")); - int p = 0; - while ((k & 0xFF) == 0) - { - ++p; - k >>= 8; - } - //System.Diagnostics.Trace.WriteLine("First differing byte at +" + p + " => " + left[p] + " != " + right[p]); - return left[p] - right[p]; - } - left += 8; - right += 8; - count -= 8; - } - - // la taille restante est forcément entre 0 et 7 - if (count >= 4) - { - if (*((uint*)left) != *((uint*)right)) - { // on a trouvé une différence, mais cela pourrait être n'importe quel byte - goto compare_tail; - } - left += 4; - right += 4; - count -= 4; - } - - // la taille restante est forcément entre 0 et 3 - - compare_tail: - while (count-- > 0) - { - int n = *(left++) - *(right++); - if (n != 0) return n; - } - return 0; -#endif - } - - public static void FillUnsafe(byte* ptr, uint 
count, byte filler) - { - if (count == 0) return; - if (ptr == null) throw new ArgumentNullException("ptr"); - -#if USE_NATIVE_MEMORY_OPERATORS - NativeMethods.memset(ptr, filler, new UIntPtr(count)); - -#else - if (filler == 0) - { - while (count-- > 0) *ptr++ = 0; - } - else - { - while (count-- > 0) *ptr++ = filler; - } -#endif - } - - public static int ComputeHashCode(ref Slice slice) - { - if (slice.Array == null) return 0; - fixed (byte* ptr = slice.Array) - { - return ComputeHashCodeUnsafe(checked(ptr + slice.Offset), checked((uint)slice.Count)); - } - } - - public static int ComputeHashCode(ref USlice slice) - { - if (slice.Data == null) return 0; - return ComputeHashCodeUnsafe(slice.Data, slice.Count); - } - - /// Compute the hash code of a byte buffer - /// Buffer - /// Number of bytes in the buffer - /// A 32-bit signed hash code calculated from all the bytes in the segment. - public static int ComputeHashCodeUnsafe(byte* bytes, uint count) - { - //note: bytes is allowed to be null only if count == 0 - Contract.Requires(count == 0 || bytes != null); - - //TODO: use a better hash algorithm? (xxHash, CityHash, SipHash, ...?) - // => will be called a lot when Slices are used as keys in an hash-based dictionary (like Dictionary) - // => won't matter much for *ordered* dictionary that will probably use IComparer.Compare(..) instead of the IEqalityComparer.GetHashCode()/Equals() combo - // => we don't need a cryptographic hash, just something fast and suitable for use with hashtables... 
- // => probably best to select an algorithm that works on 32-bit or 64-bit chunks - - // : unoptimized 32 bits FNV-1a implementation - uint h = 2166136261; // FNV1 32 bits offset basis - while (count-- > 0) - { - h = (h ^ *bytes++) * 16777619; // FNV1 32 prime - } - return (int)h; - // - } - -#if USE_NATIVE_MEMORY_OPERATORS - - internal class SafeLocalAllocHandle : SafeBuffer - { - public static SafeLocalAllocHandle InvalidHandle - { - get {return new SafeLocalAllocHandle();} - } - - - private SafeLocalAllocHandle() - : base(true) - { } - - public SafeLocalAllocHandle(IntPtr handle) - : base(true) - { } - - [SecurityCritical] - protected override bool ReleaseHandle() - { - return NativeMethods.LocalFree(base.handle) == IntPtr.Zero; - } - - } - - [SuppressUnmanagedCodeSecurity] - internal static unsafe class NativeMethods - { - // C/C++ .NET - // --------------------------------- - // void* byte* (or IntPtr) - // size_t UIntPtr (or IntPtr) - // int int - // char byte - - /// Compare characters in two buffers. - /// First buffer. - /// Second buffer. - /// Number of bytes to compare. - /// The return value indicates the relationship between the buffers. - [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl, SetLastError = false)] - public static extern int memcmp(byte* buf1, byte* buf2, UIntPtr count); - - /// Moves one buffer to another. - /// Destination object. - /// Source object. - /// Number of bytes to copy. - /// The value of dest. - /// Copies count bytes from src to dest. If some regions of the source area and the destination overlap, both functions ensure that the original source bytes in the overlapping region are copied before being overwritten. - [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl, SetLastError = false)] - public static extern byte* memmove(byte* dest, byte* src, UIntPtr count); - - /// Sets buffers to a specified character. 
- /// Pointer to destination - /// Character to set - /// Number of characters - /// memset returns the value of dest. - /// The memset function sets the first count bytes of dest to the character c. - [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl, SetLastError = false)] - public static extern byte* memset(byte* dest, int ch, UIntPtr count); - - [DllImport("kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)] - public static extern SafeLocalAllocHandle LocalAlloc(uint uFlags, UIntPtr uBytes); - - [DllImport("kernel32.dll", SetLastError = true), ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] - public static extern IntPtr LocalFree(IntPtr hMem); - - [DllImport("kernel32.dll")] - public static extern IntPtr LocalReAlloc(IntPtr hMem, UIntPtr uBytes, uint uFlags); - - - } - -#endif - - } -} diff --git a/FoundationDB.Storage.Memory/Utils/UnmanagedMemoryHeap.cs b/FoundationDB.Storage.Memory/Utils/UnmanagedMemoryHeap.cs deleted file mode 100644 index 17140839b..000000000 --- a/FoundationDB.Storage.Memory/Utils/UnmanagedMemoryHeap.cs +++ /dev/null @@ -1,382 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Utils -{ - using FoundationDB.Client; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Runtime.InteropServices; - using System.Text; - - /// Allocate of unmanage memory pages - [DebuggerDisplay("Used={m_memoryUsage}, PageSize={m_pageSize}, Pages={m_pages.Count}")] - public unsafe sealed class UnmanagedMemoryHeap : IDisposable - { - // Allocator strategy: - // To keep it simple, we have several pages that get filled one by one - // If a page is too small to fit the next allocation, a new one is allocated - // Large objects (more than half the size of the memory page) are allocated seperately on their own - - /// Default size for new pages - private const uint DefaultPageSize = 1024 * 1024; - - /// Default alignment for pointers (note: 8 minimum) - private static readonly uint DefaultAlignment = (uint) Math.Max(IntPtr.Size, 8); - - [DebuggerDisplay("Id={m_id}, Usage={this.Used} / {m_size}, Free={m_size-m_nextFree}, Ptr={m_handle}"), DebuggerTypeProxy(typeof(Page.DebugView))] - internal sealed unsafe class Page : IDisposable - { - - private readonly int m_id; - private IntPtr m_handle; - private uint m_size; - - private byte* m_begin; - private uint m_nextFree; - - public Page(int id, IntPtr handle, uint size) - { - Contract.Requires(handle != IntPtr.Zero && size > 0); - - m_id = id; - m_handle = handle; - m_size = size; - - m_begin = (byte*)handle; - - // fill with zeroes ! 
- UnmanagedHelpers.FillUnsafe(m_begin, size, 0); - - GC.AddMemoryPressure(size); - - Contract.Ensures(m_handle != IntPtr.Zero && m_size > 0 && m_nextFree == 0); - } - - ~Page() - { - Dispose(false); - } - - public byte* Start { get { return m_begin; } } - - public int Id { get { return m_id; } } - - public uint Size { get { return m_size; } } - - public uint Used { get { return m_nextFree; } } - - public uint Remaining { get { return m_size - m_nextFree; } } - - public bool Alive { get { return m_handle != IntPtr.Zero; } } - - private uint GetAlignmentOffset(uint alignment) - { - if (alignment <= 1) return 0; - uint r = m_nextFree & (alignment - 1); - return r == 0 ? 0 : (alignment - r); - } - - public bool CanFit(uint size, uint alignment) - { - Contract.Requires(size > 0); - - return m_nextFree + size + GetAlignmentOffset(alignment) <= m_size; - } - - public byte* Allocate(uint size, uint alignment) - { - Contract.Requires(size > 0); - - uint offset = GetAlignmentOffset(alignment); - - uint pos = m_nextFree + offset; - byte* ptr = m_begin + pos; - m_nextFree = pos + size; - - Contract.Ensures(ptr != null && ptr >= m_begin && ptr <= m_begin + m_size && m_nextFree <= m_size); - return ptr; - } - - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } - - private void Dispose(bool disposing) - { - try - { - } - finally - { - GC.RemoveMemoryPressure(m_size); - m_size = 0; - m_begin = null; - m_nextFree = 0; - - var handle = m_handle; - if (handle != IntPtr.Zero) Marshal.FreeHGlobal(handle); - m_handle = IntPtr.Zero; - } - } - - internal byte[] GetBytes() - { - if (m_handle == IntPtr.Zero) throw new ObjectDisposedException(this.GetType().Name); - - var tmp = new byte[this.Used]; - Marshal.Copy(m_handle, tmp, 0, tmp.Length); - return tmp; - } - - private sealed class DebugView - { - private readonly Page m_page; - - public DebugView(Page page) - { - m_page = page; - } - - public uint Size { get { return m_page.Size; } } - - public byte[] Data - { 
- get { return m_page.GetBytes(); } - } - } - - } - - // HACKHACKHACK - private readonly List m_pages = new List(); - - /// Current page used by the heap - private Page m_current = null; - - /// Default size for each memory page - private readonly uint m_pageSize; - - /// Default pointer alignment - private readonly uint m_alignment = DefaultAlignment; - - /// Total size of memory allocated from this heap - private long m_memoryUsage; - - #region Constructors... - - public UnmanagedMemoryHeap() - : this(0, 0) - { } - - public UnmanagedMemoryHeap(uint pageSize) - : this(pageSize, 0) - { } - - public UnmanagedMemoryHeap(uint pageSize, uint alignment) - { - if (pageSize > (1 << 30)) throw new ArgumentOutOfRangeException("pageSize", "Page size cannot be larger than 1 GB"); - if (pageSize == 0) pageSize = DefaultPageSize; - if (m_alignment == 0) m_alignment = DefaultAlignment; - - m_pageSize = pageSize; - m_alignment = alignment; - } - - #endregion - - #region Public Properties... - - public uint PageSize { get { return m_pageSize; } } - - public int PageCount { get { return m_pages.Count; } } - - internal IReadOnlyList Pages { get { return m_pages; } } - - public uint Alignment { get { return m_alignment; } } - - public long MemoryUsage { get { return m_memoryUsage; } } - - #endregion - - private Page AllocateNewPage(uint pageSize) - { - Page page; - try - { } - finally - { - var handle = IntPtr.Zero; - try - { - Contract.Assert(pageSize <= 1 << 30); - handle = Marshal.AllocHGlobal((int)pageSize); - page = new Page(m_pages.Count, handle, pageSize); - } - catch (Exception) - { - if (handle != IntPtr.Zero) Marshal.FreeHGlobal(handle); - throw; - } - - m_memoryUsage += pageSize; - m_pages.Add(page); - } - return page; - } - - /// Allocate a new slice of unmanaged memory - /// Size (in bytes) of the slice. Must be greater than zero. - /// Slice pointing to the newly allocated memory. 
- public byte* Allocate(uint size) - { - // even though the caller don't require alignemnt, we still want to align to a multiple of 2 so that at least memory moves/cmps are aligned on a WORD boundary. - return Allocate(size, 2); - } - - public byte* AllocateAligned(uint size) - { - // align using the platform's pointer size (4 on x86, 8 on x64) - return Allocate(size, m_alignment); - } - - private byte* Allocate(uint size, uint align) - { - Contract.Requires(align == 1 || (align & (align - 1)) == 0); // only accept alignemnts that are a power of 2 ! - - if (size == 0) throw new ArgumentOutOfRangeException("size", "Cannot allocate zero bytes"); - - Page page; - if (size > (m_pageSize >> 2)) - { // big data go into its own page - page = AllocateNewPage(size); - } - else - { // use the current page - page = m_current; - if (page == null || !page.CanFit(size, align)) - { // need to allocate a new page - page = AllocateNewPage(m_pageSize); - m_current = page; - } - } - Contract.Assert(page != null && page.Remaining >= size); - - byte* ptr = page.Allocate(size, align); - if (ptr == null) throw new OutOfMemoryException(); - return ptr; - } - - /// Copy the content of an unmanaged slice of memory - /// Slice of unmanaged memory to copy - /// New slice pointing to the copied bytes in the allocator memory - public USlice Memoize(USlice data) - { - return Memoize(data, 1); - } - - /// Copy the content of an unmanaged slice of memory, starting at an aligned address - /// Slice of unmanaged memory to copy - /// New slice pointing to the copied bytes in the allocator memory. The start address should be aligned to either 4 or 8 bytes, depending on the platform architecture. - public USlice MemoizeAligned(USlice data) - { - return Memoize(data, m_alignment); - } - - /// Copy the content of an unmanaged slice of memory, using a specific alignment - /// Slice of unmanaged memory to copy - /// Required memory alignment. MUST BE A POWER OF 2 ! 
- /// New slice pointing to the copied bytes in the allocator memory. The start address should be aligned to either 4 or 8 bytes, depending on the platform architecture. - private USlice Memoize(USlice data, uint align) - { - if (data.Count == 0) return default(USlice); - byte* ptr = Allocate(data.Count, align); - if (ptr == null) throw new OutOfMemoryException(); - UnmanagedHelpers.CopyUnsafe(ptr, data); - return new USlice(ptr, data.Count); - } - - public USlice Memoize(Slice data) - { - return Memoize(data, 1); - } - - public USlice MemoizeAligned(Slice data) - { - return Memoize(data, m_alignment); - } - - private USlice Memoize(Slice data, uint align) - { - if (data.Count < 0 || data.Offset < 0) throw new InvalidOperationException("Cannot allocate less than zero bytes"); - if (data.Count == 0) return default(USlice); - byte* ptr = Allocate((uint)data.Count, align); - if (ptr == null) throw new OutOfMemoryException(); - Marshal.Copy(data.Array, data.Offset, new IntPtr(ptr), data.Count); - return new USlice(ptr, (uint)data.Count); - } - - public void Dispose() - { - foreach (var page in m_pages) - { - if (page.Alive) page.Dispose(); - } - m_pages.Clear(); - - GC.SuppressFinalize(this); - } - - public void Dump(bool detailed = false) - { - Console.WriteLine("Dumping arena state:"); - long used = 0; - foreach (var page in m_pages) - { - Console.WriteLine("- Page #" + page.Id + " (Used=" + page.Used + " / " + page.Size + ", " + (page.Remaining * 100.0 / page.Size).ToString("N1") + "% free)"); - used += page.Used; - var data = page.GetBytes(); - if (detailed) - { - var sb = new StringBuilder(">"); - var txt = detailed ? new StringBuilder(32) : null; - for (int i = 0; i < data.Length; i++) - { - byte b = data[i]; - sb.Append(' ').Append(b.ToString("X2")); - if (detailed) txt.Append(b < 32 || b >= 254 ? '.' 
: (char)b); - - if (i % 32 == 31) - { - if (detailed) sb.Append("\t").Append(txt.ToString()); - txt.Clear(); - sb.Append("\r\n>"); - } - } - Console.WriteLine(sb.ToString()); - } - } - Console.WriteLine("> Memory usage: " + m_memoryUsage.ToString("N0") + " total, " + used.ToString("N0") + " used"); - } - - public void DumpToDisk(string path) - { - path = System.IO.Path.GetFullPath(path); - Console.WriteLine("> Dumping heap content on disk ({0} bytes): {1}", m_memoryUsage, path); - using (var fs = new System.IO.FileStream(path, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.ReadWrite, 4096, System.IO.FileOptions.None)) - { - foreach (var page in m_pages) - { - var data = page.GetBytes(); - fs.Write(data, 0, data.Length); - } - } - } - } - -} diff --git a/FoundationDB.Storage.Memory/Utils/UnmanagedSliceBuilder.cs b/FoundationDB.Storage.Memory/Utils/UnmanagedSliceBuilder.cs deleted file mode 100644 index c12b6008c..000000000 --- a/FoundationDB.Storage.Memory/Utils/UnmanagedSliceBuilder.cs +++ /dev/null @@ -1,476 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Utils -{ - using FoundationDB.Client; - using System; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Runtime.InteropServices; - - /// Unmanaged slice builder backed by a pinned managed buffer - /// This class is not thread-safe. - [DebuggerDisplay("Count={m_count}, Capacity={m_capacity}"), DebuggerTypeProxy(typeof(UnmanagedSliceBuilder.DebugView))] - public unsafe sealed class UnmanagedSliceBuilder : IDisposable - { - private static readonly byte[] s_empty = new byte[0]; - - //TODO: define a good default value for this. 
- public const uint DEFAULT_CAPACITY = 1024; - - /// Managed buffer used to store the values - private byte[] m_buffer; - /// Pinned address of the buffer - private byte* m_data; - /// Number of bytes currently written to the buffer - private uint m_count; - /// GC handle used to pin the managed buffer - private GCHandle m_handle; - - #region Constuctors... - - public UnmanagedSliceBuilder() - : this(0) - { } - - public UnmanagedSliceBuilder(uint capacity) - { - if (capacity == 0) - { - m_buffer = s_empty; - } - else - { - GrowBuffer(capacity); - } - } - - public UnmanagedSliceBuilder(USlice slice) - : this(slice.Data, slice.Count) - { } - - public UnmanagedSliceBuilder(Slice slice) - { - if (slice.Count < 0 || slice.Offset < 0) ThrowMalformedManagedSlice(); - - uint size = (uint)slice.Count; - if (size > 0) - { - if (slice.Array == null || slice.Array.Length < slice.Offset + slice.Count) ThrowMalformedManagedSlice(); - GrowBuffer(size); - - fixed (byte* ptr = slice.Array) - { - UnmanagedHelpers.CopyUnsafe(this.Data, ptr + slice.Offset, size); - } - m_count = size; - } - } - - private static void ThrowMalformedManagedSlice() - { - throw new ArgumentException("Malformed slice", "slice"); - } - - public UnmanagedSliceBuilder(byte* data, uint size) - { - if (data == null && size != 0) throw new ArgumentNullException("data"); - if (size == 0) - { - m_buffer = s_empty; - } - else - { - GrowBuffer(size); - UnmanagedHelpers.CopyUnsafe(m_data, data, size); - m_count = size; - } - } - - ~UnmanagedSliceBuilder() - { - Dispose(false); - } - - #endregion - - #region Public Properties... - - /// Gets the managed buffer - public byte[] Buffer - { - get { return m_buffer; } - } - - /// Gets a pointer to the first byte in the buffer - public byte* Data - { - get { return m_data; } - } - - /// Gets the number of bytes written to the buffer - public uint Count - { - get { return m_count; } - } - - /// Checks if the builder is empty. 
- public bool Empty - { - get { return m_count == 0; } - } - - /// Gets the current capacity of the buffer - public uint Capacity - { - get { return m_buffer == null ? 0U : (uint)m_buffer.Length; } - } - - /// Gets or sets the byte at the specified offset - /// Offset from the start of the buffer (0-based) - /// Value of the byte at this offset - /// if is outside the current size of the buffer - public byte this[uint offset] - { - get - { - if (offset >= m_count) ThrowIndexOutOfRange(); - return this.Data[offset]; - } - set - { - if (offset >= m_count) ThrowIndexOutOfRange(); - this.Data[offset] = value; - } - } - - #endregion - - /// Grow the buffer to be able to hold the specified number of bytes - /// Minimum capacity required - /// The buffer may be resize to more than - private void GrowBuffer(uint required) - { - try - { } - finally - { - if (!m_handle.IsAllocated) - { // initial allocation of the buffer - uint newsize = UnmanagedHelpers.NextPowerOfTwo(Math.Max(required, DEFAULT_CAPACITY)); - var buffer = new byte[newsize]; - m_buffer = buffer; - m_count = 0; - } - else - { // resize an existing buffer - uint newsize = (uint)m_buffer.Length; - newsize = UnmanagedHelpers.NextPowerOfTwo(Math.Max(required, newsize << 1)); - if (newsize > int.MaxValue) - { // cannot alloc more than 2GB in managed code! 
- newsize = int.MaxValue; - if (newsize < required) throw new OutOfMemoryException("Cannot grow slice builder above 2GB"); - } - // temporary release the handle - m_data = null; - m_handle.Free(); - // resize to the new capacity, and re-pin - Array.Resize(ref m_buffer, (int)newsize); - } - m_handle = GCHandle.Alloc(m_buffer, GCHandleType.Pinned); - m_data = (byte*)m_handle.AddrOfPinnedObject(); - } - Contract.Ensures(m_buffer != null && m_handle.IsAllocated && m_data != null && m_count >= 0 && m_count <= m_buffer.Length, "GrowBuffer corruption"); - } - - public void Clear() - { - if (m_buffer == null) ThrowAlreadyDisposed(); - m_count = 0; - } - - private byte* AllocateInternal(uint size, bool zeroed) - { - if (m_buffer == null) ThrowAlreadyDisposed(); - Contract.Requires(size != 0, "size == 0"); - - Contract.Assert(m_buffer != null && m_count <= m_buffer.Length, "Builder is corrupted"); - uint remaining = checked(((uint)m_buffer.Length) - m_count); - if (remaining < size) - { - GrowBuffer(m_count + size); - } - - uint pos = m_count; - m_count = pos + size; - byte* ptr = this.Data + pos; - if (zeroed) UnmanagedHelpers.FillUnsafe(ptr, size, 0); - return ptr; - } - - public USlice Allocate(uint size, bool zeroed = false) - { - if (size == 0) return default(USlice); - return new USlice(AllocateInternal(size, zeroed), size); - } - - public void Append(byte* source, uint size) - { - if (size == 0) return; - if (source == null) ThrowInvalidSource(); - - byte* ptr = AllocateInternal(size, zeroed: false); - Contract.Assert(ptr != null, "AllocateInternal() => null"); - UnmanagedHelpers.CopyUnsafe(ptr, source, size); - } - - public void Append(USlice source) - { - if (source.Count == 0) return; - if (source.Data == null) ThrowInvalidSource(); - - byte* ptr = AllocateInternal(source.Count, zeroed: false); - Contract.Assert(ptr != null); - UnmanagedHelpers.CopyUnsafe(ptr, source); - } - - public void Append(Slice source) - { - if (source.Count > 0) - { - if (source.Array == 
null || source.Offset < 0) ThrowInvalidSource(); - - var ptr = AllocateInternal((uint)source.Count, zeroed: false); - Contract.Assert(ptr != null, "AllocateInternal() => null"); - UnmanagedHelpers.CopyUnsafe(ptr, source); - } - } - - public void Set(USlice source) - { - m_count = 0; - if (source.Count > 0) - { - if (source.Data == null) ThrowInvalidSource(); - - var ptr = AllocateInternal(source.Count, zeroed: false); - Contract.Assert(ptr != null); - UnmanagedHelpers.CopyUnsafe(ptr, source); - } - } - - public void Set(Slice source) - { - m_count = 0; - if (source.Count > 0) - { - if (source.Array == null || source.Offset < 0) ThrowInvalidSource(); - - var ptr = AllocateInternal((uint)source.Count, zeroed: false); - Contract.Assert(ptr != null); - UnmanagedHelpers.CopyUnsafe(ptr, source); - } - } - - public void Resize(uint newSize, byte filler) - { - if (m_buffer == null) ThrowAlreadyDisposed(); - if (newSize <= m_count) - { - m_count = newSize; - } - else - { - if (newSize > m_buffer.Length) GrowBuffer(newSize); - - // fill the extra space with zeroes - uint pos = m_count; - uint r = checked((uint)m_buffer.Length - newSize); - if (r > 0) - { - UnmanagedHelpers.FillUnsafe(this.Data + pos, r, 0); - } - m_count = newSize; - } - } - - public void Swap(UnmanagedSliceBuilder other) - { - if (other == null) throw new ArgumentNullException("other"); - if (m_buffer == null || other.m_buffer == null) ThrowAlreadyDisposed(); - - try - { } - finally - { - var handle = other.m_handle; - var buffer = other.m_buffer; - var data = other.m_data; - var sz = other.m_count; - - other.m_handle = m_handle; - other.m_buffer = buffer; - other.m_data = m_data; - other.m_count = m_count; - - m_handle = handle; - m_buffer = buffer; - m_data = data; - m_count = sz; - } - } - - /// Gets the current content of the buffer as a managed slice - /// Slice that points to the content of the buffer. 
- /// Caution: do NOT use the returned slice after the buffer has been changed (it can get relocated during a resize) - public Slice ToSlice() - { - if (m_buffer == null) ThrowAlreadyDisposed(); - return m_count > 0 ? Slice.Create(m_buffer, 0, (int)m_count) : default(Slice); - } - - /// Gets the current content of the buffer as an unmanaged slice - /// Slice that points to the content of the buffer. - /// Caution: do NOT use the returned slice after the buffer has been changed (it can get relocated during a resize) - public USlice ToUSlice() - { - if (m_buffer == null) ThrowAlreadyDisposed(); - return m_count > 0 ? new USlice(m_data, m_count) : default(USlice); - } - - /// Gets the a segment of the buffer as an unmanaged slice - /// Number of bytes (from the start) to return - /// Slice that points to the specified segment of the buffer. - /// Caution: do NOT use the returned slice after the buffer has been changed (it can get relocated during a resize) - public USlice ToUSlice(uint count) - { - return ToUSlice(0, count); - } - - /// Gets the a segment of the buffer as an unmanaged slice - /// Offset from the start of the buffer - /// Number of bytes to return - /// Slice that points to the specified segment of the buffer. - /// Caution: do NOT use the returned slice after the buffer has been changed (it can get relocated during a resize) - public USlice ToUSlice(uint offset, uint count) - { - if (m_buffer == null) ThrowAlreadyDisposed(); - if (offset > m_count) throw new ArgumentOutOfRangeException("offset"); - if (count == 0) return default(USlice); - if (offset + count > m_count) throw new ArgumentOutOfRangeException("count"); - - return new USlice(m_data + offset, count); - } - - /// Copy the content of the buffer to an unmanaged pointer, and return the corresponding slice - /// Destination pointer where the buffer will be copied. Caution: the destination buffer must be large enough! 
- /// Slice that points to the copied segment in the destination buffer - internal USlice CopyTo(byte* dest) - { - return CopyTo(dest, m_count); - } - - /// Copy a segment of the buffer to an unmanaged pointer, and return the corresponding slice - /// Number of bytes to copy - /// Destination pointer where the buffer will be copied. Caution: the destination buffer must be large enough! - /// Slice that points to the copied segment in the destination buffer - internal USlice CopyTo(byte* dest, uint count) - { - if (m_buffer == null) ThrowAlreadyDisposed(); - if (count == 0) return default(USlice); - if (count > m_count) throw new ArgumentOutOfRangeException("count"); - - UnmanagedHelpers.CopyUnsafe(dest, m_data, count); - return new USlice(dest, count); - } - - public byte[] GetBytes() - { - if (m_buffer == null) ThrowAlreadyDisposed(); - - var tmp = new byte[m_count]; - if (m_count >= 0) - { - fixed (byte* ptr = tmp) - { - UnmanagedHelpers.CopyUnsafe(ptr, m_data, m_count); - } - } - return tmp; - } - - private static void ThrowIndexOutOfRange() - { - throw new IndexOutOfRangeException(); - } - - private void ThrowAlreadyDisposed() - { - throw new ObjectDisposedException(this.GetType().Name); - } - - private void ThrowInvalidSource() - { - throw new ArgumentException("The source memory location is invalid"); - } - - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } - - private void Dispose(bool disposing) - { - if (disposing) - { - if (m_handle.IsAllocated) - { - m_handle.Free(); - } - - } - m_data = null; - m_buffer = null; - m_count = 0; - } - - private sealed class DebugView - { - private readonly UnmanagedSliceBuilder m_builder; - - public DebugView(UnmanagedSliceBuilder builder) - { - m_builder = builder; - } - - public byte[] Data - { - get - { - if (m_builder.m_count == 0) return s_empty; - var buffer = m_builder.m_buffer; - if (buffer == null) return null; - var tmp = new byte[m_builder.Count]; - 
System.Buffer.BlockCopy(m_builder.m_buffer, 0, tmp, 0, tmp.Length); - return tmp; - } - } - - public uint Count - { - get { return m_builder.m_count; } - } - - public uint Capacity - { - get { return m_builder.m_buffer == null ? 0U : (uint)m_builder.m_buffer.Length; } - } - - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Utils/UnmanagedSliceBuilderPool.cs b/FoundationDB.Storage.Memory/Utils/UnmanagedSliceBuilderPool.cs deleted file mode 100644 index 4341ad4c8..000000000 --- a/FoundationDB.Storage.Memory/Utils/UnmanagedSliceBuilderPool.cs +++ /dev/null @@ -1,160 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. -// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Utils -{ - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Diagnostics.Contracts; - using System.Threading; - - [DebuggerDisplay("Count={m_buckets.Count}, Used={m_memoryUsed}, Loaned={m_memoryLoaned}")] - public class UnmanagedSliceBuilderPool : IDisposable - { - private readonly Stack m_buckets; - private uint m_initialCapacity; - private int m_maxCount; - private long m_memoryUsed; - private long m_memoryLoaned; - private bool m_disposed; - - public UnmanagedSliceBuilderPool(uint initialCapacity, int maxCount) - { - m_initialCapacity = UnmanagedHelpers.NextPowerOfTwo(Math.Min(initialCapacity, 64)); - m_maxCount = Math.Max(1, maxCount); - m_buckets = new Stack(Math.Max(m_maxCount, 100)); - } - - /// Subscription to a scratch buffer from the pool. DO NOT COPY BY VALUE! - /// Copying this struct by value will break the pool. Only use it as a local variable in a method or in a class ! 
- public struct Subscription : IDisposable - { - private readonly UnmanagedSliceBuilderPool m_pool; - private UnmanagedSliceBuilder m_builder; - - internal Subscription(UnmanagedSliceBuilderPool pool, UnmanagedSliceBuilder builder) - { - Contract.Requires(pool != null && builder != null); - m_pool = pool; - m_builder = builder; - } - - public UnmanagedSliceBuilder Builder - { - get - { - Contract.Assert(m_builder != null, "Builder already returned to the pool"); - return m_builder; - } - } - - public bool Allocated - { - get { return m_builder != null; } - } - - public void Dispose() - { -#pragma warning disable 420 - var builder = Interlocked.Exchange(ref m_builder, null); -#pragma warning restore 420 - if (builder != null && builder.Buffer != null) - { - m_pool.Return(builder); - } - } - } - - /// Borrow a builder from this pool - /// Builder subscription that should be disposed as soon as the buffer is not needed anymore - /// ALWAYS wrap the subscription in a using(...) statement! Do NOT pass the subscription by value, always pass the Builder by reference ! Do NOT keep a reference on the Builder or reuse it after it has been disposed! Do NOT return or store slices that point to this buffer! 
- public Subscription Use() - { - UnmanagedSliceBuilder builder = null; - lock (m_buckets) - { - if (m_disposed) ThrowDisposed(); - - while(m_buckets.Count > 0) - { - builder = m_buckets.Pop(); - if (builder != null && builder.Buffer != null) - { - Interlocked.Add(ref m_memoryUsed, -((long)builder.Capacity)); - Contract.Assert(m_memoryUsed >= 0, "m_memoryUsed desync"); - break; - } - builder = null; - } - } - if (builder == null) - { - builder = new UnmanagedSliceBuilder(m_initialCapacity); - } - Interlocked.Add(ref m_memoryLoaned, builder.Capacity); - Contract.Assert(builder != null && builder.Buffer != null); - return new Subscription(this, builder); - } - - /// Return a builder into the pool - /// Builder that is no longer in use - internal void Return(UnmanagedSliceBuilder builder) - { - if (m_disposed || builder == null) return; - - lock (m_buckets) - { - if (m_disposed) return; - - var size = builder.Capacity; - Contract.Assert(size == UnmanagedHelpers.NextPowerOfTwo(size), "builder size should always be a power of two"); - - Interlocked.Add(ref m_memoryLoaned, -((long)builder.Capacity)); - Contract.Assert(m_memoryUsed >= 0, "m_memoryLoaned desync"); - - if (m_buckets.Count < m_maxCount) - { - m_buckets.Push(builder); - Interlocked.Add(ref m_memoryUsed, builder.Capacity); - } - else - { - builder.Dispose(); - } - } - } - - private static void ThrowDisposed() - { - throw new InvalidOperationException("The buffer pool as already been disposed"); - } - - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } - - protected virtual void Dispose(bool disposing) - { - if (!m_disposed) - { - m_disposed = true; - if (disposing) - { - lock (m_buckets) - { - foreach(var builder in m_buckets) - { - if (builder != null) builder.Dispose(); - } - m_buckets.Clear(); - } - } - } - } - } - -} diff --git a/FoundationDB.Storage.Memory/Utils/UnmanagedSliceReader.cs b/FoundationDB.Storage.Memory/Utils/UnmanagedSliceReader.cs deleted file mode 100644 index 
d08557912..000000000 --- a/FoundationDB.Storage.Memory/Utils/UnmanagedSliceReader.cs +++ /dev/null @@ -1,305 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013, Doxense SARL -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -#endregion - -namespace FoundationDB.Storage.Memory.Utils -{ - using System; - using System.Diagnostics.Contracts; - - /// Helper class that holds the internal state used to parse tuples from slices - public unsafe class UnmanagedSliceReader - { - - /// Creates a reader on a byte array - public static UnmanagedSliceReader FromSlice(USlice slice) - { - return new UnmanagedSliceReader(slice.Data, slice.Count); - } - - /// Creates a reader on a segment of a byte array - public static UnmanagedSliceReader FromAddress(byte* address, ulong count) - { - if (address == null && count != 0) throw new ArgumentException("Address cannot be null"); - return new UnmanagedSliceReader(address, count); - } - - /// Buffer containing the tuple being parsed - public readonly byte* Base; - - /// Current position inside the buffer - public byte* Position; - - /// Memory address just after the end of the buffer - public readonly byte* End; - - private UnmanagedSliceReader(byte* address, ulong count) - { - Contract.Requires(address != null || count == 0); - - this.Base = address; - this.Position = address; - this.End = address + count; - - Contract.Ensures(this.End >= this.Base && this.Position >= this.Base && this.Position <= this.End); - } - - public ulong Offset { get { return this.Position > this.Base ? (ulong)(this.Position - this.Base) : 0UL; } } - - public ulong Length { get { return (ulong)(this.End - this.Base); } } - - /// Returns true if there are more bytes to parse - public bool HasMore { get { return this.Position < this.End; } } - - /// Returns the number of bytes remaining - public ulong Remaining { get { return this.Position < this.End ? 
(ulong)(this.End - this.Position) : 0UL; } } - - /// Ensure that there are at least bytes remaining in the buffer - public void EnsureBytes(uint count) - { - if (checked(this.Position + count) > this.End) throw new ArgumentOutOfRangeException("count"); - } - - /// Return the value of the next byte in the buffer, or -1 if we reached the end - public int PeekByte() - { - byte* p = this.Position; - return p < this.End ? (*p) : -1; - } - - /// Skip the next bytes of the buffer - public void Skip(uint count) - { - EnsureBytes(count); - - this.Position += count; - } - - /// Read the next byte from the buffer - public byte ReadByte() - { - EnsureBytes(1); - - byte* p = this.Position; - byte b = *p; - this.Position = checked(p + 1); - return b; - } - - /// Read the next bytes from the buffer - public USlice ReadBytes(uint count) - { - EnsureBytes(count); - - byte* p = this.Position; - this.Position = checked(p + count); - return new USlice(p, count); - } - - /// Read the next 2 bytes as an unsigned 16-bit integer, encoded in little-endian - public ushort ReadFixed16() - { - EnsureBytes(2); - byte* p = this.Position; - this.Position = checked(p + 2); - return (ushort)(p[0] | p[1] << 8); - } - - /// Read the next 4 bytes as an unsigned 32-bit integer, encoded in little-endian - public uint ReadFixed32() - { - EnsureBytes(4); - byte* p = this.Position; - this.Position = checked(p + 4); - return p[0] | (uint)p[1] << 8 | (uint)p[2] << 16 | (uint)p[3] << 24; - } - - /// Read the next 8 bytes as an unsigned 64-bit integer, encoded in little-endian - public ulong ReadFixed64() - { - EnsureBytes(8); - byte* p = this.Position; - this.Position = checked(p + 8); - return p[0] | (ulong)p[1] << 8 | (ulong)p[2] << 16 | (ulong)p[3] << 24 | (ulong)p[4] << 32 | (ulong)p[5] << 40 | (ulong)p[6] << 48 | (ulong)p[7] << 56; - } - - /// Reads a 7-bit encoded unsigned int (aka 'Varint16') from the buffer, and advances the cursor - /// Can read up to 3 bytes from the input - public ushort 
ReadVarint16() - { - byte* p = this.Position; - byte* end = this.End; - uint n = 1; - - if (p >= end) goto overflow; - uint b = p[0]; - uint res = b & 0x7F; - if (res < 0x80) { goto done; } - - if (p >= end) goto overflow; - b = p[1]; - res |= (b & 0x7F) << 7; - if (b < 0x80) { n = 2; goto done; } - - // the third byte should only have 2 bits worth of data - if (p >= end) goto overflow; - b = p[2]; - if (b >= 0x4) throw new FormatException("Varint is bigger than 16 bits"); - res |= (b & 0x2) << 14; - n = 3; - - done: - this.Position = checked(p + n); - return (ushort)res; - - overflow: - throw new FormatException("Truncated Varint"); - } - - /// Reads a 7-bit encoded unsigned int (aka 'Varint32') from the buffer, and advances the cursor - /// Can read up to 5 bytes from the input - public uint ReadVarint32() - { - byte* p = this.Position; - byte* end = this.End; - uint n = 1; - - if (p >= end) goto overflow; - uint b = p[0]; - uint res = b & 0x7F; - if (res < 0x80) { goto done; } - - if (p >= end) goto overflow; - b = p[1]; - res |= (b & 0x7F) << 7; - if (b < 0x80) { n = 2; goto done; } - - if (p >= end) goto overflow; - b = p[2]; - res |= (b & 0x7F) << 14; - if (b < 0x80) { n = 3; goto done; } - - if (p >= end) goto overflow; - b = p[3]; - res |= (b & 0x7F) << 21; - if (b < 0x80) { n = 4; goto done; } - - // the fifth byte should only have 4 bits worth of data - if (p >= end) goto overflow; - b = p[4]; - if (b >= 0x20) throw new FormatException("Varint is bigger than 32 bits"); - res |= (b & 0x1F) << 28; - n = 5; - - done: - this.Position = checked(p + n); - return res; - - overflow: - throw new FormatException("Truncated Varint"); - } - - /// Reads a 7-bit encoded unsigned long (aka 'Varint32') from the buffer, and advances the cursor - /// Can read up to 10 bytes from the input - public ulong ReadVarint64() - { - byte* p = this.Position; - byte* end = this.End; - uint n = 1; - - if (p >= end) goto overflow; - uint b = p[0]; - ulong res = b & 0x7F; - if (res < 
0x80) { goto done; } - - if (p >= end) goto overflow; - b = p[1]; - res |= (b & 0x7F) << 7; - if (b < 0x80) { n = 2; goto done; } - - if (p >= end) goto overflow; - b = p[2]; - res |= (b & 0x7F) << 14; - if (b < 0x80) { n = 3; goto done; } - - if (p >= end) goto overflow; - b = p[3]; - res |= (b & 0x7F) << 21; - if (b < 0x80) { n = 4; goto done; } - - if (p >= end) goto overflow; - b = p[4]; - res |= (b & 0x7F) << 28; - if (b < 0x80) { n = 5; goto done; } - - if (p >= end) goto overflow; - b = p[5]; - res |= (b & 0x7F) << 35; - if (b < 0x80) { n = 6; goto done; } - - if (p >= end) goto overflow; - b = p[6]; - res |= (b & 0x7F) << 42; - if (b < 0x80) { n = 7; goto done; } - - if (p >= end) goto overflow; - b = p[7]; - res |= (b & 0x7F) << 49; - if (b < 0x80) { n = 8; goto done; } - - if (p >= end) goto overflow; - b = p[8]; - res |= (b & 0x7F) << 56; - if (b < 0x80) { n = 9; goto done; } - - // the tenth byte should only have 1 bit worth of data - if (p >= end) goto overflow; - b = p[4]; - if (b > 1) throw new FormatException("Varint is bigger than 64 bits"); - res |= (b & 0x1) << 63; - n = 10; - - done: - this.Position = checked(p + n); - return res; - - overflow: - throw new FormatException("Truncated Varint"); - } - - /// Reads a variable sized slice, by first reading its size (stored as a Varint32) and then the data - public USlice ReadVarbytes() - { - uint size = ReadVarint32(); - if (size > uint.MaxValue) throw new FormatException("Malformed variable size"); - if (size == 0) return USlice.Nil; - return ReadBytes(size); - } - - } - -} diff --git a/FoundationDB.Storage.Memory/Utils/UnmanagedSliceStream.cs b/FoundationDB.Storage.Memory/Utils/UnmanagedSliceStream.cs deleted file mode 100644 index 03bac62b9..000000000 --- a/FoundationDB.Storage.Memory/Utils/UnmanagedSliceStream.cs +++ /dev/null @@ -1,228 +0,0 @@ -#region Copyright (c) 2013-2014, Doxense SAS. All rights reserved. 
-// See License.MD for license information -#endregion - -namespace FoundationDB.Storage.Memory.Utils -{ - using System; - using System.Diagnostics.Contracts; - using System.IO; - using System.Runtime.InteropServices; - using System.Text; - using System.Threading; - using System.Threading.Tasks; - - /// Stream that can read from a slice of unmanaged memory - public unsafe sealed class UnmanagedSliceStream : Stream - { - private byte* m_begin; - private uint m_pos; - private readonly uint m_size; - private Task m_lastReadTask; - - internal UnmanagedSliceStream(USlice slice) - { - Contract.Requires(slice.Count == 0 || slice.Data != null); - - m_begin = slice.Data; - m_size = slice.Count; - } - - internal UnmanagedSliceStream(byte* data, uint size) - { - Contract.Requires(size == 0 || data != null); - - m_begin = data; - m_size = size; - } - - public override bool CanRead - { - get { return m_begin != null; } - } - - public override bool CanSeek - { - get { return true; } - } - - public override bool CanWrite - { - get { return false; } - } - - public override void Flush() - { - //NO OP - } - - public override Task FlushAsync(CancellationToken cancellationToken) - { - return TaskHelpers.CompletedTask; - } - - public override long Length - { - get { return m_size; } - } - - public override long Position - { - get - { - return m_pos; - } - set - { - Seek(value, SeekOrigin.Begin); - } - } - - public override int ReadByte() - { - if (m_begin == null) ThrowDisposed(); - uint pos = m_pos; - if (pos < m_size) - { - int res = (int)m_begin[pos]; - m_pos = pos + 1; - return res; - } - return -1; - } - - public override int Read(byte[] buffer, int offset, int count) - { - if (m_begin == null) ThrowDisposed(); - - if (buffer == null) throw new ArgumentNullException("buffer"); - if (offset < 0 || offset > buffer.Length) throw new ArgumentOutOfRangeException("offset"); - if (count < 0 || offset + count >= buffer.Length) throw new ArgumentOutOfRangeException("count"); - - uint pos = 
m_pos; - if (pos >= m_size) return 0; // EOF - - uint chunk; - checked { chunk = (uint)Math.Max(m_size - pos, count); } - - if (chunk > 0) - { - fixed (byte* ptr = buffer) - { - UnmanagedHelpers.CopyUnsafe(ptr + offset, m_begin + pos, chunk); - } - m_pos = pos + chunk; - } - return (int)chunk; - } - - public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) - { - if (cancellationToken.IsCancellationRequested) - { - return TaskHelpers.FromCancellation(cancellationToken); - } - try - { - int result = Read(buffer, offset, count); - var t = m_lastReadTask; - return t != null && t.Result == result ? t : (t = Task.FromResult(result)); - } - catch (Exception e) - { - return TaskHelpers.FromException(e); - } - } - - public override long Seek(long offset, SeekOrigin origin) - { - if (m_begin == null) ThrowDisposed(); - - switch (origin) - { - case SeekOrigin.Begin: - { - if (offset < 0) throw new ArgumentOutOfRangeException("offset", "Offset cannot be less than zero"); - offset = offset >= m_size ? m_size : offset; - Contract.Assert(offset >= 0); - m_pos = (uint)offset; - return m_pos; - } - case SeekOrigin.End: - { - if (offset < 0) throw new ArgumentOutOfRangeException("offset", "Offset cannot be less than zero"); - offset += m_size; - offset = offset < 0 ? 0 : offset; - Contract.Assert(offset >= 0); - m_pos = (uint)offset; - return m_pos; - } - case SeekOrigin.Current: - { - offset += m_pos; - offset = offset < 0 ? 0 : offset >= m_size ? 
m_size : offset; - Contract.Assert(offset >= 0); - m_pos = (uint)offset; - return m_pos; - } - default: - { - throw new ArgumentOutOfRangeException("origin"); - } - } - } - - public override void SetLength(long value) - { - throw new NotSupportedException("Cannot set the length of a read-only stream"); - } - - public override void Write(byte[] buffer, int offset, int count) - { - throw new NotSupportedException("Cannot write to a read-only stream"); - } - - public override Task WriteAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken cancellationToken) - { - return TaskHelpers.FromException(new NotSupportedException("Cannot write to a read-only stream")); - } - - public byte[] ToArray() - { - if (m_begin == null) ThrowDisposed(); - var tmp = new byte[m_size]; - if (tmp.Length > 0) - { - fixed (byte* ptr = tmp) - { - UnmanagedHelpers.CopyUnsafe(ptr, m_begin, (uint)m_size); - } - } - return tmp; - } - - public FoundationDB.Client.Slice ToSlice() - { - return FoundationDB.Client.Slice.Create(this.ToArray()); - } - - public USlice ToUSlice() - { - if (m_begin == null) ThrowDisposed(); - return new USlice(m_begin, m_size); - } - - private void ThrowDisposed() - { - throw new ObjectDisposedException(this.GetType().Name); - } - - protected override void Dispose(bool disposing) - { - m_begin = null; - m_pos = m_size; - m_lastReadTask = null; - } - } - -} diff --git a/FoundationDB.Storage.Memory/packages.config b/FoundationDB.Storage.Memory/packages.config deleted file mode 100644 index 9f072bdfc..000000000 --- a/FoundationDB.Storage.Memory/packages.config +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/FoundationDB.Tests.Sandbox/FoundationDB.Tests.Sandbox.csproj b/FoundationDB.Tests.Sandbox/FoundationDB.Tests.Sandbox.csproj index fc90c83f3..ac7b1aa2e 100644 --- a/FoundationDB.Tests.Sandbox/FoundationDB.Tests.Sandbox.csproj +++ b/FoundationDB.Tests.Sandbox/FoundationDB.Tests.Sandbox.csproj @@ -1,5 +1,5 @@  - + Debug 
@@ -9,7 +9,7 @@ Properties FoundationDB.Tests.Sandbox fdbsandbox - v4.5 + v4.6.1 512 ..\ @@ -64,7 +64,6 @@ - - + \ No newline at end of file diff --git a/FoundationDB.Tests/FoundationDB.Tests.csproj.DotSettings b/FoundationDB.Tests/FoundationDB.Tests.csproj.DotSettings new file mode 100644 index 000000000..96331d1ce --- /dev/null +++ b/FoundationDB.Tests/FoundationDB.Tests.csproj.DotSettings @@ -0,0 +1,2 @@ + + CSharp72 \ No newline at end of file diff --git a/FoundationDB.Tests/KeyFacts.cs b/FoundationDB.Tests/KeyFacts.cs index 80a77704e..caaf0ccc5 100644 --- a/FoundationDB.Tests/KeyFacts.cs +++ b/FoundationDB.Tests/KeyFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,15 +28,14 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client.Tests { - using FoundationDB.Client; - using FoundationDB.Layers.Tuples; - using NUnit.Framework; using System; using System.Collections.Generic; using System.Linq; - using System.Text; using System.Threading; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using FoundationDB.Client; + using NUnit.Framework; [TestFixture] public class KeyFacts @@ -67,25 +66,25 @@ public void Test_FdbKey_Constants() public void Test_FdbKey_Increment() { - var key = FdbKey.Increment(Slice.FromAscii("Hello")); - Assert.That(key.ToAscii(), Is.EqualTo("Hellp")); + var key = FdbKey.Increment(Slice.FromByteString("Hello")); + Assert.That(key.ToString(), Is.EqualTo("Hellp")); - key = FdbKey.Increment(Slice.FromAscii("Hello\x00")); - Assert.That(key.ToAscii(), Is.EqualTo("Hello\x01")); + key = FdbKey.Increment(Slice.FromByteString("Hello\x00")); + Assert.That(key.ToString(), Is.EqualTo("Hello<01>")); - key = FdbKey.Increment(Slice.FromAscii("Hello\xFE")); - Assert.That(key.ToAscii(), Is.EqualTo("Hello\xFF")); + key = 
FdbKey.Increment(Slice.FromByteString("Hello\xFE")); + Assert.That(key.ToString(), Is.EqualTo("Hello")); - key = FdbKey.Increment(Slice.FromAscii("Hello\xFF")); - Assert.That(key.ToAscii(), Is.EqualTo("Hellp"), "Should remove training \\xFF"); + key = FdbKey.Increment(Slice.FromByteString("Hello\xFF")); + Assert.That(key.ToString(), Is.EqualTo("Hellp"), "Should remove training \\xFF"); - key = FdbKey.Increment(Slice.FromAscii("A\xFF\xFF\xFF")); - Assert.That(key.ToAscii(), Is.EqualTo("B"), "Should truncate all trailing \\xFFs"); + key = FdbKey.Increment(Slice.FromByteString("A\xFF\xFF\xFF")); + Assert.That(key.ToString(), Is.EqualTo("B"), "Should truncate all trailing \\xFFs"); // corner cases Assert.That(() => FdbKey.Increment(Slice.Nil), Throws.InstanceOf().With.Property("ParamName").EqualTo("slice")); Assert.That(() => FdbKey.Increment(Slice.Empty), Throws.InstanceOf()); - Assert.That(() => FdbKey.Increment(Slice.FromAscii("\xFF")), Throws.InstanceOf()); + Assert.That(() => FdbKey.Increment(Slice.FromByteString("\xFF")), Throws.InstanceOf()); } @@ -110,8 +109,10 @@ public void Test_FdbKey_Merge() } // corner cases - Assert.That(() => FdbKey.Merge(Slice.Empty, default(Slice[])), Throws.InstanceOf().With.Property("ParamName").EqualTo("keys")); - Assert.That(() => FdbKey.Merge(Slice.Empty, default(IEnumerable)), Throws.InstanceOf().With.Property("ParamName").EqualTo("keys")); + // ReSharper disable AssignNullToNotNullAttribute + Assert.That(() => FdbKey.Merge(Slice.Empty, default(Slice[])), Throws.ArgumentNullException.With.Property("ParamName").EqualTo("keys")); + Assert.That(() => FdbKey.Merge(Slice.Empty, default(IEnumerable)), Throws.ArgumentNullException.With.Property("ParamName").EqualTo("keys")); + // ReSharper restore AssignNullToNotNullAttribute } [Test] @@ -200,129 +201,129 @@ public async Task Test_FdbKey_Batched() } [Test] - public void Test_FdbKeyRange_Contains() + public void Test_KeyRange_Contains() { - FdbKeyRange range; + KeyRange range; // ["", 
"") - range = FdbKeyRange.Empty; + range = KeyRange.Empty; Assert.That(range.Contains(Slice.Empty), Is.False); - Assert.That(range.Contains(Slice.FromAscii("\x00")), Is.False); - Assert.That(range.Contains(Slice.FromAscii("hello")), Is.False); - Assert.That(range.Contains(Slice.FromAscii("\xFF")), Is.False); + Assert.That(range.Contains(Slice.FromByteString("\x00")), Is.False); + Assert.That(range.Contains(Slice.FromByteString("hello")), Is.False); + Assert.That(range.Contains(Slice.FromByteString("\xFF")), Is.False); // ["", "\xFF" ) - range = FdbKeyRange.Create(Slice.Empty, Slice.FromAscii("\xFF")); + range = KeyRange.Create(Slice.Empty, Slice.FromByteString("\xFF")); Assert.That(range.Contains(Slice.Empty), Is.True); - Assert.That(range.Contains(Slice.FromAscii("\x00")), Is.True); - Assert.That(range.Contains(Slice.FromAscii("hello")), Is.True); - Assert.That(range.Contains(Slice.FromAscii("\xFF")), Is.False); + Assert.That(range.Contains(Slice.FromByteString("\x00")), Is.True); + Assert.That(range.Contains(Slice.FromByteString("hello")), Is.True); + Assert.That(range.Contains(Slice.FromByteString("\xFF")), Is.False); // ["\x00", "\xFF" ) - range = FdbKeyRange.Create(Slice.FromAscii("\x00"), Slice.FromAscii("\xFF")); + range = KeyRange.Create(Slice.FromByteString("\x00"), Slice.FromByteString("\xFF")); Assert.That(range.Contains(Slice.Empty), Is.False); - Assert.That(range.Contains(Slice.FromAscii("\x00")), Is.True); - Assert.That(range.Contains(Slice.FromAscii("hello")), Is.True); - Assert.That(range.Contains(Slice.FromAscii("\xFF")), Is.False); + Assert.That(range.Contains(Slice.FromByteString("\x00")), Is.True); + Assert.That(range.Contains(Slice.FromByteString("hello")), Is.True); + Assert.That(range.Contains(Slice.FromByteString("\xFF")), Is.False); // corner cases - Assert.That(FdbKeyRange.Create(Slice.FromAscii("A"), Slice.FromAscii("A")).Contains(Slice.FromAscii("A")), Is.False, "Equal bounds"); + Assert.That(KeyRange.Create(Slice.FromByteString("A"), 
Slice.FromByteString("A")).Contains(Slice.FromByteString("A")), Is.False, "Equal bounds"); } [Test] - public void Test_FdbKeyRange_Test() + public void Test_KeyRange_Test() { const int BEFORE = -1, INSIDE = 0, AFTER = +1; - FdbKeyRange range; + KeyRange range; // range: [ "A", "Z" ) - range = FdbKeyRange.Create(Slice.FromAscii("A"), Slice.FromAscii("Z")); + range = KeyRange.Create(Slice.FromByteString("A"), Slice.FromByteString("Z")); // Excluding the end: < "Z" - Assert.That(range.Test(Slice.FromAscii("\x00"), endIncluded: false), Is.EqualTo(BEFORE)); - Assert.That(range.Test(Slice.FromAscii("@"), endIncluded: false), Is.EqualTo(BEFORE)); - Assert.That(range.Test(Slice.FromAscii("A"), endIncluded: false), Is.EqualTo(INSIDE)); - Assert.That(range.Test(Slice.FromAscii("Z"), endIncluded: false), Is.EqualTo(AFTER)); - Assert.That(range.Test(Slice.FromAscii("Z\x00"), endIncluded: false), Is.EqualTo(AFTER)); - Assert.That(range.Test(Slice.FromAscii("\xFF"), endIncluded: false), Is.EqualTo(AFTER)); + Assert.That(range.Test(Slice.FromByteString("\x00"), endIncluded: false), Is.EqualTo(BEFORE)); + Assert.That(range.Test(Slice.FromByteString("@"), endIncluded: false), Is.EqualTo(BEFORE)); + Assert.That(range.Test(Slice.FromByteString("A"), endIncluded: false), Is.EqualTo(INSIDE)); + Assert.That(range.Test(Slice.FromByteString("Z"), endIncluded: false), Is.EqualTo(AFTER)); + Assert.That(range.Test(Slice.FromByteString("Z\x00"), endIncluded: false), Is.EqualTo(AFTER)); + Assert.That(range.Test(Slice.FromByteString("\xFF"), endIncluded: false), Is.EqualTo(AFTER)); // Including the end: <= "Z" - Assert.That(range.Test(Slice.FromAscii("\x00"), endIncluded: true), Is.EqualTo(BEFORE)); - Assert.That(range.Test(Slice.FromAscii("@"), endIncluded: true), Is.EqualTo(BEFORE)); - Assert.That(range.Test(Slice.FromAscii("A"), endIncluded: true), Is.EqualTo(INSIDE)); - Assert.That(range.Test(Slice.FromAscii("Z"), endIncluded: true), Is.EqualTo(INSIDE)); - 
Assert.That(range.Test(Slice.FromAscii("Z\x00"), endIncluded: true), Is.EqualTo(AFTER)); - Assert.That(range.Test(Slice.FromAscii("\xFF"), endIncluded: true), Is.EqualTo(AFTER)); - - range = FdbKeyRange.Create(FdbTuple.EncodeKey("A"), FdbTuple.EncodeKey("Z")); - Assert.That(range.Test(FdbTuple.Create("@")), Is.EqualTo((BEFORE))); - Assert.That(range.Test(FdbTuple.Create("A")), Is.EqualTo((INSIDE))); - Assert.That(range.Test(FdbTuple.Create("Z")), Is.EqualTo((AFTER))); - Assert.That(range.Test(FdbTuple.Create("Z"), endIncluded: true), Is.EqualTo(INSIDE)); + Assert.That(range.Test(Slice.FromByteString("\x00"), endIncluded: true), Is.EqualTo(BEFORE)); + Assert.That(range.Test(Slice.FromByteString("@"), endIncluded: true), Is.EqualTo(BEFORE)); + Assert.That(range.Test(Slice.FromByteString("A"), endIncluded: true), Is.EqualTo(INSIDE)); + Assert.That(range.Test(Slice.FromByteString("Z"), endIncluded: true), Is.EqualTo(INSIDE)); + Assert.That(range.Test(Slice.FromByteString("Z\x00"), endIncluded: true), Is.EqualTo(AFTER)); + Assert.That(range.Test(Slice.FromByteString("\xFF"), endIncluded: true), Is.EqualTo(AFTER)); + + range = KeyRange.Create(TuPack.EncodeKey("A"), TuPack.EncodeKey("Z")); + Assert.That(range.Test(TuPack.EncodeKey("@")), Is.EqualTo((BEFORE))); + Assert.That(range.Test(TuPack.EncodeKey("A")), Is.EqualTo((INSIDE))); + Assert.That(range.Test(TuPack.EncodeKey("Z")), Is.EqualTo((AFTER))); + Assert.That(range.Test(TuPack.EncodeKey("Z"), endIncluded: true), Is.EqualTo(INSIDE)); } [Test] - public void Test_FdbKeyRange_StartsWith() + public void Test_KeyRange_StartsWith() { - FdbKeyRange range; + KeyRange range; // "abc" => [ "abc", "abd" ) - range = FdbKeyRange.StartsWith(Slice.FromAscii("abc")); - Assert.That(range.Begin, Is.EqualTo(Slice.FromAscii("abc"))); - Assert.That(range.End, Is.EqualTo(Slice.FromAscii("abd"))); + range = KeyRange.StartsWith(Slice.FromByteString("abc")); + Assert.That(range.Begin, Is.EqualTo(Slice.FromByteString("abc"))); + 
Assert.That(range.End, Is.EqualTo(Slice.FromByteString("abd"))); // "" => ArgumentException - Assert.That(() => FdbKeyRange.PrefixedBy(Slice.Empty), Throws.InstanceOf()); + Assert.That(() => KeyRange.PrefixedBy(Slice.Empty), Throws.InstanceOf()); // "\xFF" => ArgumentException - Assert.That(() => FdbKeyRange.PrefixedBy(Slice.FromAscii("\xFF")), Throws.InstanceOf()); + Assert.That(() => KeyRange.PrefixedBy(Slice.FromByteString("\xFF")), Throws.InstanceOf()); // null => ArgumentException - Assert.That(() => FdbKeyRange.PrefixedBy(Slice.Nil), Throws.InstanceOf()); + Assert.That(() => KeyRange.PrefixedBy(Slice.Nil), Throws.InstanceOf()); } [Test] - public void Test_FdbKeyRange_PrefixedBy() + public void Test_KeyRange_PrefixedBy() { - FdbKeyRange range; + KeyRange range; // "abc" => [ "abc\x00", "abd" ) - range = FdbKeyRange.PrefixedBy(Slice.FromAscii("abc")); - Assert.That(range.Begin, Is.EqualTo(Slice.FromAscii("abc\x00"))); - Assert.That(range.End, Is.EqualTo(Slice.FromAscii("abd"))); + range = KeyRange.PrefixedBy(Slice.FromByteString("abc")); + Assert.That(range.Begin, Is.EqualTo(Slice.FromByteString("abc\x00"))); + Assert.That(range.End, Is.EqualTo(Slice.FromByteString("abd"))); // "" => ArgumentException - Assert.That(() => FdbKeyRange.PrefixedBy(Slice.Empty), Throws.InstanceOf()); + Assert.That(() => KeyRange.PrefixedBy(Slice.Empty), Throws.InstanceOf()); // "\xFF" => ArgumentException - Assert.That(() => FdbKeyRange.PrefixedBy(Slice.FromAscii("\xFF")), Throws.InstanceOf()); + Assert.That(() => KeyRange.PrefixedBy(Slice.FromByteString("\xFF")), Throws.InstanceOf()); // null => ArgumentException - Assert.That(() => FdbKeyRange.PrefixedBy(Slice.Nil), Throws.InstanceOf()); + Assert.That(() => KeyRange.PrefixedBy(Slice.Nil), Throws.InstanceOf()); } [Test] - public void Test_FdbKeyRange_FromKey() + public void Test_KeyRange_FromKey() { - FdbKeyRange range; + KeyRange range; // "" => [ "", "\x00" ) - range = FdbKeyRange.FromKey(Slice.Empty); + range = 
KeyRange.FromKey(Slice.Empty); Assert.That(range.Begin, Is.EqualTo(Slice.Empty)); - Assert.That(range.End, Is.EqualTo(Slice.FromAscii("\x00"))); + Assert.That(range.End, Is.EqualTo(Slice.FromByteString("\x00"))); // "abc" => [ "abc", "abc\x00" ) - range = FdbKeyRange.FromKey(Slice.FromAscii("abc")); - Assert.That(range.Begin, Is.EqualTo(Slice.FromAscii("abc"))); - Assert.That(range.End, Is.EqualTo(Slice.FromAscii("abc\x00"))); + range = KeyRange.FromKey(Slice.FromByteString("abc")); + Assert.That(range.Begin, Is.EqualTo(Slice.FromByteString("abc"))); + Assert.That(range.End, Is.EqualTo(Slice.FromByteString("abc\x00"))); // "\xFF" => [ "\xFF", "\xFF\x00" ) - range = FdbKeyRange.FromKey(Slice.FromAscii("\xFF")); - Assert.That(range.Begin, Is.EqualTo(Slice.FromAscii("\xFF"))); - Assert.That(range.End, Is.EqualTo(Slice.FromAscii("\xFF\x00"))); + range = KeyRange.FromKey(Slice.FromByteString("\xFF")); + Assert.That(range.Begin, Is.EqualTo(Slice.FromByteString("\xFF"))); + Assert.That(range.End, Is.EqualTo(Slice.FromByteString("\xFF\x00"))); - Assert.That(() => FdbKeyRange.FromKey(Slice.Nil), Throws.InstanceOf()); + Assert.That(() => KeyRange.FromKey(Slice.Nil), Throws.InstanceOf()); } [Test] @@ -336,51 +337,51 @@ public void Test_FdbKey_PrettyPrint() Assert.That(FdbKey.Dump(Slice.FromByte(0)), Is.EqualTo("<00>")); Assert.That(FdbKey.Dump(Slice.FromByte(255)), Is.EqualTo("")); - Assert.That(FdbKey.Dump(Slice.Create(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 })), Is.EqualTo("<00><01><02><03><04><05><06><07>")); - Assert.That(FdbKey.Dump(Slice.Create(new byte[] { 255, 254, 253, 252, 251, 250, 249, 248 })), Is.EqualTo("")); + Assert.That(FdbKey.Dump(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 }.AsSlice()), Is.EqualTo("<00><01><02><03><04><05><06><07>")); + Assert.That(FdbKey.Dump(new byte[] { 255, 254, 253, 252, 251, 250, 249, 248 }.AsSlice()), Is.EqualTo("")); Assert.That(FdbKey.Dump(Slice.FromString("hello")), Is.EqualTo("hello")); Assert.That(FdbKey.Dump(Slice.FromString("héllø")), 
Is.EqualTo("hll")); // tuples should be decoded properly - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(123)), Is.EqualTo("(123,)"), "Singleton tuples should end with a ','"); - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(Slice.FromAscii("hello"))), Is.EqualTo("('hello',)"), "ASCII strings should use single quotes"); - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey("héllø")), Is.EqualTo("(\"héllø\",)"), "Unicode strings should use double quotes"); - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(Slice.Create(new byte[] { 1, 2, 3 }))), Is.EqualTo("(<01 02 03>,)")); - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(123, 456)), Is.EqualTo("(123, 456)"), "Elements should be separated with a space, and not end up with ','"); - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(true, false, default(object))), Is.EqualTo("(1, 0, null)"), "Booleans should be displayed as numbers, and null should be in lowercase"); //note: even though it's tempting to using Python's "Nil", it's not very ".NETty" - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(1.0d, Math.PI, Math.E)), Is.EqualTo("(1, 3.1415926535897931, 2.7182818284590451)"), "Doubles should used dot and have full precision (17 digits)"); - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(1.0f, (float)Math.PI, (float)Math.E)), Is.EqualTo("(1, 3.14159274, 2.71828175)"), "Singles should used dot and have full precision (10 digits)"); + Assert.That(FdbKey.Dump(TuPack.EncodeKey(123)), Is.EqualTo("(123,)"), "Singleton tuples should end with a ','"); + Assert.That(FdbKey.Dump(TuPack.EncodeKey(Slice.FromByteString("hello"))), Is.EqualTo("(`hello`,)"), "ASCII strings should use single back quotes"); + Assert.That(FdbKey.Dump(TuPack.EncodeKey("héllø")), Is.EqualTo("(\"héllø\",)"), "Unicode strings should use double quotes"); + Assert.That(FdbKey.Dump(TuPack.EncodeKey(new byte[] { 1, 2, 3 }.AsSlice())), Is.EqualTo("(`<01><02><03>`,)")); + Assert.That(FdbKey.Dump(TuPack.EncodeKey(123, 456)), Is.EqualTo("(123, 456)"), "Elements should be separated with a space, and not 
end up with ','"); + Assert.That(FdbKey.Dump(TuPack.EncodeKey(true, false, default(object))), Is.EqualTo("(1, 0, null)"), "Booleans should be displayed as numbers, and null should be in lowercase"); //note: even though it's tempting to using Python's "Nil", it's not very ".NETty" + Assert.That(FdbKey.Dump(TuPack.EncodeKey(1.0d, Math.PI, Math.E)), Is.EqualTo("(1, 3.1415926535897931, 2.7182818284590451)"), "Doubles should used dot and have full precision (17 digits)"); + Assert.That(FdbKey.Dump(TuPack.EncodeKey(1.0f, (float)Math.PI, (float)Math.E)), Is.EqualTo("(1, 3.14159274, 2.71828175)"), "Singles should used dot and have full precision (10 digits)"); var guid = Guid.NewGuid(); - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(guid)), Is.EqualTo(String.Format("({0},)", guid.ToString("B"))), "GUIDs should be displayed as a string literal, surrounded by {...}, and without quotes"); + Assert.That(FdbKey.Dump(TuPack.EncodeKey(guid)), Is.EqualTo(String.Format("({0},)", guid.ToString("B"))), "GUIDs should be displayed as a string literal, surrounded by {{...}}, and without quotes"); var uuid128 = Uuid128.NewUuid(); - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(uuid128)), Is.EqualTo(String.Format("({0},)", uuid128.ToString("B"))), "Uuid128s should be displayed as a string literal, surrounded by {...}, and without quotes"); + Assert.That(FdbKey.Dump(TuPack.EncodeKey(uuid128)), Is.EqualTo(String.Format("({0},)", uuid128.ToString("B"))), "Uuid128s should be displayed as a string literal, surrounded by {{...}}, and without quotes"); var uuid64 = Uuid64.NewUuid(); - Assert.That(FdbKey.Dump(FdbTuple.EncodeKey(uuid64)), Is.EqualTo(String.Format("({0},)", uuid64.ToString("B"))), "Uuid64s should be displayed as a string literal, surrounded by {...}, and without quotes"); + Assert.That(FdbKey.Dump(TuPack.EncodeKey(uuid64)), Is.EqualTo(String.Format("({0},)", uuid64.ToString("B"))), "Uuid64s should be displayed as a string literal, surrounded by {{...}}, and without quotes"); // ranges 
should be decoded when possible - var key = FdbTuple.ToRange(FdbTuple.Create("hello")); + var key = TuPack.ToRange(STuple.Create("hello")); // "<02>hello<00><00>" .. "<02>hello<00>" Assert.That(FdbKey.PrettyPrint(key.Begin, FdbKey.PrettyPrintMode.Begin), Is.EqualTo("(\"hello\",).<00>")); Assert.That(FdbKey.PrettyPrint(key.End, FdbKey.PrettyPrintMode.End), Is.EqualTo("(\"hello\",).")); - key = FdbKeyRange.StartsWith(FdbTuple.EncodeKey("hello")); + key = KeyRange.StartsWith(TuPack.EncodeKey("hello")); // "<02>hello<00>" .. "<02>hello<01>" Assert.That(FdbKey.PrettyPrint(key.Begin, FdbKey.PrettyPrintMode.Begin), Is.EqualTo("(\"hello\",)")); Assert.That(FdbKey.PrettyPrint(key.End, FdbKey.PrettyPrintMode.End), Is.EqualTo("(\"hello\",) + 1")); - var t = FdbTuple.EncodeKey(123); + var t = TuPack.EncodeKey(123); Assert.That(FdbKey.PrettyPrint(t, FdbKey.PrettyPrintMode.Single), Is.EqualTo("(123,)")); - Assert.That(FdbKey.PrettyPrint(FdbTuple.ToRange(t).Begin, FdbKey.PrettyPrintMode.Begin), Is.EqualTo("(123,).<00>")); - Assert.That(FdbKey.PrettyPrint(FdbTuple.ToRange(t).End, FdbKey.PrettyPrintMode.End), Is.EqualTo("(123,).")); + Assert.That(FdbKey.PrettyPrint(TuPack.ToRange(t).Begin, FdbKey.PrettyPrintMode.Begin), Is.EqualTo("(123,).<00>")); + Assert.That(FdbKey.PrettyPrint(TuPack.ToRange(t).End, FdbKey.PrettyPrintMode.End), Is.EqualTo("(123,).")); } [Test] - public void Test_FdbKeyRange_Intersects() + public void Test_KeyRange_Intersects() { - Func range = (x, y) => FdbKeyRange.Create(Slice.FromByte(x), Slice.FromByte(y)); + Func range = (x, y) => KeyRange.Create(Slice.FromByte(x), Slice.FromByte(y)); #region Not Intersecting... @@ -438,9 +439,9 @@ public void Test_FdbKeyRange_Intersects() } [Test] - public void Test_FdbKeyRange_Disjoint() + public void Test_KeyRange_Disjoint() { - Func range = (x, y) => FdbKeyRange.Create(Slice.FromByte(x), Slice.FromByte(y)); + Func range = (x, y) => KeyRange.Create(Slice.FromByte(x), Slice.FromByte(y)); #region Disjoint... 
diff --git a/FoundationDB.Tests/Layers/BlobFacts.cs b/FoundationDB.Tests/Layers/BlobFacts.cs index 1451328fd..69627f81f 100644 --- a/FoundationDB.Tests/Layers/BlobFacts.cs +++ b/FoundationDB.Tests/Layers/BlobFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,12 +28,11 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Blobs.Tests { + using System; + using System.Threading.Tasks; using FoundationDB.Client; using FoundationDB.Client.Tests; - using FoundationDB.Layers.Tuples; using NUnit.Framework; - using System; - using System.Threading.Tasks; [TestFixture] public class BlobFacts : FdbTest @@ -121,7 +120,7 @@ public async Task Test_FdbBlob_CanAppendLargeChunks() { using (var tr = db.BeginTransaction(this.Cancellation)) { - await blob.AppendAsync(tr, Slice.Create(data)); + await blob.AppendAsync(tr, data.AsSlice()); await tr.CommitAsync(); } } diff --git a/FoundationDB.Tests/Layers/CounterFacts.cs b/FoundationDB.Tests/Layers/CounterFacts.cs index 09c5ceba1..1830fb3af 100644 --- a/FoundationDB.Tests/Layers/CounterFacts.cs +++ b/FoundationDB.Tests/Layers/CounterFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,13 +28,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Counters.Tests { - using FoundationDB.Client.Tests; - using NUnit.Framework; using System; using System.Diagnostics; using System.Linq; using System.Threading; using System.Threading.Tasks; + using FoundationDB.Client.Tests; + using NUnit.Framework; [TestFixture] [Obsolete] @@ -72,7 +72,7 @@ public async Task Bench_FdbCounter_Increment_Sequentially() var c = new FdbHighContentionCounter(db, location); - Console.WriteLine("Doing " + N + " inserts in one thread..."); + Log("Doing " + N + " inserts in one thread..."); var sw = Stopwatch.StartNew(); for (int i = 0; i < N; i++) @@ -81,7 +81,7 @@ public async Task Bench_FdbCounter_Increment_Sequentially() } sw.Stop(); - Console.WriteLine("> " + N + " completed in " + sw.Elapsed.TotalMilliseconds.ToString("N1") + " ms (" + (sw.Elapsed.TotalMilliseconds * 1000 / N).ToString("N0") + " µs/add)"); + Log("> " + N + " completed in " + sw.Elapsed.TotalMilliseconds.ToString("N1") + " ms (" + (sw.Elapsed.TotalMilliseconds * 1000 / N).ToString("N0") + " µs/add)"); #if DEBUG await DumpSubspace(db, location); @@ -108,7 +108,7 @@ public async Task Bench_FdbCounter_Increment_Concurrently() var c = new FdbHighContentionCounter(db, location); - Console.WriteLine("Doing " + W + " x " + B + " inserts in " + W + " threads..."); + Log("Doing " + W + " x " + B + " inserts in " + W + " threads..."); var signal = new TaskCompletionSource(); var done = new TaskCompletionSource(); @@ -128,7 +128,7 @@ public async Task Bench_FdbCounter_Increment_Concurrently() // wait await Task.WhenAll(workers); sw.Stop(); - Console.WriteLine("> " + N + " completed in " + sw.Elapsed.TotalMilliseconds.ToString("N1") + " ms (" + (sw.Elapsed.TotalMilliseconds * 1000 / B).ToString("N0") + " µs/add)"); + Log("> " + N + " completed in " + sw.Elapsed.TotalMilliseconds.ToString("N1") + " ms (" + (sw.Elapsed.TotalMilliseconds * 1000 / B).ToString("N0") + " µs/add)"); long n = await 
c.GetSnapshotAsync(this.Cancellation); if (n != N) diff --git a/FoundationDB.Tests/Layers/DirectoryFacts.cs b/FoundationDB.Tests/Layers/DirectoryFacts.cs index 1a2218329..6966cc54d 100644 --- a/FoundationDB.Tests/Layers/DirectoryFacts.cs +++ b/FoundationDB.Tests/Layers/DirectoryFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,19 +26,21 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion +// ReSharper disable AssignNullToNotNullAttribute #undef ENABLE_LOGGING namespace FoundationDB.Layers.Directories { - using FoundationDB.Client; - using FoundationDB.Client.Tests; - using FoundationDB.Filters.Logging; - using FoundationDB.Layers.Tuples; - using NUnit.Framework; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using FoundationDB.Client; + using FoundationDB.Client.Tests; + using FoundationDB.Filters.Logging; + using NUnit.Framework; + using NUnit.Framework.Constraints; [TestFixture] public class DirectoryFacts : FdbTest @@ -98,7 +100,7 @@ public async Task Test_Allocator() #if ENABLE_LOGGING foreach(var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } #endif } @@ -124,9 +126,9 @@ public async Task Test_CreateOrOpen_Simple() var directory = FdbDirectoryLayer.Create(location); Assert.That(directory.ContentSubspace, Is.Not.Null); - Assert.That(directory.ContentSubspace.Key, Is.EqualTo(location.Key)); + Assert.That(directory.ContentSubspace.GetPrefix(), Is.EqualTo(location.GetPrefix())); Assert.That(directory.NodeSubspace, Is.Not.Null); - Assert.That(directory.NodeSubspace.Key, Is.EqualTo(location.Key + Slice.FromByte(254))); + Assert.That(directory.NodeSubspace.GetPrefix(), Is.EqualTo(location.GetPrefix() + Slice.FromByte(254))); // first call should 
create a new subspace (with a random prefix) FdbDirectorySubspace foo; @@ -160,12 +162,12 @@ public async Task Test_CreateOrOpen_Simple() Assert.That(foo2.Path, Is.EqualTo(new[] { "Foo" })); Assert.That(foo2.Layer, Is.EqualTo(Slice.Empty)); Assert.That(foo2.DirectoryLayer, Is.SameAs(directory)); - Assert.That(foo2.Key, Is.EqualTo(foo.Key), "Second call to CreateOrOpen should return the same subspace"); + Assert.That(foo2.GetPrefix(), Is.EqualTo(foo.GetPrefix()), "Second call to CreateOrOpen should return the same subspace"); #if ENABLE_LOGGING foreach (var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } #endif } @@ -193,7 +195,7 @@ public async Task Test_CreateOrOpen_With_Layer() Assert.That(directory.ContentSubspace, Is.Not.Null); Assert.That(directory.ContentSubspace, Is.EqualTo(location)); Assert.That(directory.NodeSubspace, Is.Not.Null); - Assert.That(directory.NodeSubspace.Key, Is.EqualTo(location.Key + Slice.FromByte(254))); + Assert.That(directory.NodeSubspace.GetPrefix(), Is.EqualTo(location.GetPrefix() + Slice.FromByte(254))); // first call should create a new subspace (with a random prefix) var foo = await directory.CreateOrOpenAsync(logged, new[] { "Foo" }, Slice.FromString("AcmeLayer"), this.Cancellation); @@ -214,13 +216,13 @@ public async Task Test_CreateOrOpen_With_Layer() Assert.That(foo2.Path, Is.EqualTo(new[] { "Foo" })); Assert.That(foo2.Layer.ToUnicode(), Is.EqualTo("AcmeLayer")); Assert.That(foo2.DirectoryLayer, Is.SameAs(directory)); - Assert.That(foo2.Key, Is.EqualTo(foo.Key), "Second call to CreateOrOpen should return the same subspace"); + Assert.That(foo2.GetPrefix(), Is.EqualTo(foo.GetPrefix()), "Second call to CreateOrOpen should return the same subspace"); // opening it with wrong layer id should fail - Assert.Throws(async () => await directory.OpenAsync(logged, new[] { "Foo" }, Slice.FromString("OtherLayer"), this.Cancellation), "Opening with invalid layer id should fail"); + 
Assert.That(async () => await directory.OpenAsync(logged, new[] { "Foo" }, Slice.FromString("OtherLayer"), this.Cancellation), Throws.InstanceOf(), "Opening with invalid layer id should fail"); // opening without specifying a layer should disable the layer check - var foo3 = await directory.OpenAsync(logged, "Foo", layer: Slice.Nil, cancellationToken: this.Cancellation); + var foo3 = await directory.OpenAsync(logged, "Foo", layer: Slice.Nil, ct: this.Cancellation); Assert.That(foo3, Is.Not.Null); Assert.That(foo3.Layer.ToUnicode(), Is.EqualTo("AcmeLayer")); @@ -228,7 +230,7 @@ public async Task Test_CreateOrOpen_With_Layer() Assert.DoesNotThrow(() => foo3.CheckLayer(Slice.FromString("AcmeLayer")), "CheckLayer should not throw if the layer id is correct"); // CheckLayer with the incorrect value should fail - Assert.Throws(() => foo3.CheckLayer(Slice.FromString("OtherLayer")), "CheckLayer should throw if the layer id is not correct"); + Assert.That(() => foo3.CheckLayer(Slice.FromString("OtherLayer")), Throws.InstanceOf(), "CheckLayer should throw if the layer id is not correct"); // CheckLayer with empty string should do nothing foo3.CheckLayer(Slice.Empty); @@ -237,7 +239,7 @@ public async Task Test_CreateOrOpen_With_Layer() #if ENABLE_LOGGING foreach (var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } #endif } @@ -289,7 +291,7 @@ public async Task Test_CreateOrOpen_SubFolder() #if ENABLE_LOGGING foreach (var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } #endif } @@ -332,7 +334,7 @@ public async Task Test_List_SubFolders() subdirs = await directory.ListAsync(logged, new[] { "Foo", "Bar", "Baz" }, this.Cancellation); Assert.That(subdirs, Is.Not.Null); - Assert.That(subdirs.Count, Is.EqualTo(0)); + Assert.That(subdirs.Count, Is.Zero); subdirs = await directory.ListAsync(logged, new[] { "numbers" }, this.Cancellation); Assert.That(subdirs, Is.Not.Null); @@ 
-342,7 +344,7 @@ public async Task Test_List_SubFolders() #if ENABLE_LOGGING foreach (var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } #endif } @@ -423,24 +425,24 @@ public async Task Test_Move_Folder() Assert.That(renamed, Is.Not.Null); Assert.That(renamed.FullName, Is.EqualTo("Bar")); Assert.That(renamed.Path, Is.EqualTo(new[] { "Bar" })); - Assert.That(renamed.Key, Is.EqualTo(original.Key)); + Assert.That(renamed.GetPrefix(), Is.EqualTo(original.GetPrefix())); // opening the old path should fail - Assert.Throws(async () => await directory.OpenAsync(logged, "Foo", this.Cancellation)); + Assert.That(async () => await directory.OpenAsync(logged, "Foo", this.Cancellation), Throws.InstanceOf()); // opening the new path should succeed var folder = await directory.OpenAsync(logged, "Bar", this.Cancellation); Assert.That(folder, Is.Not.Null); Assert.That(folder.FullName, Is.EqualTo(renamed.FullName)); Assert.That(folder.Path, Is.EqualTo(renamed.Path)); - Assert.That(folder.Key, Is.EqualTo(renamed.Key)); + Assert.That(folder.GetPrefix(), Is.EqualTo(renamed.GetPrefix())); // moving the folder under itself should fail - Assert.Throws(async () => await folder.MoveToAsync(logged, new[] { "Bar", "Baz" }, this.Cancellation)); + Assert.That(async () => await folder.MoveToAsync(logged, new[] { "Bar", "Baz" }, this.Cancellation), Throws.InstanceOf()); #if ENABLE_LOGGING foreach (var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } #endif } @@ -478,7 +480,7 @@ public async Task Test_Remove_Folder() //TODO: call ExistsAsync(...) once it is implemented! 
// Removing it a second time should fail - Assert.Throws(async () => await directory.RemoveAsync(logged, path, this.Cancellation), "Removing a non-existent directory should fail"); + Assert.That(async () => await directory.RemoveAsync(logged, path, this.Cancellation), Throws.InstanceOf(), "Removing a non-existent directory should fail"); // TryRemoveAsync @@ -495,12 +497,12 @@ public async Task Test_Remove_Folder() // Corner Cases // removing the root folder is not allowed (too dangerous) - Assert.Throws(async () => await directory.RemoveAsync(logged, new string[0], this.Cancellation), "Attempting to remove the root directory should fail"); + Assert.That(async () => await directory.RemoveAsync(logged, new string[0], this.Cancellation), Throws.InstanceOf(), "Attempting to remove the root directory should fail"); #if ENABLE_LOGGING foreach (var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } #endif } @@ -522,7 +524,7 @@ public async Task Test_Can_Change_Layer_Of_Existing_Directory() var logged = db; #endif - var folder = await directory.CreateAsync(logged, "Test", layer: Slice.FromString("foo"), cancellationToken: this.Cancellation); + var folder = await directory.CreateAsync(logged, "Test", layer: Slice.FromString("foo"), ct: this.Cancellation); #if DEBUG await DumpSubspace(db, location); #endif @@ -537,20 +539,20 @@ public async Task Test_Can_Change_Layer_Of_Existing_Directory() Assert.That(folder2, Is.Not.Null); Assert.That(folder2.Layer.ToUnicode(), Is.EqualTo("bar")); Assert.That(folder2.FullName, Is.EqualTo("Test")); - Assert.That(folder2.Path, Is.EqualTo(FdbTuple.Create("Test"))); - Assert.That(folder2.Key, Is.EqualTo(folder.Key)); + Assert.That(folder2.Path, Is.EqualTo(new [] { "Test" })); + Assert.That(folder2.GetPrefix(), Is.EqualTo(folder.GetPrefix())); // opening the directory with the new layer should succeed - var folder3 = await directory.OpenAsync(logged, "Test", layer: Slice.FromString("bar"), 
cancellationToken: this.Cancellation); + var folder3 = await directory.OpenAsync(logged, "Test", layer: Slice.FromString("bar"), ct: this.Cancellation); Assert.That(folder3, Is.Not.Null); // opening the directory with the old layer should fail - Assert.Throws(async () => await directory.OpenAsync(logged, "Test", layer: Slice.FromString("foo"), cancellationToken: this.Cancellation)); + Assert.That(async () => await directory.OpenAsync(logged, "Test", layer: Slice.FromString("foo"), ct: this.Cancellation), Throws.InstanceOf()); #if ENABLE_LOGGING foreach (var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } #endif } @@ -565,41 +567,41 @@ public async Task Test_Directory_Partitions() await db.ClearRangeAsync(location, this.Cancellation); var directory = FdbDirectoryLayer.Create(location); - Console.WriteLine(directory); + Log(directory); - var partition = await directory.CreateAsync(db, "Foo", Slice.FromAscii("partition"), this.Cancellation); + var partition = await directory.CreateAsync(db, "Foo", Slice.FromStringAscii("partition"), this.Cancellation); // we can't get the partition key directory (because it's a root directory) so we need to cheat a little bit - var partitionKey = FdbSubspace.Copy(partition).Key; - Console.WriteLine(partition); + var partitionKey = partition.Copy().GetPrefix(); + Log(partition); Assert.That(partition, Is.InstanceOf()); - Assert.That(partition.Layer, Is.EqualTo(Slice.FromAscii("partition"))); + Assert.That(partition.Layer, Is.EqualTo(Slice.FromStringAscii("partition"))); Assert.That(partition.FullName, Is.EqualTo("Foo")); Assert.That(partition.Path, Is.EqualTo(new[] { "Foo" }), "Partition's path should be absolute"); Assert.That(partition.DirectoryLayer, Is.Not.SameAs(directory), "Partitions should have their own DL"); - Assert.That(partition.DirectoryLayer.ContentSubspace.Key, Is.EqualTo(partitionKey), "Partition's content should be under the partition's prefix"); - 
Assert.That(partition.DirectoryLayer.NodeSubspace.Key, Is.EqualTo(partitionKey + FdbKey.Directory), "Partition's nodes should be under the partition's prefix"); + Assert.That(partition.DirectoryLayer.ContentSubspace.GetPrefix(), Is.EqualTo(partitionKey), "Partition's content should be under the partition's prefix"); + Assert.That(partition.DirectoryLayer.NodeSubspace.GetPrefix(), Is.EqualTo(partitionKey + FdbKey.Directory), "Partition's nodes should be under the partition's prefix"); var bar = await partition.CreateAsync(db, "Bar", this.Cancellation); - Console.WriteLine(bar); + Log(bar); Assert.That(bar, Is.InstanceOf()); Assert.That(bar.Path, Is.EqualTo(new [] { "Foo", "Bar" }), "Path of directories under a partition should be absolute"); - Assert.That(bar.Key, Is.Not.EqualTo(partitionKey), "{0} should be located under {1}", bar, partition); - Assert.That(bar.Key.StartsWith(partitionKey), Is.True, "{0} should be located under {1}", bar, partition); + Assert.That(bar.GetPrefix(), Is.Not.EqualTo(partitionKey), "{0} should be located under {1}", bar, partition); + Assert.That(bar.GetPrefix().StartsWith(partitionKey), Is.True, "{0} should be located under {1}", bar, partition); var baz = await partition.CreateAsync(db, "Baz", this.Cancellation); - Console.WriteLine(baz); + Log(baz); Assert.That(baz, Is.InstanceOf()); Assert.That(baz.FullName, Is.EqualTo("Foo/Baz")); Assert.That(baz.Path, Is.EqualTo(new[] { "Foo", "Baz" }), "Path of directories under a partition should be absolute"); - Assert.That(baz.Key, Is.Not.EqualTo(partitionKey), "{0} should be located under {1}", baz, partition); - Assert.That(baz.Key.StartsWith(partitionKey), Is.True, "{0} should be located under {1}", baz, partition); + Assert.That(baz.GetPrefix(), Is.Not.EqualTo(partitionKey), "{0} should be located under {1}", baz, partition); + Assert.That(baz.GetPrefix().StartsWith(partitionKey), Is.True, "{0} should be located under {1}", baz, partition); // Rename 'Bar' to 'BarBar' var bar2 = await 
bar.MoveToAsync(db, new[] { "Foo", "BarBar" }, this.Cancellation); - Console.WriteLine(bar2); + Log(bar2); Assert.That(bar2, Is.InstanceOf()); Assert.That(bar2, Is.Not.SameAs(bar)); - Assert.That(bar2.Key, Is.EqualTo(bar.Key)); + Assert.That(bar2.GetPrefix(), Is.EqualTo(bar.GetPrefix())); Assert.That(bar2.FullName, Is.EqualTo("Foo/BarBar")); Assert.That(bar2.Path, Is.EqualTo(new[] { "Foo", "BarBar" })); Assert.That(bar2.DirectoryLayer, Is.SameAs(bar.DirectoryLayer)); @@ -615,14 +617,14 @@ public async Task Test_Directory_Cannot_Move_To_Another_Partition() await db.ClearRangeAsync(location, this.Cancellation); var directory = FdbDirectoryLayer.Create(location); - Console.WriteLine(directory); + Log(directory); - var foo = await directory.CreateAsync(db, "Foo", Slice.FromAscii("partition"), this.Cancellation); - Console.WriteLine(foo); + var foo = await directory.CreateAsync(db, "Foo", Slice.FromStringAscii("partition"), this.Cancellation); + Log(foo); // create a 'Bar' under the 'Foo' partition var bar = await foo.CreateAsync(db, "Bar", this.Cancellation); - Console.WriteLine(bar); + Log(bar); Assert.That(bar.FullName, Is.EqualTo("Foo/Bar")); Assert.That(bar.Path, Is.EqualTo(new string[] { "Foo", "Bar" })); Assert.That(bar.DirectoryLayer, Is.Not.SameAs(directory)); @@ -644,14 +646,14 @@ public async Task Test_Directory_Cannot_Move_To_A_Sub_Partition() await db.ClearRangeAsync(location, this.Cancellation); var directory = FdbDirectoryLayer.Create(location); - Console.WriteLine(directory); + Log(directory); - var outer = await directory.CreateAsync(db, "Outer", Slice.FromAscii("partition"), this.Cancellation); - Console.WriteLine(outer); + var outer = await directory.CreateAsync(db, "Outer", Slice.FromStringAscii("partition"), this.Cancellation); + Log(outer); // create a 'Inner' subpartition under the 'Outer' partition var inner = await outer.CreateAsync(db, "Inner", Slice.FromString("partition"), this.Cancellation); - Console.WriteLine(inner); + Log(inner); 
Assert.That(inner.FullName, Is.EqualTo("Outer/Inner")); Assert.That(inner.Path, Is.EqualTo(new string[] { "Outer", "Inner" })); Assert.That(inner.DirectoryLayer, Is.Not.SameAs(directory)); @@ -679,14 +681,14 @@ public async Task Test_Directory_Cannot_Move_To_A_Sub_Partition() var foo2 = await directory.MoveAsync(db, new[] { "Outer", "Foo" }, new[] { "Outer", "SubFolder", "Foo" }, this.Cancellation); Assert.That(foo2.FullName, Is.EqualTo("Outer/SubFolder/Foo")); Assert.That(foo2.Path, Is.EqualTo(new[] { "Outer", "SubFolder", "Foo" })); - Assert.That(foo2.Key, Is.EqualTo(foo.Key)); + Assert.That(foo2.GetPrefix(), Is.EqualTo(foo.GetPrefix())); // Moving 'Bar' inside the Inner partition itself should work await directory.CreateAsync(db, new[] { "Outer", "Inner", "SubFolder" }, this.Cancellation); // parent of destination folder must already exist when moving... var bar2 = await directory.MoveAsync(db, new[] { "Outer", "Inner", "Bar" }, new[] { "Outer", "Inner", "SubFolder", "Bar" }, this.Cancellation); Assert.That(bar2.FullName, Is.EqualTo("Outer/Inner/SubFolder/Bar")); Assert.That(bar2.Path, Is.EqualTo(new[] { "Outer", "Inner", "SubFolder", "Bar" })); - Assert.That(bar2.Key, Is.EqualTo(bar.Key)); + Assert.That(bar2.GetPrefix(), Is.EqualTo(bar.GetPrefix())); } } @@ -729,7 +731,7 @@ public async Task Test_Renaming_Partition_Uses_Parent_DirectoryLayer() // should have kept the same prefix //note: we need to cheat to get the key of the partition - Assert.That(FdbSubspace.Copy(bar).Key, Is.EqualTo(FdbSubspace.Copy(foo).Key)); + Assert.That(bar.Copy().GetPrefix(), Is.EqualTo(foo.Copy().GetPrefix())); // verify list again folders = await directory.ListAsync(tr); @@ -798,36 +800,36 @@ public async Task Test_Directory_Methods_Should_Fail_With_Empty_Paths() var directory = FdbDirectoryLayer.Create(location); // CreateOrOpen - Assert.Throws(async () => await directory.CreateOrOpenAsync(db, default(string[]), this.Cancellation)); - Assert.Throws(async () => await 
directory.CreateOrOpenAsync(db, new string[0], this.Cancellation)); - Assert.Throws(async () => await directory.CreateOrOpenAsync(db, default(string), this.Cancellation)); + Assert.That(async () => await directory.CreateOrOpenAsync(db, default(string[]), this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.CreateOrOpenAsync(db, new string[0], this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.CreateOrOpenAsync(db, default(string), this.Cancellation), Throws.InstanceOf()); // Create - Assert.Throws(async () => await directory.CreateAsync(db, default(string[]), this.Cancellation)); - Assert.Throws(async () => await directory.CreateAsync(db, new string[0], this.Cancellation)); - Assert.Throws(async () => await directory.CreateAsync(db, default(string), this.Cancellation)); + Assert.That(async () => await directory.CreateAsync(db, default(string[]), this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.CreateAsync(db, new string[0], this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.CreateAsync(db, default(string), this.Cancellation), Throws.InstanceOf()); // Open - Assert.Throws(async () => await directory.OpenAsync(db, default(string[]), this.Cancellation)); - Assert.Throws(async () => await directory.OpenAsync(db, new string[0], this.Cancellation)); - Assert.Throws(async () => await directory.OpenAsync(db, default(string), this.Cancellation)); + Assert.That(async () => await directory.OpenAsync(db, default(string[]), this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.OpenAsync(db, new string[0], this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.OpenAsync(db, default(string), this.Cancellation), Throws.InstanceOf()); // Move - Assert.Throws(async () => await directory.MoveAsync(db, default(string[]), new[] { "foo" }, this.Cancellation)); - Assert.Throws(async () => await 
directory.MoveAsync(db, new[] { "foo" }, default(string[]), this.Cancellation)); - Assert.Throws(async () => await directory.MoveAsync(db, new string[0], new[] { "foo" }, this.Cancellation)); - Assert.Throws(async () => await directory.MoveAsync(db, new[] { "foo" }, new string[0], this.Cancellation)); + Assert.That(async () => await directory.MoveAsync(db, default(string[]), new[] { "foo" }, this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.MoveAsync(db, new[] { "foo" }, default(string[]), this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.MoveAsync(db, new string[0], new[] { "foo" }, this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.MoveAsync(db, new[] { "foo" }, new string[0], this.Cancellation), Throws.InstanceOf()); // Remove - Assert.Throws(async () => await directory.RemoveAsync(db, default(string[]), this.Cancellation)); - Assert.Throws(async () => await directory.RemoveAsync(db, new string[0], this.Cancellation)); - Assert.Throws(async () => await directory.RemoveAsync(db, new string[] { "Foo", " ", "Bar" }, this.Cancellation)); - Assert.Throws(async () => await directory.RemoveAsync(db, default(string), this.Cancellation)); + Assert.That(async () => await directory.RemoveAsync(db, default(string[]), this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.RemoveAsync(db, new string[0], this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.RemoveAsync(db, new string[] { "Foo", " ", "Bar" }, this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.RemoveAsync(db, default(string), this.Cancellation), Throws.InstanceOf()); // List - Assert.Throws(async () => await directory.ListAsync(db, default(string[]), this.Cancellation)); - Assert.Throws(async () => await directory.ListAsync(db, new string[] { "Foo", "", "Bar" }, this.Cancellation)); - Assert.Throws(async () => await 
directory.ListAsync(db, default(string), this.Cancellation)); + Assert.That(async () => await directory.ListAsync(db, default(string[]), this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.ListAsync(db, new string[] { "Foo", "", "Bar" }, this.Cancellation), Throws.InstanceOf()); + Assert.That(async () => await directory.ListAsync(db, default(string), this.Cancellation), Throws.InstanceOf()); } } @@ -844,106 +846,81 @@ public async Task Test_Directory_Partitions_Should_Disallow_Creation_Of_Direct_K await db.ClearRangeAsync(location, this.Cancellation); var directory = FdbDirectoryLayer.Create(location); - Console.WriteLine(directory); + Log(directory); - var partition = await directory.CreateAsync(db, "Foo", Slice.FromAscii("partition"), this.Cancellation); + var partition = await directory.CreateAsync(db, "Foo", Slice.FromStringAscii("partition"), this.Cancellation); //note: if we want a testable key INSIDE the partition, we have to get it from a sub-directory var subdir = await partition.CreateOrOpenAsync(db, "Bar", this.Cancellation); - var barKey = subdir.Key; + var barKey = subdir.GetPrefix(); // the constraint will always be the same for all the checks - Action shouldFail = (del) => + void ShouldFail(ActualValueDelegate del) { - Assert.That(del, Throws.InstanceOf().With.Message.StringContaining("root of a directory partition")); - }; - Action shouldPass = (del) => + Assert.That(del, Throws.InstanceOf().With.Message.Contains("root of a directory partition")); + } + + void ShouldPass(ActualValueDelegate del) { Assert.That(del, Throws.Nothing); - }; + } // === PASS === // these methods are allowed to succeed on directory partitions, because we need them for the rest to work - shouldPass(() => { var _ = FdbSubspace.Copy(partition).Key; }); // EXCEPTION: we need this to work, because that's the only way that the unit tests above can see the partition key! - shouldPass(() => partition.ToString()); // EXCEPTION: this should never fail! 
- shouldPass(() => partition.DumpKey(barKey)); // EXCEPTION: this should always work, because this can be used for debugging and logging... - shouldPass(() => partition.BoundCheck(barKey, true)); // EXCEPTION: needs to work because it is used by GetRange() and GetKey() + ShouldPass(() => partition.Copy().GetPrefix()); // EXCEPTION: we need this to work, because that's the only way that the unit tests above can see the partition key! + ShouldPass(() => partition.ToString()); // EXCEPTION: this should never fail! + ShouldPass(() => partition.DumpKey(barKey)); // EXCEPTION: this should always work, because this can be used for debugging and logging... + ShouldPass(() => partition.BoundCheck(barKey, true)); // EXCEPTION: needs to work because it is used by GetRange() and GetKey() // === FAIL ==== // Key - shouldFail(() => { var _ = partition.Key; }); - - // ToFoundationDBKey - shouldFail(() => ((IFdbKey)partition).ToFoundationDbKey()); + ShouldFail(() => partition.GetPrefix()); // Contains - shouldFail(() => partition.Contains(subdir)); - shouldFail(() => partition.Contains(barKey)); + ShouldFail(() => partition.Contains(barKey)); // Extract / ExtractAndCheck / BoundCheck - shouldFail(() => partition.ExtractKey(barKey, boundCheck: false)); - shouldFail(() => partition.ExtractKey(barKey, boundCheck: true)); - shouldFail(() => partition.ExtractKeys(new[] { barKey, barKey + FdbKey.MinValue })); + ShouldFail(() => partition.ExtractKey(barKey, boundCheck: false)); + ShouldFail(() => partition.ExtractKey(barKey, boundCheck: true)); // Partition - shouldFail(() => partition.Partition.ByKey(123)); - shouldFail(() => partition.Partition.ByKey(123, "hello")); - shouldFail(() => partition.Partition.ByKey(123, "hello", false)); - shouldFail(() => partition.Partition.ByKey(123, "hello", false, "world")); + ShouldFail(() => partition.Partition.ByKey(123)); + ShouldFail(() => partition.Partition.ByKey(123, "hello")); + ShouldFail(() => partition.Partition.ByKey(123, "hello", false)); 
+ ShouldFail(() => partition.Partition.ByKey(123, "hello", false, "world")); // Keys - shouldFail(() => partition.ConcatKey(Slice.FromString("hello"))); - shouldFail(() => partition.ConcatKey(location.Key)); - shouldFail(() => partition.ConcatKeys(new[] { Slice.FromString("hello"), Slice.FromString("world"), Slice.FromString("!") })); + ShouldFail(() => partition[Slice.FromString("hello")]); + ShouldFail(() => partition[location.GetPrefix()]); + ShouldFail(() => partition[STuple.Create("hello", 123)]); - shouldFail(() => { var _ = partition[Slice.FromString("hello")]; }); - shouldFail(() => { var _ = partition[location.Key]; }); - shouldFail(() => { var _ = partition[location]; }); - - shouldFail(() => partition.ToRange()); - shouldFail(() => partition.ToRange(Slice.FromString("hello"))); - shouldFail(() => partition.ToRange(FdbTuple.EncodeKey("hello"))); - shouldFail(() => partition.ToRange(location)); + ShouldFail(() => partition.ToRange()); + ShouldFail(() => partition.ToRange(Slice.FromString("hello"))); + ShouldFail(() => partition.ToRange(TuPack.EncodeKey("hello"))); // Tuples - shouldFail(() => partition.Keys.Encode(123)); - shouldFail(() => partition.Keys.Encode(123, "hello")); - shouldFail(() => partition.Keys.Encode(123, "hello", false)); - shouldFail(() => partition.Keys.Encode(123, "hello", false, "world")); - shouldFail(() => partition.Keys.Encode(123)); - - shouldFail(() => partition.Keys.EncodeMany(new[] { 123, 456, 789 })); - shouldFail(() => partition.Keys.EncodeMany((IEnumerable)new[] { 123, 456, 789 })); - shouldFail(() => partition.Keys.EncodeMany(new object[] { 123, "hello", true })); - shouldFail(() => partition.Keys.EncodeMany((IEnumerable)new object[] { 123, "hello", true })); - - shouldFail(() => partition.Keys.Unpack(barKey)); - shouldFail(() => partition.Keys.UnpackMany(new[] { barKey, barKey + FdbTuple.EncodeKey(123) })); - shouldFail(() => partition.Keys.Decode(barKey)); - shouldFail(() => partition.Keys.DecodeMany(new[] { barKey, 
barKey })); - shouldFail(() => partition.Keys.DecodeLast(barKey)); - shouldFail(() => partition.Keys.DecodeLastMany(new[] { barKey, barKey + FdbTuple.EncodeKey(123) })); - shouldFail(() => partition.Keys.DecodeFirst(barKey)); - shouldFail(() => partition.Keys.DecodeFirstMany(new[] { barKey, barKey + FdbTuple.EncodeKey(123) })); - - //FIXME: need to re-enable this code! -#if REFACTORING_IN_PROGRESS - shouldFail(() => partition.Keys.ToTuple()); - - shouldFail(() => partition.Keys.Append(123)); - shouldFail(() => partition.Keys.Append(123, "hello")); - shouldFail(() => partition.Keys.Append(123, "hello", false)); - shouldFail(() => partition.Keys.Append(123, "hello", false, "world")); - shouldFail(() => partition.Keys.Concat(FdbTuple.Create(123, "hello", false, "world"))); - shouldFail(() => partition.Keys.Append(new object[] { 123, "hello", false, "world" })); -#endif + ShouldFail(() => partition.Keys.Encode(123)); + ShouldFail(() => partition.Keys.Encode(123, "hello")); + ShouldFail(() => partition.Keys.Encode(123, "hello", false)); + ShouldFail(() => partition.Keys.Encode(123, "hello", false, "world")); + ShouldFail(() => partition.Keys.Encode(123)); + + ShouldFail(() => partition.Keys.EncodeMany(new[] { 123, 456, 789 })); + ShouldFail(() => partition.Keys.EncodeMany((IEnumerable)new[] { 123, 456, 789 })); + ShouldFail(() => partition.Keys.EncodeMany(new object[] { 123, "hello", true })); + ShouldFail(() => partition.Keys.EncodeMany((IEnumerable)new object[] { 123, "hello", true })); + + ShouldFail(() => partition.Keys.Unpack(barKey)); + ShouldFail(() => partition.Keys.Decode(barKey)); + ShouldFail(() => partition.Keys.DecodeLast(barKey)); + ShouldFail(() => partition.Keys.DecodeFirst(barKey)); - shouldFail(() => partition.Keys.ToRange()); - shouldFail(() => partition.ToRange(Slice.FromString("hello"))); - shouldFail(() => partition.Keys.ToRange(FdbTuple.Create("hello"))); + ShouldFail(() => partition.Keys.ToRange()); + ShouldFail(() => 
partition.ToRange(Slice.FromString("hello"))); + ShouldFail(() => partition.Keys.ToRange(STuple.Create("hello"))); } } @@ -957,12 +934,12 @@ public async Task Test_Concurrent_Directory_Creation() await db.ClearRangeAsync(location, this.Cancellation); var directory = FdbDirectoryLayer.Create(location); - Console.WriteLine(directory); + Log(directory); //to prevent any side effect from first time initialization of the directory layer, already create one dummy folder await directory.CreateAsync(db, "Zero", this.Cancellation); - var logdb = db.Logged((tr) => Console.WriteLine(tr.Log.GetTimingsReport(true))); + var logdb = db.Logged((tr) => Log(tr.Log.GetTimingsReport(true))); var f = FdbDirectoryLayer.AnnotateTransactions; try @@ -979,19 +956,19 @@ await Task.WhenAll( ); var first = await directory.CreateAsync(tr1, new[] { "First" }, Slice.Nil); - tr1.Set(first.Key, Slice.FromString("This belongs to the first directory")); + tr1.Set(first.GetPrefix(), Slice.FromString("This belongs to the first directory")); var second = await directory.CreateAsync(tr2, new[] { "Second" }, Slice.Nil); - tr2.Set(second.Key, Slice.FromString("This belongs to the second directory")); + tr2.Set(second.GetPrefix(), Slice.FromString("This belongs to the second directory")); - Console.WriteLine("Committing T1..."); + Log("Committing T1..."); await tr1.CommitAsync(); - Console.WriteLine("T1 committed"); + Log("T1 committed"); tr1.Dispose(); // force T1 to be dumped immediately - Console.WriteLine("Committing T2..."); + Log("Committing T2..."); await tr2.CommitAsync(); - Console.WriteLine("T2 committed"); + Log("T2 committed"); } } finally @@ -1016,12 +993,12 @@ public async Task Test_Concurrent_Directory_Creation_With_Custom_Prefix() // ie: regular prefix would be ("DL", 123) and our custom prefixes will be ("DL", "abc") var directory = FdbDirectoryLayer.Create(location); - Console.WriteLine(directory); + Log(directory); //to prevent any side effect from first time initialization of the 
directory layer, already create one dummy folder await directory.CreateAsync(db, "Zero", this.Cancellation); - var logdb = db.Logged((tr) => Console.WriteLine(tr.Log.GetTimingsReport(true))); + var logdb = db.Logged((tr) => Log(tr.Log.GetTimingsReport(true))); var f = FdbDirectoryLayer.AnnotateTransactions; try @@ -1038,17 +1015,17 @@ await Task.WhenAll( ); var first = await directory.RegisterAsync(tr1, new[] { "First" }, Slice.Nil, location.Keys.Encode("abc")); - tr1.Set(first.Key, Slice.FromString("This belongs to the first directory")); + tr1.Set(first.GetPrefix(), Slice.FromString("This belongs to the first directory")); var second = await directory.RegisterAsync(tr2, new[] { "Second" }, Slice.Nil, location.Keys.Encode("def")); - tr2.Set(second.Key, Slice.FromString("This belongs to the second directory")); + tr2.Set(second.GetPrefix(), Slice.FromString("This belongs to the second directory")); - Console.WriteLine("Committing T1..."); + Log("Committing T1..."); await tr1.CommitAsync(); - Console.WriteLine("T1 committed"); + Log("T1 committed"); tr1.Dispose(); // force T1 to be dumped immediately - Console.WriteLine("Committing T2..."); + Log("Committing T2..."); try { await tr2.CommitAsync(); @@ -1063,7 +1040,7 @@ await Task.WhenAll( } throw; } - Console.WriteLine("T2 committed"); + Log("T2 committed"); } } finally diff --git a/FoundationDB.Tests/Layers/DocumentCollectionFacts.cs b/FoundationDB.Tests/Layers/DocumentCollectionFacts.cs index 43505b7af..ef727febe 100644 --- a/FoundationDB.Tests/Layers/DocumentCollectionFacts.cs +++ b/FoundationDB.Tests/Layers/DocumentCollectionFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,12 +28,12 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Documents.Tests { + using System; + using System.Threading.Tasks; using FoundationDB.Client.Tests; using FoundationDB.Types.Json; using FoundationDB.Types.ProtocolBuffers; using NUnit.Framework; - using System; - using System.Threading.Tasks; [TestFixture] public class DocumentCollectionFacts : FdbTest diff --git a/FoundationDB.Tests/Layers/IndexingFacts.cs b/FoundationDB.Tests/Layers/IndexingFacts.cs index 9522cecf7..9ab392e84 100644 --- a/FoundationDB.Tests/Layers/IndexingFacts.cs +++ b/FoundationDB.Tests/Layers/IndexingFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,16 +28,15 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Tables.Tests { - using FoundationDB.Client; - using FoundationDB.Client.Tests; - using FoundationDB.Layers.Indexing; - using FoundationDB.Layers.Directories; - using FoundationDB.Linq; - using NUnit.Framework; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using System; + using Doxense.Linq; + using FoundationDB.Client; + using FoundationDB.Client.Tests; + using FoundationDB.Layers.Indexing; + using NUnit.Framework; [TestFixture] public class IndexingFacts : FdbTest @@ -112,10 +111,10 @@ await db.WriteAsync((tr) => Assert.That(greens, Is.EqualTo(new int[] { 2, 4 })); var blues = await index.LookupAsync(tr, "blue"); - Assert.That(blues.Count, Is.EqualTo(0)); + Assert.That(blues.Count, Is.Zero); var yellows = await index.LookupAsync(tr, "yellow"); - Assert.That(yellows.Count, Is.EqualTo(0)); + Assert.That(yellows.Count, Is.Zero); var indigos = await index.LookupAsync(tr, "indigo"); Assert.That(indigos, Is.EqualTo(new int[] { 3 })); @@ -171,7 +170,7 @@ await db.WriteAsync((tr) => using (var tr = db.BeginTransaction(this.Cancellation)) { var 
superHeroes = await indexSuperHero.LookupAsync(tr, value: true); - Console.WriteLine("SuperHeroes: " + string.Join(", ", superHeroes)); + Log("SuperHeroes: " + string.Join(", ", superHeroes)); Assert.That(superHeroes, Is.EqualTo(characters.Where(c => c.HasSuperPowers).Select(c => c.Id).ToList())); } @@ -179,11 +178,11 @@ await db.WriteAsync((tr) => using (var tr = db.BeginTransaction(this.Cancellation)) { var dc = await indexBrand.LookupAsync(tr, value: "DC"); - Console.WriteLine("DC: " + string.Join(", ", dc)); + Log("DC: " + string.Join(", ", dc)); Assert.That(dc, Is.EqualTo(characters.Where(c => c.Brand == "DC").Select(c => c.Id).ToList())); var marvel = await indexBrand.LookupAsync(tr, value: "Marvel"); - Console.WriteLine("Marvel: " + string.Join(", ", dc)); + Log("Marvel: " + string.Join(", ", dc)); Assert.That(marvel, Is.EqualTo(characters.Where(c => c.Brand == "Marvel").Select(c => c.Id).ToList())); } diff --git a/FoundationDB.Tests/Layers/MapFacts.cs b/FoundationDB.Tests/Layers/MapFacts.cs index bde98dbf7..e1ef92a41 100644 --- a/FoundationDB.Tests/Layers/MapFacts.cs +++ b/FoundationDB.Tests/Layers/MapFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,14 +28,15 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections.Tests { - using FoundationDB.Client; - using FoundationDB.Client.Tests; - using FoundationDB.Layers.Tuples; - using NUnit.Framework; using System; using System.Collections.Generic; using System.Net; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; + using FoundationDB.Client.Tests; + using NUnit.Framework; [TestFixture] public class MapFacts : FdbTest @@ -60,7 +61,7 @@ public async Task Test_FdbMap_Read_Write_Delete() var value = await map.TryGetAsync(tr, "hello"); Assert.That(value.HasValue, Is.False); - Assert.That(value.GetValueOrDefault(), Is.Null); + Assert.That(value.Value, Is.Null); } // write value @@ -168,11 +169,11 @@ public async Task Test_FdbMap_With_Custom_Key_Encoder() // Encode IPEndPoint as the (IP, Port,) encoded with the Tuple codec // note: there is a much simpler way or creating composite keys, this is just a quick and dirty test! var keyEncoder = KeyValueEncoders.Bind( - (ipe) => ipe == null ? Slice.Empty : FdbTuple.EncodeKey(ipe.Address, ipe.Port), + (ipe) => ipe == null ? 
Slice.Empty : TuPack.EncodeKey(ipe.Address, ipe.Port), (packed) => { if (packed.IsNullOrEmpty) return default(IPEndPoint); - var t = FdbTuple.Unpack(packed); + var t = TuPack.Unpack(packed); return new IPEndPoint(t.Get(0), t.Get(1)); } ); @@ -188,7 +189,7 @@ public async Task Test_FdbMap_With_Custom_Key_Encoder() { var location = await GetCleanDirectory(db, "Collections", "Maps"); - var map = new FdbMap("Firewall", location.Partition.ByKey("Hosts"), keyEncoder, KeyValueEncoders.Values.StringEncoder); + var map = new FdbMap("Firewall", location.Partition.ByKey("Hosts").UsingEncoder(keyEncoder), KeyValueEncoders.Values.StringEncoder); // import all the rules await db.WriteAsync((tr) => diff --git a/FoundationDB.Tests/Layers/MultiMapFacts.cs b/FoundationDB.Tests/Layers/MultiMapFacts.cs index af11ff7ee..c46f1cfc8 100644 --- a/FoundationDB.Tests/Layers/MultiMapFacts.cs +++ b/FoundationDB.Tests/Layers/MultiMapFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,14 +28,12 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections.Tests { + using System; + using System.Threading.Tasks; + using Doxense.Serialization.Encoders; using FoundationDB.Client; using FoundationDB.Client.Tests; - using FoundationDB.Layers.Tuples; using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Net; - using System.Threading.Tasks; [TestFixture] public class MultiMapFacts : FdbTest @@ -89,7 +87,8 @@ public async Task Test_FdbMultiMap_Read_Write_Delete() // directly read the value, behind the table's back using (var tr = db.BeginTransaction(this.Cancellation)) { - var value = await tr.GetAsync(map.Subspace[FdbTuple.Create("hello", "world")]); + var loc = map.Subspace.AsDynamic(); + var value = await tr.GetAsync(loc.Keys.Encode("hello", "world")); Assert.That(value, Is.Not.EqualTo(Slice.Nil)); Assert.That(value.ToInt64(), Is.EqualTo(1)); } @@ -112,7 +111,8 @@ public async Task Test_FdbMultiMap_Read_Write_Delete() Assert.That(count, Is.Null); // also check directly - var data = await tr.GetAsync(map.Subspace[FdbTuple.Create("hello", "world")]); + var loc = map.Subspace.AsDynamic(); + var data = await tr.GetAsync(loc.Keys.Encode("hello", "world")); Assert.That(data, Is.EqualTo(Slice.Nil)); } diff --git a/FoundationDB.Tests/Layers/QueuesFacts.cs b/FoundationDB.Tests/Layers/QueuesFacts.cs index ab39ff736..b88020299 100644 --- a/FoundationDB.Tests/Layers/QueuesFacts.cs +++ b/FoundationDB.Tests/Layers/QueuesFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -30,17 +30,16 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections.Tests { - using FoundationDB.Async; - using FoundationDB.Client; - using FoundationDB.Client.Tests; - using FoundationDB.Filters.Logging; - using NUnit.Framework; using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Threading; using System.Threading.Tasks; + using Doxense.Async; + using FoundationDB.Client; + using FoundationDB.Client.Tests; + using NUnit.Framework; [TestFixture] public class QueuesFacts : FdbTest @@ -56,12 +55,12 @@ public async Task Test_Queue_Fast() var queue = new FdbQueue(location, highContention: false); - Console.WriteLine("Clear Queue"); + Log("Clear Queue"); await db.WriteAsync((tr) => queue.Clear(tr), this.Cancellation); - Console.WriteLine("Empty? " + await db.ReadAsync((tr) => queue.EmptyAsync(tr), this.Cancellation)); + Log("Empty? " + await db.ReadAsync((tr) => queue.EmptyAsync(tr), this.Cancellation)); - Console.WriteLine("Push 10, 8, 6"); + Log("Push 10, 8, 6"); await db.ReadWriteAsync((tr) => queue.PushAsync(tr, 10), this.Cancellation); await db.ReadWriteAsync((tr) => queue.PushAsync(tr, 8), this.Cancellation); await db.ReadWriteAsync((tr) => queue.PushAsync(tr, 6), this.Cancellation); @@ -72,51 +71,55 @@ public async Task Test_Queue_Fast() // Empty? bool empty = await db.ReadAsync((tr) => queue.EmptyAsync(tr), this.Cancellation); - Console.WriteLine("Empty? " + empty); + Log("Empty? 
" + empty); Assert.That(empty, Is.False); - Optional item = await queue.PopAsync(db, this.Cancellation); - Console.WriteLine("Pop item: " + item); - Assert.That((int)item, Is.EqualTo(10)); + var item = await queue.PopAsync(db, this.Cancellation); + Log($"Pop item: {item}"); + Assert.That(item.HasValue, Is.True); + Assert.That(item.Value, Is.EqualTo(10)); item = await db.ReadWriteAsync((tr) => queue.PeekAsync(tr), this.Cancellation); - Console.WriteLine("Next item: " + item); - Assert.That((int)item, Is.EqualTo(8)); + Log($"Next item: {item}"); + Assert.That(item.HasValue, Is.True); + Assert.That(item.Value, Is.EqualTo(8)); #if DEBUG await DumpSubspace(db, location); #endif item = await queue.PopAsync(db, this.Cancellation); - Console.WriteLine("Pop item: " + item); - Assert.That((int)item, Is.EqualTo(8)); + Log($"Pop item: {item}"); + Assert.That(item.HasValue, Is.True); + Assert.That(item.Value, Is.EqualTo(8)); #if DEBUG await DumpSubspace(db, location); #endif item = await queue.PopAsync(db, this.Cancellation); - Console.WriteLine("Pop item: " + item); - Assert.That((int)item, Is.EqualTo(6)); + Log($"Pop item: {item}"); + Assert.That(item.HasValue, Is.True); + Assert.That(item.Value, Is.EqualTo(6)); #if DEBUG await DumpSubspace(db, location); #endif empty = await db.ReadAsync((tr) => queue.EmptyAsync(tr), this.Cancellation); - Console.WriteLine("Empty? " + empty); + Log("Empty? " + empty); Assert.That(empty, Is.True); - Console.WriteLine("Push 5"); + Log("Push 5"); await db.ReadWriteAsync((tr) => queue.PushAsync(tr, 5), this.Cancellation); #if DEBUG await DumpSubspace(db, location); #endif - Console.WriteLine("Clear Queue"); + Log("Clear Queue"); await db.WriteAsync((tr) => queue.Clear(tr), this.Cancellation); #if DEBUG await DumpSubspace(db, location); #endif empty = await db.ReadAsync((tr) => queue.EmptyAsync(tr), this.Cancellation); - Console.WriteLine("Empty? " + empty); + Log("Empty? 
" + empty); Assert.That(empty, Is.True); } } @@ -150,9 +153,9 @@ public async Task Test_Single_Client() } - private static async Task RunMultiClientTest(IFdbDatabase db, FdbSubspace location, bool highContention, string desc, int K, int NUM, CancellationToken ct) + private static async Task RunMultiClientTest(IFdbDatabase db, KeySubspace location, bool highContention, string desc, int K, int NUM, CancellationToken ct) { - Console.WriteLine("Starting {0} test with {1} threads and {2} iterations", desc, K, NUM); + Log("Starting {0} test with {1} threads and {2} iterations", desc, K, NUM); var queue = new FdbQueue(location, highContention); await db.WriteAsync((tr) => queue.Clear(tr), ct); @@ -234,8 +237,8 @@ private static async Task RunMultiClientTest(IFdbDatabase db, FdbSubspace locati } sw.Stop(); - Console.WriteLine("> Finished {0} test in {1} seconds", desc, sw.Elapsed.TotalSeconds); - Console.WriteLine("> Pushed {0}, Popped {1} and Stalled {2}", pushCount, popCount, stalls); + Log("> Finished {0} test in {1} seconds", desc, sw.Elapsed.TotalSeconds); + Log("> Pushed {0}, Popped {1} and Stalled {2}", pushCount, popCount, stalls); var pushedItems = pushTreads.SelectMany(t => t.Result).ToList(); var poppedItems = popThreads.SelectMany(t => t.Result).ToList(); @@ -306,23 +309,23 @@ public async Task Test_Log_Queue() #if ENABLE_LOGGING foreach (var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } list.Clear(); #endif - Console.WriteLine("------------------------------------------------"); + Log("------------------------------------------------"); await RunMultiClientTest(logged, location, true, "high contention queue", 4, NUM, this.Cancellation); #if ENABLE_LOGGING foreach (var log in list) { - Console.WriteLine(log.GetTimingsReport(true)); + Log(log.GetTimingsReport(true)); } list.Clear(); #endif - Console.WriteLine("------------------------------------------------"); + 
Log("------------------------------------------------"); } diff --git a/FoundationDB.Tests/Layers/RankedSetFacts.cs b/FoundationDB.Tests/Layers/RankedSetFacts.cs index c3980cf25..aad3c0da7 100644 --- a/FoundationDB.Tests/Layers/RankedSetFacts.cs +++ b/FoundationDB.Tests/Layers/RankedSetFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,17 +28,14 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections.Tests { - using FoundationDB.Client; - using FoundationDB.Client.Tests; - using FoundationDB.Layers.Tuples; - using NUnit.Framework; using System; - using System.Collections.Generic; using System.Diagnostics; - using System.Linq; using System.Text; - using System.Threading; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using FoundationDB.Client; + using FoundationDB.Client.Tests; + using NUnit.Framework; [TestFixture] [Obsolete] @@ -59,16 +56,16 @@ await db.ReadWriteAsync(async (tr) => await PrintRankedSet(vector, tr); }, this.Cancellation); - Console.WriteLine(); + Log(); var rnd = new Random(); var sw = Stopwatch.StartNew(); for (int i = 0; i < 100; i++) { Console.Write("\rInserting " + i); - await db.ReadWriteAsync((tr) => vector.InsertAsync(tr, FdbTuple.EncodeKey(rnd.Next())), this.Cancellation); + await db.ReadWriteAsync((tr) => vector.InsertAsync(tr, TuPack.EncodeKey(rnd.Next())), this.Cancellation); } sw.Stop(); - Console.WriteLine("\rDone in {0:N3} sec", sw.Elapsed.TotalSeconds); + Log("\rDone in {0:N3} sec", sw.Elapsed.TotalSeconds); await db.ReadAsync((tr) => PrintRankedSet(vector, tr), this.Cancellation); } @@ -85,7 +82,7 @@ await tr.GetRange(rs.Subspace.Partition.ByKey(l).Keys.ToRange()).ForEachAsync((k sb.AppendFormat("\t{0} = {1}\r\n", rs.Subspace.Keys.Unpack(kvp.Key), kvp.Value.ToInt64()); }); } - 
Console.WriteLine(sb.ToString()); + Log(sb.ToString()); } } diff --git a/FoundationDB.Tests/Layers/StringInternFacts.cs b/FoundationDB.Tests/Layers/StringInternFacts.cs index 4cd5b2460..f11442ca7 100644 --- a/FoundationDB.Tests/Layers/StringInternFacts.cs +++ b/FoundationDB.Tests/Layers/StringInternFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,11 +28,11 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Interning.Tests { + using System; + using System.Threading.Tasks; using FoundationDB.Client; using FoundationDB.Client.Tests; using NUnit.Framework; - using System; - using System.Threading.Tasks; [TestFixture] public class StringInternFacts : FdbTest diff --git a/FoundationDB.Tests/Layers/TupleFacts.cs b/FoundationDB.Tests/Layers/TupleFacts.cs deleted file mode 100644 index 3a189739a..000000000 --- a/FoundationDB.Tests/Layers/TupleFacts.cs +++ /dev/null @@ -1,3041 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013, Doxense SARL -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Layers.Tuples.Tests -{ - using FoundationDB.Client; - using FoundationDB.Client.Converters; - using FoundationDB.Client.Tests; - using FoundationDB.Client.Utils; - using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Linq; - using System.Net; - using System.Text; - - [TestFixture] - public class TupleFacts : FdbTest - { - - #region General Use... 
- - [Test] - public void Test_FdbTuple_Create() - { - var t1 = FdbTuple.Create("hello world"); - Assert.That(t1.Count, Is.EqualTo(1)); - Assert.That(t1.Item1, Is.EqualTo("hello world")); - Assert.That(t1.Get(0), Is.EqualTo("hello world")); - Assert.That(t1[0], Is.EqualTo("hello world")); - - var t2 = FdbTuple.Create("hello world", 123); - Assert.That(t2.Count, Is.EqualTo(2)); - Assert.That(t2.Item1, Is.EqualTo("hello world")); - Assert.That(t2.Item2, Is.EqualTo(123)); - Assert.That(t2.Get(0), Is.EqualTo("hello world")); - Assert.That(t2.Get(1), Is.EqualTo(123)); - Assert.That(t2[0], Is.EqualTo("hello world")); - Assert.That(t2[1], Is.EqualTo(123)); - - var t3 = FdbTuple.Create("hello world", 123, false); - Assert.That(t3.Count, Is.EqualTo(3)); - Assert.That(t3.Item1, Is.EqualTo("hello world")); - Assert.That(t3.Item2, Is.EqualTo(123)); - Assert.That(t3.Item3, Is.EqualTo(false)); - Assert.That(t3.Get(0), Is.EqualTo("hello world")); - Assert.That(t3.Get(1), Is.EqualTo(123)); - Assert.That(t3.Get(2), Is.EqualTo(false)); - Assert.That(t3[0], Is.EqualTo("hello world")); - Assert.That(t3[1], Is.EqualTo(123)); - Assert.That(t3[2], Is.EqualTo(false)); - - var t4 = FdbTuple.Create("hello world", 123, false, 1234L); - Assert.That(t4.Count, Is.EqualTo(4)); - Assert.That(t4.Item1, Is.EqualTo("hello world")); - Assert.That(t4.Item2, Is.EqualTo(123)); - Assert.That(t4.Item3, Is.EqualTo(false)); - Assert.That(t4.Item4, Is.EqualTo(1234L)); - Assert.That(t4.Get(0), Is.EqualTo("hello world")); - Assert.That(t4.Get(1), Is.EqualTo(123)); - Assert.That(t4.Get(2), Is.EqualTo(false)); - Assert.That(t4.Get(3), Is.EqualTo(1234L)); - Assert.That(t4[0], Is.EqualTo("hello world")); - Assert.That(t4[1], Is.EqualTo(123)); - Assert.That(t4[2], Is.EqualTo(false)); - Assert.That(t4[3], Is.EqualTo(1234L)); - - var t5 = FdbTuple.Create("hello world", 123, false, 1234L, -1234); - Assert.That(t5.Count, Is.EqualTo(5)); - Assert.That(t5.Item1, Is.EqualTo("hello world")); - Assert.That(t5.Item2, 
Is.EqualTo(123)); - Assert.That(t5.Item3, Is.EqualTo(false)); - Assert.That(t5.Item4, Is.EqualTo(1234L)); - Assert.That(t5.Item5, Is.EqualTo(-1234)); - Assert.That(t5.Get(0), Is.EqualTo("hello world")); - Assert.That(t5.Get(1), Is.EqualTo(123)); - Assert.That(t5.Get(2), Is.EqualTo(false)); - Assert.That(t5.Get(3), Is.EqualTo(1234L)); - Assert.That(t5.Get(4), Is.EqualTo(-1234)); - Assert.That(t5[0], Is.EqualTo("hello world")); - Assert.That(t5[1], Is.EqualTo(123)); - Assert.That(t5[2], Is.EqualTo(false)); - Assert.That(t5[3], Is.EqualTo(1234L)); - Assert.That(t5[4], Is.EqualTo(-1234)); - - var tn = FdbTuple.Create(new object[] { "hello world", 123, false, 1234L, -1234, "six" }); - Assert.That(tn.Count, Is.EqualTo(6)); - Assert.That(tn.Get(0), Is.EqualTo("hello world")); - Assert.That(tn.Get(1), Is.EqualTo(123)); - Assert.That(tn.Get(2), Is.EqualTo(false)); - Assert.That(tn.Get(3), Is.EqualTo(1234)); - Assert.That(tn.Get(4), Is.EqualTo(-1234)); - Assert.That(tn.Get(5), Is.EqualTo("six")); - } - - [Test] - public void Test_FdbTuple_Wrap() - { - // FdbTuple.Wrap(...) does not copy the items of the array - - var arr = new object[] { "Hello", 123, false, TimeSpan.FromSeconds(5) }; - - var t = FdbTuple.Wrap(arr); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(4)); - Assert.That(t[0], Is.EqualTo("Hello")); - Assert.That(t[1], Is.EqualTo(123)); - Assert.That(t[2], Is.EqualTo(false)); - Assert.That(t[3], Is.EqualTo(TimeSpan.FromSeconds(5))); - - t = FdbTuple.Wrap(arr, 1, 2); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(2)); - Assert.That(t[0], Is.EqualTo(123)); - Assert.That(t[1], Is.EqualTo(false)); - - // changing the underyling array should change the tuple - // DON'T DO THIS IN ACTUAL CODE!!! - - arr[1] = 456; - arr[2] = true; - Log("t = {0}", t); - - Assert.That(t[0], Is.EqualTo(456)); - Assert.That(t[1], Is.EqualTo(true)); - } - - [Test] - public void Test_FdbTuple_FromObjects() - { - // FdbTuple.FromObjects(...) 
does a copy of the items of the array - - var arr = new object[] { "Hello", 123, false, TimeSpan.FromSeconds(5) }; - - var t = FdbTuple.FromObjects(arr); - Log("t = {0}", t); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(4)); - Assert.That(t[0], Is.EqualTo("Hello")); - Assert.That(t[1], Is.EqualTo(123)); - Assert.That(t[2], Is.EqualTo(false)); - Assert.That(t[3], Is.EqualTo(TimeSpan.FromSeconds(5))); - - t = FdbTuple.FromObjects(arr, 1, 2); - Log("t = {0}", t); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(2)); - Assert.That(t[0], Is.EqualTo(123)); - Assert.That(t[1], Is.EqualTo(false)); - - // changing the underyling array should NOT change the tuple - - arr[1] = 456; - arr[2] = true; - Log("t = {0}", t); - - Assert.That(t[0], Is.EqualTo(123)); - Assert.That(t[1], Is.EqualTo(false)); - } - - [Test] - public void Test_FdbTuple_FromArray() - { - var items = new string[] { "Bonjour", "le", "Monde" }; - - var t = FdbTuple.FromArray(items); - Log("t = {0}", t); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(3)); - Assert.That(t[0], Is.EqualTo("Bonjour")); - Assert.That(t[1], Is.EqualTo("le")); - Assert.That(t[2], Is.EqualTo("Monde")); - - t = FdbTuple.FromArray(items, 1, 2); - Log("t = {0}", t); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(2)); - Assert.That(t[0], Is.EqualTo("le")); - Assert.That(t[1], Is.EqualTo("Monde")); - - // changing the underlying array should NOT change the tuple - items[1] = "ze"; - Log("t = {0}", t); - - Assert.That(t[0], Is.EqualTo("le")); - } - - [Test] - public void Test_FdbTuple_Negative_Indexing() - { - var t1 = FdbTuple.Create("hello world"); - Assert.That(t1.Get(-1), Is.EqualTo("hello world")); - Assert.That(t1[-1], Is.EqualTo("hello world")); - - var t2 = FdbTuple.Create("hello world", 123); - Assert.That(t2.Get(-1), Is.EqualTo(123)); - Assert.That(t2.Get(-2), Is.EqualTo("hello world")); - Assert.That(t2[-1], Is.EqualTo(123)); - Assert.That(t2[-2], 
Is.EqualTo("hello world")); - - var t3 = FdbTuple.Create("hello world", 123, false); - Assert.That(t3.Get(-1), Is.EqualTo(false)); - Assert.That(t3.Get(-2), Is.EqualTo(123)); - Assert.That(t3.Get(-3), Is.EqualTo("hello world")); - Assert.That(t3[-1], Is.EqualTo(false)); - Assert.That(t3[-2], Is.EqualTo(123)); - Assert.That(t3[-3], Is.EqualTo("hello world")); - - var t4 = FdbTuple.Create("hello world", 123, false, 1234L); - Assert.That(t4.Get(-1), Is.EqualTo(1234L)); - Assert.That(t4.Get(-2), Is.EqualTo(false)); - Assert.That(t4.Get(-3), Is.EqualTo(123)); - Assert.That(t4.Get(-4), Is.EqualTo("hello world")); - Assert.That(t4[-1], Is.EqualTo(1234L)); - Assert.That(t4[-2], Is.EqualTo(false)); - Assert.That(t4[-3], Is.EqualTo(123)); - Assert.That(t4[-4], Is.EqualTo("hello world")); - - var t5 = FdbTuple.Create("hello world", 123, false, 1234L, -1234); - Assert.That(t5.Get(-1), Is.EqualTo(-1234)); - Assert.That(t5.Get(-2), Is.EqualTo(1234L)); - Assert.That(t5.Get(-3), Is.EqualTo(false)); - Assert.That(t5.Get(-4), Is.EqualTo(123)); - Assert.That(t5.Get(-5), Is.EqualTo("hello world")); - Assert.That(t5[-1], Is.EqualTo(-1234)); - Assert.That(t5[-2], Is.EqualTo(1234L)); - Assert.That(t5[-3], Is.EqualTo(false)); - Assert.That(t5[-4], Is.EqualTo(123)); - Assert.That(t5[-5], Is.EqualTo("hello world")); - - var tn = FdbTuple.Create(new object[] { "hello world", 123, false, 1234, -1234, "six" }); - Assert.That(tn.Get(-1), Is.EqualTo("six")); - Assert.That(tn.Get(-2), Is.EqualTo(-1234)); - Assert.That(tn.Get(-3), Is.EqualTo(1234)); - Assert.That(tn.Get(-4), Is.EqualTo(false)); - Assert.That(tn.Get(-5), Is.EqualTo(123)); - Assert.That(tn.Get(-6), Is.EqualTo("hello world")); - Assert.That(tn[-1], Is.EqualTo("six")); - Assert.That(tn[-2], Is.EqualTo(-1234)); - Assert.That(tn[-3], Is.EqualTo(1234)); - Assert.That(tn[-4], Is.EqualTo(false)); - Assert.That(tn[-5], Is.EqualTo(123)); - Assert.That(tn[-6], Is.EqualTo("hello world")); - } - - [Test] - public void 
Test_FdbTuple_First_And_Last() - { - // tuple.First() should be equivalent to tuple.Get(0) - // tuple.Last() should be equivalent to tuple.Get(-1) - - var t1 = FdbTuple.Create(1); - Assert.That(t1.First(), Is.EqualTo(1)); - Assert.That(t1.First(), Is.EqualTo("1")); - Assert.That(((IFdbTuple)t1).Last(), Is.EqualTo(1)); - Assert.That(((IFdbTuple)t1).Last(), Is.EqualTo("1")); - - var t2 = FdbTuple.Create(1, 2); - Assert.That(t2.First(), Is.EqualTo(1)); - Assert.That(t2.First(), Is.EqualTo("1")); - Assert.That(t2.Last, Is.EqualTo(2)); - Assert.That(((IFdbTuple)t2).Last(), Is.EqualTo(2)); - Assert.That(((IFdbTuple)t2).Last(), Is.EqualTo("2")); - - var t3 = FdbTuple.Create(1, 2, 3); - Assert.That(t3.First(), Is.EqualTo(1)); - Assert.That(t3.First(), Is.EqualTo("1")); - Assert.That(t3.Last, Is.EqualTo(3)); - Assert.That(((IFdbTuple)t3).Last(), Is.EqualTo(3)); - Assert.That(((IFdbTuple)t3).Last(), Is.EqualTo("3")); - - var t4 = FdbTuple.Create(1, 2, 3, 4); - Assert.That(t4.First(), Is.EqualTo(1)); - Assert.That(t4.First(), Is.EqualTo("1")); - Assert.That(t4.Last, Is.EqualTo(4)); - Assert.That(((IFdbTuple)t4).Last(), Is.EqualTo(4)); - Assert.That(((IFdbTuple)t4).Last(), Is.EqualTo("4")); - - var t5 = FdbTuple.Create(1, 2, 3, 4, 5); - Assert.That(t5.First(), Is.EqualTo(1)); - Assert.That(t5.First(), Is.EqualTo("1")); - Assert.That(t5.Last, Is.EqualTo(5)); - Assert.That(((IFdbTuple)t5).Last(), Is.EqualTo(5)); - Assert.That(((IFdbTuple)t5).Last(), Is.EqualTo("5")); - - var tn = FdbTuple.Create(1, 2, 3, 4, 5, 6); - Assert.That(tn.First(), Is.EqualTo(1)); - Assert.That(tn.First(), Is.EqualTo("1")); - Assert.That(tn.Last(), Is.EqualTo(6)); - Assert.That(tn.Last(), Is.EqualTo("6")); - - Assert.That(() => FdbTuple.Empty.First(), Throws.InstanceOf()); - Assert.That(() => FdbTuple.Empty.Last(), Throws.InstanceOf()); - } - - [Test] - public void Test_FdbTuple_Unpack_First_And_Last() - { - // should only work with tuples having at least one element - - Slice packed; - - packed = 
FdbTuple.EncodeKey(1); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo("1")); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo("1")); - - packed = FdbTuple.EncodeKey(1, 2); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo("1")); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo(2)); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo("2")); - - packed = FdbTuple.EncodeKey(1, 2, 3); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo("1")); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo(3)); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo("3")); - - packed = FdbTuple.EncodeKey(1, 2, 3, 4); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo("1")); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo(4)); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo("4")); - - packed = FdbTuple.EncodeKey(1, 2, 3, 4, 5); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo("1")); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo(5)); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo("5")); - - packed = FdbTuple.EncodeKey(1, 2, 3, 4, 5, 6); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo("1")); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo(6)); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo("6")); - - packed = FdbTuple.EncodeKey(1, 2, 3, 4, 5, 6, 7); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo("1")); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo(7)); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo("7")); - - 
packed = FdbTuple.EncodeKey(1, 2, 3, 4, 5, 6, 7, 8); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeFirst(packed), Is.EqualTo("1")); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo(8)); - Assert.That(FdbTuple.DecodeLast(packed), Is.EqualTo("8")); - - Assert.That(() => FdbTuple.DecodeFirst(Slice.Nil), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeFirst(Slice.Empty), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeLast(Slice.Nil), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeLast(Slice.Empty), Throws.InstanceOf()); - - } - - [Test] - public void Test_FdbTuple_UnpackSingle() - { - // should only work with tuples having exactly one element - - Slice packed; - - packed = FdbTuple.EncodeKey(1); - Assert.That(FdbTuple.DecodeKey(packed), Is.EqualTo(1)); - Assert.That(FdbTuple.DecodeKey(packed), Is.EqualTo("1")); - - packed = FdbTuple.EncodeKey("Hello\0World"); - Assert.That(FdbTuple.DecodeKey(packed), Is.EqualTo("Hello\0World")); - - Assert.That(() => FdbTuple.DecodeKey(Slice.Nil), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeKey(Slice.Empty), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeKey(FdbTuple.EncodeKey(1, 2)), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeKey(FdbTuple.EncodeKey(1, 2, 3)), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeKey(FdbTuple.EncodeKey(1, 2, 3, 4)), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeKey(FdbTuple.EncodeKey(1, 2, 3, 4, 5)), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeKey(FdbTuple.EncodeKey(1, 2, 3, 4, 5, 6)), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeKey(FdbTuple.EncodeKey(1, 2, 3, 4, 5, 6, 7)), Throws.InstanceOf()); - Assert.That(() => FdbTuple.DecodeKey(FdbTuple.EncodeKey(1, 2, 3, 4, 5, 6, 7, 8)), Throws.InstanceOf()); - - } - - [Test] - public void Test_FdbTuple_Embedded_Tuples() - { - // (A,B).Append((C,D)) should return (A,B,(C,D)) (length 3) and not (A,B,C,D) (length 
4) - - FdbTuple x = FdbTuple.Create("A", "B"); - FdbTuple y = FdbTuple.Create("C", "D"); - - // using the instance method that returns a FdbTuple - IFdbTuple z = x.Append(y); - Log(z); - Assert.That(z, Is.Not.Null); - Assert.That(z.Count, Is.EqualTo(3)); - Assert.That(z[0], Is.EqualTo("A")); - Assert.That(z[1], Is.EqualTo("B")); - Assert.That(z[2], Is.EqualTo(y)); - var t = z.Get(2); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(2)); - Assert.That(t[0], Is.EqualTo("C")); - Assert.That(t[1], Is.EqualTo("D")); - - // casted down to the interface IFdbTuple - z = ((IFdbTuple)x).Append((IFdbTuple)y); - Log(z); - Assert.That(z, Is.Not.Null); - Assert.That(z.Count, Is.EqualTo(3)); - Assert.That(z[0], Is.EqualTo("A")); - Assert.That(z[1], Is.EqualTo("B")); - Assert.That(z[2], Is.EqualTo(y)); - t = z.Get(2); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(2)); - Assert.That(t[0], Is.EqualTo("C")); - Assert.That(t[1], Is.EqualTo("D")); - - // composite index key "(prefix, value, id)" - IFdbTuple subspace = FdbTuple.Create(123, 42); - IFdbTuple value = FdbTuple.Create(2014, 11, 6); // Indexing a date value (Y, M, D) - string id = "Doc123"; - z = subspace.Append(value, id); - Log(z); - Assert.That(z.Count, Is.EqualTo(4)); - } - - [Test] - public void Test_FdbTuple_With() - { - //note: important to always cast to (IFdbTuple) to be sure that we don't call specialized instance methods (tested elsewhere) - IFdbTuple t; - - // Size 1 - - t = FdbTuple.Create(123); - t.With((int a) => - { - Assert.That(a, Is.EqualTo(123)); - }); - Assert.That(t.With((int a) => - { - Assert.That(a, Is.EqualTo(123)); - return 42; - }), Is.EqualTo(42)); - - // Size 2 - - t = t.Append("abc"); - t.With((int a, string b) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - }); - Assert.That(t.With((int a, string b) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - return 42; - }), Is.EqualTo(42)); - - // Size 3 - 
- t = t.Append(3.14f); - t.With((int a, string b, float c) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - }); - Assert.That(t.With((int a, string b, float c) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - return 42; - }), Is.EqualTo(42)); - - // Size 4 - - t = t.Append(true); - t.With((int a, string b, float c, bool d) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - }); - Assert.That(t.With((int a, string b, float c, bool d) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - return 42; - }), Is.EqualTo(42)); - - // Size 5 - - t = t.Append('z'); - t.With((int a, string b, float c, bool d, char e) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - }); - Assert.That(t.With((int a, string b, float c, bool d, char e) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - return 42; - }), Is.EqualTo(42)); - - // Size 6 - - t = t.Append(Math.PI); - t.With((int a, string b, float c, bool d, char e, double f) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - Assert.That(f, Is.EqualTo(Math.PI)); - }); - Assert.That(t.With((int a, string b, float c, bool d, char e, double f) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - Assert.That(f, 
Is.EqualTo(Math.PI)); - return 42; - }), Is.EqualTo(42)); - - // Size 7 - - t = t.Append(IPAddress.Loopback); - t.With((int a, string b, float c, bool d, char e, double f, IPAddress g) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - Assert.That(f, Is.EqualTo(Math.PI)); - Assert.That(g, Is.EqualTo(IPAddress.Loopback)); - }); - Assert.That(t.With((int a, string b, float c, bool d, char e, double f, IPAddress g) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - Assert.That(f, Is.EqualTo(Math.PI)); - Assert.That(g, Is.EqualTo(IPAddress.Loopback)); - return 42; - }), Is.EqualTo(42)); - - // Size 8 - - t = t.Append(DateTime.MaxValue); - t.With((int a, string b, float c, bool d, char e, double f, IPAddress g, DateTime h) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - Assert.That(f, Is.EqualTo(Math.PI)); - Assert.That(g, Is.EqualTo(IPAddress.Loopback)); - Assert.That(h, Is.EqualTo(DateTime.MaxValue)); - }); - Assert.That(t.With((int a, string b, float c, bool d, char e, double f, IPAddress g, DateTime h) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - Assert.That(f, Is.EqualTo(Math.PI)); - Assert.That(g, Is.EqualTo(IPAddress.Loopback)); - Assert.That(h, Is.EqualTo(DateTime.MaxValue)); - return 42; - }), Is.EqualTo(42)); - - } - - [Test] - public void Test_FdbTuple_With_Struct() - { - // calling With() on the structs is faster - - FdbTuple t1 = FdbTuple.Create(123); - t1.With((a) => - { - Assert.That(a, Is.EqualTo(123)); - }); - Assert.That(t1.With((a) => - 
{ - Assert.That(a, Is.EqualTo(123)); - return 42; - }), Is.EqualTo(42)); - - FdbTuple t2 = FdbTuple.Create(123, "abc"); - t2.With((a, b) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - }); - Assert.That(t2.With((a, b) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - return 42; - }), Is.EqualTo(42)); - - FdbTuple t3 = FdbTuple.Create(123, "abc", 3.14f); - t3.With((a, b, c) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - }); - Assert.That(t3.With((a, b, c) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - return 42; - }), Is.EqualTo(42)); - - FdbTuple t4 = FdbTuple.Create(123, "abc", 3.14f, true); - t4.With((a, b, c, d) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - }); - Assert.That(t4.With((a, b, c, d) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - return 42; - }), Is.EqualTo(42)); - - FdbTuple t5 = FdbTuple.Create(123, "abc", 3.14f, true, 'z'); - t5.With((a, b, c, d, e) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - }); - Assert.That(t5.With((a, b, c, d, e) => - { - Assert.That(a, Is.EqualTo(123)); - Assert.That(b, Is.EqualTo("abc")); - Assert.That(c, Is.EqualTo(3.14f)); - Assert.That(d, Is.True); - Assert.That(e, Is.EqualTo('z')); - return 42; - }), Is.EqualTo(42)); - - //TODO: add more if we ever add struct tuples with 6 or more items - } - - [Test] - public void Test_FdbTuple_Of_Size() - { - // OfSize(n) check the size and return the tuple if it passed - // VerifySize(n) only check the size - // Both should throw if tuple is null, or not 
the expected size - - Action verify = (t) => - { - for (int i = 0; i <= 10; i++) - { - if (t.Count > i) - { - Assert.That(() => t.OfSize(i), Throws.InstanceOf()); - Assert.That(t.OfSizeAtLeast(i), Is.SameAs(t)); - Assert.That(() => t.OfSizeAtMost(i), Throws.InstanceOf()); - } - else if (t.Count < i) - { - Assert.That(() => t.OfSize(i), Throws.InstanceOf()); - Assert.That(() => t.OfSizeAtLeast(i), Throws.InstanceOf()); - Assert.That(t.OfSizeAtMost(i), Is.SameAs(t)); - } - else - { - Assert.That(t.OfSize(i), Is.SameAs(t)); - Assert.That(t.OfSizeAtLeast(i), Is.SameAs(t)); - Assert.That(t.OfSizeAtMost(i), Is.SameAs(t)); - } - } - }; - - verify(FdbTuple.Empty); - verify(FdbTuple.Create(123)); - verify(FdbTuple.Create(123, "abc")); - verify(FdbTuple.Create(123, "abc", 3.14f)); - verify(FdbTuple.Create(123, "abc", 3.14f, true)); - verify(FdbTuple.Create(123, "abc", 3.14f, true, 'z')); - verify(FdbTuple.FromArray(new[] { "hello", "world", "!" })); - verify(FdbTuple.FromEnumerable(Enumerable.Range(0, 10))); - - verify(FdbTuple.Create(123, "abc", 3.14f, true, 'z')[0, 2]); - verify(FdbTuple.Create(123, "abc", 3.14f, true, 'z')[1, 4]); - verify(FdbTuple.FromEnumerable(Enumerable.Range(0, 50)).Substring(15, 6)); - - IFdbTuple none = null; - Assert.That(() => none.OfSize(0), Throws.InstanceOf()); - Assert.That(() => none.OfSizeAtLeast(0), Throws.InstanceOf()); - Assert.That(() => none.OfSizeAtMost(0), Throws.InstanceOf()); - } - - [Test] - public void Test_FdbTuple_Truncate() - { - IFdbTuple t = FdbTuple.Create("Hello", 123, false, TimeSpan.FromSeconds(5), "World"); - - var head = t.Truncate(1); - Assert.That(head, Is.Not.Null); - Assert.That(head.Count, Is.EqualTo(1)); - Assert.That(head[0], Is.EqualTo("Hello")); - - head = t.Truncate(2); - Assert.That(head, Is.Not.Null); - Assert.That(head.Count, Is.EqualTo(2)); - Assert.That(head[0], Is.EqualTo("Hello")); - Assert.That(head[1], Is.EqualTo(123)); - - head = t.Truncate(5); - Assert.That(head, Is.EqualTo(t)); - - var tail = 
t.Truncate(-1); - Assert.That(tail, Is.Not.Null); - Assert.That(tail.Count, Is.EqualTo(1)); - Assert.That(tail[0], Is.EqualTo("World")); - - tail = t.Truncate(-2); - Assert.That(tail, Is.Not.Null); - Assert.That(tail.Count, Is.EqualTo(2)); - Assert.That(tail[0], Is.EqualTo(TimeSpan.FromSeconds(5))); - Assert.That(tail[1], Is.EqualTo("World")); - - tail = t.Truncate(-5); - Assert.That(tail, Is.EqualTo(t)); - - Assert.That(t.Truncate(0), Is.EqualTo(FdbTuple.Empty)); - Assert.That(() => t.Truncate(6), Throws.InstanceOf()); - Assert.That(() => t.Truncate(-6), Throws.InstanceOf()); - - Assert.That(() => FdbTuple.Empty.Truncate(1), Throws.InstanceOf()); - Assert.That(() => FdbTuple.Create("Hello", "World").Truncate(3), Throws.InstanceOf()); - Assert.That(() => FdbTuple.Create("Hello", "World").Truncate(-3), Throws.InstanceOf()); - } - - [Test] - public void Test_FdbTuple_As() - { - // IFdbTuple.As<...>() adds types to an untyped IFdbTuple - IFdbTuple t; - - t = FdbTuple.Create("Hello"); - var t1 = t.As(); - Assert.That(t1.Item1, Is.EqualTo("Hello")); - - t = FdbTuple.Create("Hello", 123); - var t2 = t.As(); - Assert.That(t2.Item1, Is.EqualTo("Hello")); - Assert.That(t2.Item2, Is.EqualTo(123)); - - t = FdbTuple.Create("Hello", 123, false); - var t3 = t.As(); - Assert.That(t3.Item1, Is.EqualTo("Hello")); - Assert.That(t3.Item2, Is.EqualTo(123)); - Assert.That(t3.Item3, Is.EqualTo(false)); - - var t4 = FdbTuple - .Create("Hello", 123, false, TimeSpan.FromSeconds(5)) - .As(); - Assert.That(t4.Item1, Is.EqualTo("Hello")); - Assert.That(t4.Item2, Is.EqualTo(123)); - Assert.That(t4.Item3, Is.EqualTo(false)); - Assert.That(t4.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); - - t = FdbTuple.Create("Hello", 123, false, TimeSpan.FromSeconds(5), "World"); - var t5 = t.As(); - Assert.That(t5.Item1, Is.EqualTo("Hello")); - Assert.That(t5.Item2, Is.EqualTo(123)); - Assert.That(t5.Item3, Is.EqualTo(false)); - Assert.That(t5.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); - 
Assert.That(t5.Item5, Is.EqualTo("World")); - } - - [Test] - public void Test_Cast_To_BCL_Tuples() - { - // implicit: Tuple => FdbTuple - // explicit: FdbTuple => Tuple - - var t1 = FdbTuple.Create("Hello"); - var b1 = (Tuple) t1; // explicit - Assert.That(b1, Is.Not.Null); - Assert.That(b1.Item1, Is.EqualTo("Hello")); - FdbTuple r1 = t1; // implicit - Assert.That(r1.Item1, Is.EqualTo("Hello")); - - var t2 = FdbTuple.Create("Hello", 123); - var b2 = (Tuple)t2; // explicit - Assert.That(b2, Is.Not.Null); - Assert.That(b2.Item1, Is.EqualTo("Hello")); - Assert.That(b2.Item2, Is.EqualTo(123)); - FdbTuple r2 = t2; // implicit - Assert.That(r2.Item1, Is.EqualTo("Hello")); - Assert.That(r2.Item2, Is.EqualTo(123)); - - var t3 = FdbTuple.Create("Hello", 123, false); - var b3 = (Tuple)t3; // explicit - Assert.That(b3, Is.Not.Null); - Assert.That(b3.Item1, Is.EqualTo("Hello")); - Assert.That(b3.Item2, Is.EqualTo(123)); - Assert.That(b3.Item3, Is.EqualTo(false)); - FdbTuple r3 = t3; // implicit - Assert.That(r3.Item1, Is.EqualTo("Hello")); - Assert.That(r3.Item2, Is.EqualTo(123)); - Assert.That(r3.Item3, Is.EqualTo(false)); - - var t4 = FdbTuple.Create("Hello", 123, false, TimeSpan.FromSeconds(5)); - var b4 = (Tuple)t4; // explicit - Assert.That(b4, Is.Not.Null); - Assert.That(b4.Item1, Is.EqualTo("Hello")); - Assert.That(b4.Item2, Is.EqualTo(123)); - Assert.That(b4.Item3, Is.EqualTo(false)); - Assert.That(b4.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); - FdbTuple r4 = t4; // implicit - Assert.That(r4.Item1, Is.EqualTo("Hello")); - Assert.That(r4.Item2, Is.EqualTo(123)); - Assert.That(r4.Item3, Is.EqualTo(false)); - Assert.That(r4.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); - - var t5 = FdbTuple.Create("Hello", 123, false, TimeSpan.FromSeconds(5), "World"); - var b5 = (Tuple)t5; // explicit - Assert.That(b5, Is.Not.Null); - Assert.That(b5.Item1, Is.EqualTo("Hello")); - Assert.That(b5.Item2, Is.EqualTo(123)); - Assert.That(b5.Item3, Is.EqualTo(false)); - Assert.That(b5.Item4, 
Is.EqualTo(TimeSpan.FromSeconds(5))); - Assert.That(b5.Item5, Is.EqualTo("World")); - FdbTuple r5 = t5; // implicit - Assert.That(r5.Item1, Is.EqualTo("Hello")); - Assert.That(r5.Item2, Is.EqualTo(123)); - Assert.That(r5.Item3, Is.EqualTo(false)); - Assert.That(r5.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); - Assert.That(r5.Item5, Is.EqualTo("World")); - - } - - #endregion - - #region Splicing... - - private static void VerifyTuple(string message, IFdbTuple t, object[] expected) - { - // count - if (t.Count != expected.Length) - { -#if DEBUG - if (Debugger.IsAttached) Debugger.Break(); -#endif - Assert.Fail("{0}: Count mismatch between observed {1} and expected {2} for tuple of type {3}", message, t, FdbTuple.ToString(expected), t.GetType().Name); - } - - // direct access - for (int i = 0; i < expected.Length; i++) - { - Assert.That(ComparisonHelper.AreSimilar(t[i], expected[i]), Is.True, "{0}: t[{1}] != expected[{1}]", message, i); - } - - // iterator - int p = 0; - foreach (var obj in t) - { - if (p >= expected.Length) Assert.Fail("Spliced iterator overshoot at t[{0}] = {1}", p, obj); - Assert.That(ComparisonHelper.AreSimilar(obj, expected[p]), Is.True, "{0}: Iterator[{1}], {2} ~= {3}", message, p, obj, expected[p]); - ++p; - } - Assert.That(p, Is.EqualTo(expected.Length), "{0}: t.GetEnumerator() returned only {1} elements out of {2} exected", message, p, expected.Length); - - // CopyTo - var tmp = new object[expected.Length]; - t.CopyTo(tmp, 0); - for (int i = 0; i < tmp.Length; i++) - { - Assert.That(ComparisonHelper.AreSimilar(tmp[i], expected[i]), Is.True, "{0}: CopyTo[{1}], {2} ~= {3}", message, i, tmp[i], expected[i]); - } - - // Memoize - tmp = t.Memoize().ToArray(); - for (int i = 0; i < tmp.Length; i++) - { - Assert.That(ComparisonHelper.AreSimilar(tmp[i], expected[i]), Is.True, "{0}: Memoize.Items[{1}], {2} ~= {3}", message, i, tmp[i], expected[i]); - } - - // Append - if (!(t is FdbSlicedTuple)) - { - var u = t.Append("last"); - Assert.That(u.Get(-1), 
Is.EqualTo("last")); - tmp = u.ToArray(); - for (int i = 0; i < tmp.Length - 1; i++) - { - Assert.That(ComparisonHelper.AreSimilar(tmp[i], expected[i]), Is.True, "{0}: Appended[{1}], {2} ~= {3}", message, i, tmp[i], expected[i]); - } - } - } - - [Test] - public void Test_Can_Splice_FdbListTuple() - { - var items = new object[] { "hello", "world", 123, "foo", 456, "bar" }; - // 0 1 2 3 4 5 - // -6 -5 -4 -3 -2 -1 - - var tuple = new FdbListTuple(items); - Assert.That(tuple.Count, Is.EqualTo(6)); - - // get all - VerifyTuple("[:]", tuple[null, null], items); - VerifyTuple("[:]", tuple[null, 6], items); - VerifyTuple("[:]", tuple[0, null], items); - VerifyTuple("[:]", tuple[0, 6], items); - VerifyTuple("[:]", tuple[0, null], items); - VerifyTuple("[:]", tuple[-6, null], items); - VerifyTuple("[:]", tuple[-6, 6], items); - - // tail - VerifyTuple("[n:]", tuple[4, null], new object[] { 456, "bar" }); - VerifyTuple("[n:+]", tuple[4, 6], new object[] { 456, "bar" }); - VerifyTuple("[-n:+]", tuple[-2, 6], new object[] { 456, "bar" }); - VerifyTuple("[-n:-]", tuple[-2, null], new object[] { 456, "bar" }); - - // head - VerifyTuple("[:n]", tuple[null, 3], new object[] { "hello", "world", 123 }); - VerifyTuple("[0:n]", tuple[0, 3], new object[] { "hello", "world", 123 }); - VerifyTuple("[0:-n]", tuple[0, -3], new object[] { "hello", "world", 123 }); - VerifyTuple("[-:n]", tuple[-6, 3], new object[] { "hello", "world", 123 }); - VerifyTuple("[-:-n]", tuple[-6, -3], new object[] { "hello", "world", 123 }); - - // single - VerifyTuple("[0:1]", tuple[0, 1], new object[] { "hello" }); - VerifyTuple("[-6:-5]", tuple[-6, -5], new object[] { "hello" }); - VerifyTuple("[1:2]", tuple[1, 2], new object[] { "world" }); - VerifyTuple("[-5:-4]", tuple[-5, -4], new object[] { "world" }); - VerifyTuple("[5:6]", tuple[5, 6], new object[] { "bar" }); - VerifyTuple("[-1:]", tuple[-1, null], new object[] { "bar" }); - - // chunk - VerifyTuple("[2:4]", tuple[2, 4], new object[] { 123, "foo" }); - 
VerifyTuple("[2:-2]", tuple[2, -2], new object[] { 123, "foo" }); - VerifyTuple("[-4:4]", tuple[-4, 4], new object[] { 123, "foo" }); - VerifyTuple("[-4:-2]", tuple[-4, -2], new object[] { 123, "foo" }); - - // remove first - VerifyTuple("[1:]", tuple[1, null], new object[] { "world", 123, "foo", 456, "bar" }); - VerifyTuple("[1:+]", tuple[1, 6], new object[] { "world", 123, "foo", 456, "bar" }); - VerifyTuple("[-5:]", tuple[-5, null], new object[] { "world", 123, "foo", 456, "bar" }); - VerifyTuple("[-5:+]", tuple[-5, 6], new object[] { "world", 123, "foo", 456, "bar" }); - - // remove last - VerifyTuple("[:5]", tuple[null, 5], new object[] { "hello", "world", 123, "foo", 456 }); - VerifyTuple("[:-1]", tuple[null, -1], new object[] { "hello", "world", 123, "foo", 456 }); - VerifyTuple("[0:5]", tuple[0, 5], new object[] { "hello", "world", 123, "foo", 456 }); - VerifyTuple("[0:-1]", tuple[0, -1], new object[] { "hello", "world", 123, "foo", 456 }); - - // out of range - VerifyTuple("[2:7]", tuple[2, 7], new object[] { 123, "foo", 456, "bar" }); - VerifyTuple("[2:42]", tuple[2, 42], new object[] { 123, "foo", 456, "bar" }); - VerifyTuple("[2:123456]", tuple[2, 123456], new object[] { 123, "foo", 456, "bar" }); - VerifyTuple("[-7:2]", tuple[-7, 2], new object[] { "hello", "world" }); - VerifyTuple("[-42:2]", tuple[-42, 2], new object[] { "hello", "world" }); - } - - private static object[] GetRange(int fromIncluded, int toExcluded, int count) - { - if (count == 0) return new object[0]; - - if (fromIncluded < 0) fromIncluded += count; - if (toExcluded < 0) toExcluded += count; - - if (toExcluded > count) toExcluded = count; - var tmp = new object[toExcluded - fromIncluded]; - for (int i = 0; i < tmp.Length; i++) tmp[i] = new string((char) (65 + fromIncluded + i), 1); - return tmp; - } - - [Test] - public void Test_Randomized_Splices() - { - // Test a random mix of sizes, and indexes... 
- - const int N = 100 * 1000; - - var tuples = new IFdbTuple[14]; - tuples[0] = FdbTuple.Empty; - tuples[1] = FdbTuple.Create("A"); - tuples[2] = FdbTuple.Create("A", "B"); - tuples[3] = FdbTuple.Create("A", "B", "C"); - tuples[4] = FdbTuple.Create("A", "B", "C", "D"); - tuples[5] = FdbTuple.Create("A", "B", "C", "D", "E"); - tuples[6] = FdbTuple.Create("A", "B", "C", "D", "E", "F"); - tuples[7] = FdbTuple.Create("A", "B", "C", "D", "E", "F", "G"); - tuples[8] = FdbTuple.Create("A", "B", "C", "D", "E", "F", "G", "H"); - tuples[9] = FdbTuple.Create("A", "B", "C", "D", "E", "F", "G", "H", "I"); - tuples[10]= FdbTuple.Create("A", "B", "C", "D", "E", "F", "G", "H", "I", "J"); - tuples[11] = new FdbJoinedTuple(tuples[6], FdbTuple.Create("G", "H", "I", "J", "K")); - tuples[12] = new FdbLinkedTuple(tuples[11], "L"); - tuples[13] = new FdbLinkedTuple(FdbTuple.Create("A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L"), "M"); - -#if false - Console.Write("Checking tuples"); - - foreach (var tuple in tuples) - { - var t = FdbTuple.Unpack(tuple.ToSlice()); - Assert.That(t.Equals(tuple), Is.True, t.ToString() + " != unpack(" + tuple.ToString() + ")"); - } -#endif - - var rnd = new Random(123456); - - for (int i = 0; i < N; i++) - { - if (i % 500 == 0) Console.Write("."); - var len = rnd.Next(tuples.Length); - var tuple = tuples[len]; - Assert.That(tuple.Count, Is.EqualTo(len)); - - string prefix = tuple.ToString(); - - if (rnd.Next(5) == 0) - { // randomly pack/unpack - tuple = FdbTuple.Unpack(tuple.ToSlice()); - prefix = "unpacked:" + prefix; - } - else if (rnd.Next(5) == 0) - { // randomly memoize - tuple = tuple.Memoize(); - prefix = "memoized:" + prefix; - } - - switch (rnd.Next(6)) - { - case 0: - { // [:+rnd] - int x = rnd.Next(len); - VerifyTuple(prefix + "[:" + x.ToString() + "]", tuple[null, x], GetRange(0, x, len)); - break; - } - case 1: - { // [+rnd:] - int x = rnd.Next(len); - VerifyTuple(prefix + "[" + x.ToString() + ":]", tuple[x, null], GetRange(x, 
int.MaxValue, len)); - break; - } - case 2: - { // [:-rnd] - int x = -1 - rnd.Next(len); - VerifyTuple(prefix + "[:" + x.ToString() + "]", tuple[null, x], GetRange(0, len + x, len)); - break; - } - case 3: - { // [-rnd:] - int x = -1 - rnd.Next(len); - VerifyTuple(prefix + "[" + x.ToString() + ":]", tuple[x, null], GetRange(len + x, int.MaxValue, len)); - break; - } - case 4: - { // [rnd:rnd] - int x = rnd.Next(len); - int y; - do { y = rnd.Next(len); } while (y < x); - VerifyTuple(prefix + " [" + x.ToString() + ":" + y.ToString() + "]", tuple[x, y], GetRange(x, y, len)); - break; - } - case 5: - { // [-rnd:-rnd] - int x = -1 - rnd.Next(len); - int y; - do { y = -1 - rnd.Next(len); } while (y < x); - VerifyTuple(prefix + " [" + x.ToString() + ":" + y.ToString() + "]", tuple[x, y], GetRange(len + x, len + y, len)); - break; - } - } - - } - Console.WriteLine(" done"); - - } - - #endregion - - #region Serialization... - - [Test] - public void Test_FdbTuple_Serialize_Bytes() - { - // Byte arrays are stored with prefix '01' followed by the bytes, and terminated by '00'. 
All occurences of '00' in the byte array are escaped with '00 FF' - // - Best case: packed_size = 2 + array_len - // - Worst case: packed_size = 2 + array_len * 2 - - Slice packed; - - packed = FdbTuple.EncodeKey(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }); - Assert.That(packed.ToString(), Is.EqualTo("<01><12>4Vx<9A><00>")); - packed = FdbTuple.EncodeKey(new byte[] { 0x00, 0x42 }); - Assert.That(packed.ToString(), Is.EqualTo("<01><00>B<00>")); - packed = FdbTuple.EncodeKey(new byte[] { 0x42, 0x00 }); - Assert.That(packed.ToString(), Is.EqualTo("<01>B<00><00>")); - packed = FdbTuple.EncodeKey(new byte[] { 0x42, 0x00, 0x42 }); - Assert.That(packed.ToString(), Is.EqualTo("<01>B<00>B<00>")); - packed = FdbTuple.EncodeKey(new byte[] { 0x42, 0x00, 0x00, 0x42 }); - Assert.That(packed.ToString(), Is.EqualTo("<01>B<00><00>B<00>")); - } - - [Test] - public void Test_FdbTuple_Deserialize_Bytes() - { - IFdbTuple t; - - t = FdbTuple.Unpack(Slice.Unescape("<01><01><23><45><67><89><00>")); - Assert.That(t.Get(0), Is.EqualTo(new byte[] { 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF })); - Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("01 23 45 67 89 AB CD EF")); - - t = FdbTuple.Unpack(Slice.Unescape("<01><42><00><00>")); - Assert.That(t.Get(0), Is.EqualTo(new byte[] { 0x42, 0x00 })); - Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("42 00")); - - t = FdbTuple.Unpack(Slice.Unescape("<01><00><42><00>")); - Assert.That(t.Get(0), Is.EqualTo(new byte[] { 0x00, 0x42 })); - Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("00 42")); - - t = FdbTuple.Unpack(Slice.Unescape("<01><42><00><42><00>")); - Assert.That(t.Get(0), Is.EqualTo(new byte[] { 0x42, 0x00, 0x42 })); - Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("42 00 42")); - - t = FdbTuple.Unpack(Slice.Unescape("<01><42><00><00><42><00>")); - Assert.That(t.Get(0), Is.EqualTo(new byte[] { 0x42, 0x00, 0x00, 0x42 })); - Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("42 00 00 42")); - } - - 
[Test] - public void Test_FdbTuple_Serialize_Unicode_Strings() - { - // Unicode strings are stored with prefix '02' followed by the utf8 bytes, and terminated by '00'. All occurences of '00' in the UTF8 bytes are escaped with '00 FF' - - Slice packed; - - // simple string - packed = FdbTuple.Create("hello world").ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("<02>hello world<00>")); - - // empty - packed = FdbTuple.Create(String.Empty).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("<02><00>")); - - // null - packed = FdbTuple.Create(default(string)).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("<00>")); - - // unicode - packed = FdbTuple.Create("こんにちは世界").ToSlice(); - // note: Encoding.UTF8.GetBytes("こんにちは世界") => { e3 81 93 e3 82 93 e3 81 ab e3 81 a1 e3 81 af e4 b8 96 e7 95 8c } - Assert.That(packed.ToString(), Is.EqualTo("<02><81><93><82><93><81><81><81><96><95><8C><00>")); - } - - [Test] - public void Test_FdbTuple_Deserialize_Unicode_Strings() - { - IFdbTuple t; - - // simple string - t = FdbTuple.Unpack(Slice.Unescape("<02>hello world<00>")); - Assert.That(t.Get(0), Is.EqualTo("hello world")); - Assert.That(t[0], Is.EqualTo("hello world")); - - // empty - t = FdbTuple.Unpack(Slice.Unescape("<02><00>")); - Assert.That(t.Get(0), Is.EqualTo(String.Empty)); - Assert.That(t[0], Is.EqualTo(String.Empty)); - - // null - t = FdbTuple.Unpack(Slice.Unescape("<00>")); - Assert.That(t.Get(0), Is.EqualTo(default(string))); - Assert.That(t[0], Is.Null); - - // unicode - t = FdbTuple.Unpack(Slice.Unescape("<02><81><93><82><93><81><81><81><96><95><8C><00>")); - // note: Encoding.UTF8.GetString({ e3 81 93 e3 82 93 e3 81 ab e3 81 a1 e3 81 af e4 b8 96 e7 95 8c }) => "こんにちは世界" - Assert.That(t.Get(0), Is.EqualTo("こんにちは世界")); - Assert.That(t[0], Is.EqualTo("こんにちは世界")); - } - - [Test] - public void Test_FdbTuple_Serialize_Guids() - { - // 128-bit Guids are stored with prefix '30' followed by 16 bytes formatted according to RFC 4122 - - // System.Guid are 
stored in Little-Endian, but RFC 4122's UUIDs are stored in Big Endian, so per convention we will swap them - - Slice packed; - - // note: new Guid(bytes from 0 to 15) => "03020100-0504-0706-0809-0a0b0c0d0e0f"; - packed = FdbTuple.Create(Guid.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f")).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("0<00><01><02><03><04><05><06><07><08><09><0A><0B><0C><0D><0E><0F>")); - - packed = FdbTuple.Create(Guid.Empty).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("0<00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")); - - } - - [Test] - public void Test_FdbTuple_Deserialize_Guids() - { - // 128-bit Guids are stored with prefix '30' followed by 16 bytes - // we also accept byte arrays (prefix '01') if they are of length 16 - - IFdbTuple packed; - - packed = FdbTuple.Unpack(Slice.Unescape("<30><00><01><02><03><04><05><06><07><08><09><0A><0B><0C><0D><0E><0F>")); - Assert.That(packed.Get(0), Is.EqualTo(Guid.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); - Assert.That(packed[0], Is.EqualTo(Guid.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); - - packed = FdbTuple.Unpack(Slice.Unescape("<30><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")); - Assert.That(packed.Get(0), Is.EqualTo(Guid.Empty)); - Assert.That(packed[0], Is.EqualTo(Guid.Empty)); - - // unicode string - packed = FdbTuple.Unpack(Slice.Unescape("<02>03020100-0504-0706-0809-0a0b0c0d0e0f<00>")); - Assert.That(packed.Get(0), Is.EqualTo(Guid.Parse("03020100-0504-0706-0809-0a0b0c0d0e0f"))); - //note: t[0] returns a string, not a GUID - - // null maps to Guid.Empty - packed = FdbTuple.Unpack(Slice.Unescape("<00>")); - Assert.That(packed.Get(0), Is.EqualTo(Guid.Empty)); - //note: t[0] returns null, not a GUID - - } - - [Test] - public void Test_FdbTuple_Serialize_Uuid128s() - { - // UUID128s are stored with prefix '30' followed by 16 bytes formatted according to RFC 4122 - - Slice packed; - - // note: new Uuid(bytes from 0 to 15) => 
"03020100-0504-0706-0809-0a0b0c0d0e0f"; - packed = FdbTuple.Create(Uuid128.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f")).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("0<00><01><02><03><04><05><06><07><08><09><0A><0B><0C><0D><0E><0F>")); - - packed = FdbTuple.Create(Uuid128.Empty).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("0<00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")); - } - - [Test] - public void Test_FdbTuple_Deserialize_Uuid128s() - { - // UUID128s are stored with prefix '30' followed by 16 bytes (the result of uuid.ToByteArray()) - // we also accept byte arrays (prefix '01') if they are of length 16 - - IFdbTuple packed; - - // note: new Uuid(bytes from 0 to 15) => "00010203-0405-0607-0809-0a0b0c0d0e0f"; - packed = FdbTuple.Unpack(Slice.Unescape("<30><00><01><02><03><04><05><06><07><08><09><0A><0B><0C><0D><0E><0F>")); - Assert.That(packed.Get(0), Is.EqualTo(Uuid128.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); - Assert.That(packed[0], Is.EqualTo(Uuid128.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); - - packed = FdbTuple.Unpack(Slice.Unescape("<30><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")); - Assert.That(packed.Get(0), Is.EqualTo(Uuid128.Empty)); - Assert.That(packed[0], Is.EqualTo(Uuid128.Empty)); - - // unicode string - packed = FdbTuple.Unpack(Slice.Unescape("<02>00010203-0405-0607-0809-0a0b0c0d0e0f<00>")); - Assert.That(packed.Get(0), Is.EqualTo(Uuid128.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); - //note: t[0] returns a string, not a UUID - - // null maps to Uuid.Empty - packed = FdbTuple.Unpack(Slice.Unescape("<00>")); - Assert.That(packed.Get(0), Is.EqualTo(Uuid128.Empty)); - //note: t[0] returns null, not a UUID - - } - - [Test] - public void Test_FdbTuple_Serialize_Uuid64s() - { - // UUID64s are stored with prefix '31' followed by 8 bytes formatted according to RFC 4122 - - Slice packed; - - // note: new Uuid(bytes from 0 to 7) => "00010203-04050607"; - packed = 
FdbTuple.Create(Uuid64.Parse("00010203-04050607")).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("1<00><01><02><03><04><05><06><07>")); - - packed = FdbTuple.Create(Uuid64.Parse("01234567-89ABCDEF")).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("1<01>#Eg<89>")); - - packed = FdbTuple.Create(Uuid64.Empty).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("1<00><00><00><00><00><00><00><00>")); - - packed = FdbTuple.Create(new Uuid64(0xBADC0FFEE0DDF00DUL)).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("1<0F>
<0D>")); - - packed = FdbTuple.Create(new Uuid64(0xDEADBEEFL)).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("1<00><00><00><00>")); - } - - [Test] - public void Test_FdbTuple_Deserialize_Uuid64s() - { - // UUID64s are stored with prefix '31' followed by 8 bytes (the result of uuid.ToByteArray()) - // we also accept byte arrays (prefix '01') if they are of length 8, and unicode strings (prefix '02') - - IFdbTuple packed; - - // note: new Uuid(bytes from 0 to 15) => "00010203-0405-0607-0809-0a0b0c0d0e0f"; - packed = FdbTuple.Unpack(Slice.Unescape("<31><01><23><45><67><89>")); - Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Parse("01234567-89abcdef"))); - Assert.That(packed[0], Is.EqualTo(Uuid64.Parse("01234567-89abcdef"))); - - packed = FdbTuple.Unpack(Slice.Unescape("<31><00><00><00><00><00><00><00><00>")); - Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Empty)); - Assert.That(packed[0], Is.EqualTo(Uuid64.Empty)); - - // 8 bytes - packed = FdbTuple.Unpack(Slice.Unescape("<01><01><23><45><67><89><00>")); - Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Parse("01234567-89abcdef"))); - //note: t[0] returns a string, not a UUID - - // unicode string - packed = FdbTuple.Unpack(Slice.Unescape("<02>01234567-89abcdef<00>")); - Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Parse("01234567-89abcdef"))); - //note: t[0] returns a string, not a UUID - - // null maps to Uuid.Empty - packed = FdbTuple.Unpack(Slice.Unescape("<00>")); - Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Empty)); - //note: t[0] returns null, not a UUID - - } - - [Test] - public void Test_FdbTuple_Serialize_Integers() - { - // Positive integers are stored with a variable-length encoding. - // - The prefix is 0x14 + the minimum number of bytes to encode the integer, from 0 to 8, so valid prefixes range from 0x14 to 0x1C - // - The bytes are stored in High-Endian (ie: the upper bits first) - // Examples: - // - 0 => <14> - // - 1..255 => <15><##> - // - 256..65535 .. 
=> <16> - // - ulong.MaxValue => <1C> - - Assert.That( - FdbTuple.Create(0).ToSlice().ToString(), - Is.EqualTo("<14>") - ); - - Assert.That( - FdbTuple.Create(1).ToSlice().ToString(), - Is.EqualTo("<15><01>") - ); - - Assert.That( - FdbTuple.Create(255).ToSlice().ToString(), - Is.EqualTo("<15>") - ); - - Assert.That( - FdbTuple.Create(256).ToSlice().ToString(), - Is.EqualTo("<16><01><00>") - ); - - Assert.That( - FdbTuple.Create(65535).ToSlice().ToString(), - Is.EqualTo("<16>") - ); - - Assert.That( - FdbTuple.Create(65536).ToSlice().ToString(), - Is.EqualTo("<17><01><00><00>") - ); - - Assert.That( - FdbTuple.Create(int.MaxValue).ToSlice().ToString(), - Is.EqualTo("<18><7F>") - ); - - // signed max - Assert.That( - FdbTuple.Create(long.MaxValue).ToSlice().ToString(), - Is.EqualTo("<1C><7F>") - ); - - // unsigned max - Assert.That( - FdbTuple.Create(ulong.MaxValue).ToSlice().ToString(), - Is.EqualTo("<1C>") - ); - } - - [Test] - public void Test_FdbTuple_Deserialize_Integers() - { - - Action verify = (encoded, value) => - { - var slice = Slice.Unescape(encoded); - Assert.That(FdbTuplePackers.DeserializeBoxed(slice), Is.EqualTo(value), "DeserializeBoxed({0})", encoded); - - // int64 - Assert.That(FdbTuplePackers.DeserializeInt64(slice), Is.EqualTo(value), "DeserializeInt64({0})", encoded); - Assert.That(FdbTuplePacker.Deserialize(slice), Is.EqualTo(value), "Deserialize({0})", encoded); - - // uint64 - if (value >= 0) - { - Assert.That(FdbTuplePackers.DeserializeUInt64(slice), Is.EqualTo((ulong)value), "DeserializeUInt64({0})", encoded); - Assert.That(FdbTuplePacker.Deserialize(slice), Is.EqualTo((ulong)value), "Deserialize({0})", encoded); - } - else - { - Assert.That(() => FdbTuplePackers.DeserializeUInt64(slice), Throws.InstanceOf(), "DeserializeUInt64({0})", encoded); - } - - // int32 - if (value <= int.MaxValue && value >= int.MinValue) - { - Assert.That(FdbTuplePackers.DeserializeInt32(slice), Is.EqualTo((int)value), "DeserializeInt32({0})", encoded); - 
Assert.That(FdbTuplePacker.Deserialize(slice), Is.EqualTo((int)value), "Deserialize({0})", encoded); - } - else - { - Assert.That(() => FdbTuplePackers.DeserializeInt32(slice), Throws.InstanceOf(), "DeserializeInt32({0})", encoded); - } - - // uint32 - if (value <= uint.MaxValue && value >= 0) - { - Assert.That(FdbTuplePackers.DeserializeUInt32(slice), Is.EqualTo((uint)value), "DeserializeUInt32({0})", encoded); - Assert.That(FdbTuplePacker.Deserialize(slice), Is.EqualTo((uint)value), "Deserialize({0})", encoded); - } - else - { - Assert.That(() => FdbTuplePackers.DeserializeUInt32(slice), Throws.InstanceOf(), "DeserializeUInt32({0})", encoded); - } - - // int16 - if (value <= short.MaxValue && value >= short.MinValue) - { - Assert.That(FdbTuplePackers.DeserializeInt16(slice), Is.EqualTo((short)value), "DeserializeInt16({0})", encoded); - Assert.That(FdbTuplePacker.Deserialize(slice), Is.EqualTo((short)value), "Deserialize({0})", encoded); - } - else - { - Assert.That(() => FdbTuplePackers.DeserializeInt16(slice), Throws.InstanceOf(), "DeserializeInt16({0})", encoded); - } - - // uint16 - if (value <= ushort.MaxValue && value >= 0) - { - Assert.That(FdbTuplePackers.DeserializeUInt16(slice), Is.EqualTo((ushort)value), "DeserializeUInt16({0})", encoded); - Assert.That(FdbTuplePacker.Deserialize(slice), Is.EqualTo((ushort)value), "Deserialize({0})", encoded); - } - else - { - Assert.That(() => FdbTuplePackers.DeserializeUInt16(slice), Throws.InstanceOf(), "DeserializeUInt16({0})", encoded); - } - - // sbyte - if (value <= sbyte.MaxValue && value >= sbyte.MinValue) - { - Assert.That(FdbTuplePackers.DeserializeSByte(slice), Is.EqualTo((sbyte)value), "DeserializeSByte({0})", encoded); - Assert.That(FdbTuplePacker.Deserialize(slice), Is.EqualTo((sbyte)value), "Deserialize({0})", encoded); - } - else - { - Assert.That(() => FdbTuplePackers.DeserializeSByte(slice), Throws.InstanceOf(), "DeserializeSByte({0})", encoded); - } - - // byte - if (value <= 255 && value >= 0) - { 
- Assert.That(FdbTuplePackers.DeserializeByte(slice), Is.EqualTo((byte)value), "DeserializeByte({0})", encoded); - Assert.That(FdbTuplePacker.Deserialize(slice), Is.EqualTo((byte)value), "Deserialize({0})", encoded); - } - else - { - Assert.That(() => FdbTuplePackers.DeserializeByte(slice), Throws.InstanceOf(), "DeserializeByte({0})", encoded); - } - - }; - verify("<14>", 0); - verify("<15>{", 123); - verify("<15><80>", 128); - verify("<15>", 255); - verify("<16><01><00>", 256); - verify("<16><04>", 1234); - verify("<16><80><00>", 32768); - verify("<16>", 65535); - verify("<17><01><00><00>", 65536); - verify("<13>", -1); - verify("<13><00>", -255); - verify("<12>", -256); - verify("<12><00><00>", -65535); - verify("<11>", -65536); - verify("<18><7F>", int.MaxValue); - verify("<10><7F>", int.MinValue); - verify("<1C><7F>", long.MaxValue); - verify("<0C><7F>", long.MinValue); - } - - [Test] - public void Test_FdbTuple_Serialize_Negative_Integers() - { - // Negative integers are stored with a variable-length encoding. - // - The prefix is 0x14 - the minimum number of bytes to encode the integer, from 0 to 8, so valid prefixes range from 0x0C to 0x13 - // - The value is encoded as the one's complement, and stored in High-Endian (ie: the upper bits first) - // - There is no way to encode '-0', it will be encoded as '0' (<14>) - // Examples: - // - -255..-1 => <13><00> .. <13> - // - -65535..-256 => <12><00>00> .. 
<12> - // - long.MinValue => <0C><7F> - - Assert.That( - FdbTuple.Create(-1).ToSlice().ToString(), - Is.EqualTo("<13>") - ); - - Assert.That( - FdbTuple.Create(-255).ToSlice().ToString(), - Is.EqualTo("<13><00>") - ); - - Assert.That( - FdbTuple.Create(-256).ToSlice().ToString(), - Is.EqualTo("<12>") - ); - Assert.That( - FdbTuple.Create(-257).ToSlice().ToString(), - Is.EqualTo("<12>") - ); - - Assert.That( - FdbTuple.Create(-65535).ToSlice().ToString(), - Is.EqualTo("<12><00><00>") - ); - Assert.That( - FdbTuple.Create(-65536).ToSlice().ToString(), - Is.EqualTo("<11>") - ); - - Assert.That( - FdbTuple.Create(int.MinValue).ToSlice().ToString(), - Is.EqualTo("<10><7F>") - ); - - Assert.That( - FdbTuple.Create(long.MinValue).ToSlice().ToString(), - Is.EqualTo("<0C><7F>") - ); - } - - [Test] - public void Test_FdbTuple_Serialize_Singles() - { - // 32-bit floats are stored in 5 bytes, using the prefix 0x20 followed by the High-Endian representation of their normalized form - - Assert.That(FdbTuple.Create(0f).ToSlice().ToHexaString(' '), Is.EqualTo("20 80 00 00 00")); - Assert.That(FdbTuple.Create(42f).ToSlice().ToHexaString(' '), Is.EqualTo("20 C2 28 00 00")); - Assert.That(FdbTuple.Create(-42f).ToSlice().ToHexaString(' '), Is.EqualTo("20 3D D7 FF FF")); - - Assert.That(FdbTuple.Create((float)Math.Sqrt(2)).ToSlice().ToHexaString(' '), Is.EqualTo("20 BF B5 04 F3")); - - Assert.That(FdbTuple.Create(float.MinValue).ToSlice().ToHexaString(' '), Is.EqualTo("20 00 80 00 00"), "float.MinValue"); - Assert.That(FdbTuple.Create(float.MaxValue).ToSlice().ToHexaString(' '), Is.EqualTo("20 FF 7F FF FF"), "float.MaxValue"); - Assert.That(FdbTuple.Create(-0f).ToSlice().ToHexaString(' '), Is.EqualTo("20 7F FF FF FF"), "-0f"); - Assert.That(FdbTuple.Create(float.NegativeInfinity).ToSlice().ToHexaString(' '), Is.EqualTo("20 00 7F FF FF"), "float.NegativeInfinity"); - Assert.That(FdbTuple.Create(float.PositiveInfinity).ToSlice().ToHexaString(' '), Is.EqualTo("20 FF 80 00 00"), 
"float.PositiveInfinity"); - Assert.That(FdbTuple.Create(float.Epsilon).ToSlice().ToHexaString(' '), Is.EqualTo("20 80 00 00 01"), "+float.Epsilon"); - Assert.That(FdbTuple.Create(-float.Epsilon).ToSlice().ToHexaString(' '), Is.EqualTo("20 7F FF FF FE"), "-float.Epsilon"); - - // all possible variants of NaN should all be equal - Assert.That(FdbTuple.Create(float.NaN).ToSlice().ToHexaString(' '), Is.EqualTo("20 00 3F FF FF"), "float.NaN"); - - // cook up a non standard NaN (with some bits set in the fraction) - float f = float.NaN; // defined as 1f / 0f - uint nan; - unsafe { nan = *((uint*)&f); } - nan += 123; - unsafe { f = *((float*)&nan); } - Assert.That(float.IsNaN(f), Is.True); - Assert.That( - FdbTuple.Create(f).ToSlice().ToHexaString(' '), - Is.EqualTo("20 00 3F FF FF"), - "All variants of NaN must be normalized" - //note: if we have 20 00 3F FF 84, that means that the NaN was not normalized - ); - - } - - [Test] - public void Test_FdbTuple_Deserialize_Singles() - { - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 80 00 00 00")), Is.EqualTo(0f), "0f"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 C2 28 00 00")), Is.EqualTo(42f), "42f"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 3D D7 FF FF")), Is.EqualTo(-42f), "-42f"); - - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 BF B5 04 F3")), Is.EqualTo((float)Math.Sqrt(2)), "Sqrt(2)"); - - // well known values - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 00 80 00 00")), Is.EqualTo(float.MinValue), "float.MinValue"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 FF 7F FF FF")), Is.EqualTo(float.MaxValue), "float.MaxValue"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 7F FF FF FF")), Is.EqualTo(-0f), "-0f"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 00 7F FF FF")), Is.EqualTo(float.NegativeInfinity), "float.NegativeInfinity"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 FF 80 00 00")), Is.EqualTo(float.PositiveInfinity), "float.PositiveInfinity"); - 
Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 00 80 00 00")), Is.EqualTo(float.MinValue), "float.Epsilon"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 80 00 00 01")), Is.EqualTo(float.Epsilon), "+float.Epsilon"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 7F FF FF FE")), Is.EqualTo(-float.Epsilon), "-float.Epsilon"); - - // all possible variants of NaN should end up equal and normalized to float.NaN - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 00 3F FF FF")), Is.EqualTo(float.NaN), "float.NaN"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("20 00 3F FF FF")), Is.EqualTo(float.NaN), "float.NaN"); - } - - [Test] - public void Test_FdbTuple_Serialize_Doubles() - { - // 64-bit floats are stored in 9 bytes, using the prefix 0x21 followed by the High-Endian representation of their normalized form - - Assert.That(FdbTuple.Create(0d).ToSlice().ToHexaString(' '), Is.EqualTo("21 80 00 00 00 00 00 00 00")); - Assert.That(FdbTuple.Create(42d).ToSlice().ToHexaString(' '), Is.EqualTo("21 C0 45 00 00 00 00 00 00")); - Assert.That(FdbTuple.Create(-42d).ToSlice().ToHexaString(' '), Is.EqualTo("21 3F BA FF FF FF FF FF FF")); - - Assert.That(FdbTuple.Create(Math.PI).ToSlice().ToHexaString(' '), Is.EqualTo("21 C0 09 21 FB 54 44 2D 18")); - Assert.That(FdbTuple.Create(Math.E).ToSlice().ToHexaString(' '), Is.EqualTo("21 C0 05 BF 0A 8B 14 57 69")); - - Assert.That(FdbTuple.Create(double.MinValue).ToSlice().ToHexaString(' '), Is.EqualTo("21 00 10 00 00 00 00 00 00"), "double.MinValue"); - Assert.That(FdbTuple.Create(double.MaxValue).ToSlice().ToHexaString(' '), Is.EqualTo("21 FF EF FF FF FF FF FF FF"), "double.MaxValue"); - Assert.That(FdbTuple.Create(-0d).ToSlice().ToHexaString(' '), Is.EqualTo("21 7F FF FF FF FF FF FF FF"), "-0d"); - Assert.That(FdbTuple.Create(double.NegativeInfinity).ToSlice().ToHexaString(' '), Is.EqualTo("21 00 0F FF FF FF FF FF FF"), "double.NegativeInfinity"); - 
Assert.That(FdbTuple.Create(double.PositiveInfinity).ToSlice().ToHexaString(' '), Is.EqualTo("21 FF F0 00 00 00 00 00 00"), "double.PositiveInfinity"); - Assert.That(FdbTuple.Create(double.Epsilon).ToSlice().ToHexaString(' '), Is.EqualTo("21 80 00 00 00 00 00 00 01"), "+double.Epsilon"); - Assert.That(FdbTuple.Create(-double.Epsilon).ToSlice().ToHexaString(' '), Is.EqualTo("21 7F FF FF FF FF FF FF FE"), "-double.Epsilon"); - - // all possible variants of NaN should all be equal - - Assert.That(FdbTuple.Create(double.NaN).ToSlice().ToHexaString(' '), Is.EqualTo("21 00 07 FF FF FF FF FF FF"), "double.NaN"); - - // cook up a non standard NaN (with some bits set in the fraction) - double d = double.NaN; // defined as 1d / 0d - ulong nan; - unsafe { nan = *((ulong*)&d); } - nan += 123; - unsafe { d = *((double*)&nan); } - Assert.That(double.IsNaN(d), Is.True); - Assert.That( - FdbTuple.Create(d).ToSlice().ToHexaString(' '), - Is.EqualTo("21 00 07 FF FF FF FF FF FF") - //note: if we have 21 00 07 FF FF FF FF FF 84, that means that the NaN was not normalized - ); - - // roundtripping vectors of doubles - var tuple = FdbTuple.Create(Math.PI, Math.E, Math.Log(1), Math.Log(2)); - Assert.That(FdbTuple.Unpack(FdbTuple.EncodeKey(Math.PI, Math.E, Math.Log(1), Math.Log(2))), Is.EqualTo(tuple)); - Assert.That(FdbTuple.Unpack(FdbTuple.Create(Math.PI, Math.E, Math.Log(1), Math.Log(2)).ToSlice()), Is.EqualTo(tuple)); - Assert.That(FdbTuple.Unpack(FdbTuple.Empty.Append(Math.PI).Append(Math.E).Append(Math.Log(1)).Append(Math.Log(2)).ToSlice()), Is.EqualTo(tuple)); - } - - [Test] - public void Test_FdbTuple_Deserialize_Doubles() - { - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 80 00 00 00 00 00 00 00")), Is.EqualTo(0d), "0d"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 C0 45 00 00 00 00 00 00")), Is.EqualTo(42d), "42d"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 3F BA FF FF FF FF FF FF")), Is.EqualTo(-42d), "-42d"); - - 
Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 C0 09 21 FB 54 44 2D 18")), Is.EqualTo(Math.PI), "Math.PI"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 C0 05 BF 0A 8B 14 57 69")), Is.EqualTo(Math.E), "Math.E"); - - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 00 10 00 00 00 00 00 00")), Is.EqualTo(double.MinValue), "double.MinValue"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 FF EF FF FF FF FF FF FF")), Is.EqualTo(double.MaxValue), "double.MaxValue"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 7F FF FF FF FF FF FF FF")), Is.EqualTo(-0d), "-0d"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 00 0F FF FF FF FF FF FF")), Is.EqualTo(double.NegativeInfinity), "double.NegativeInfinity"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 FF F0 00 00 00 00 00 00")), Is.EqualTo(double.PositiveInfinity), "double.PositiveInfinity"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 80 00 00 00 00 00 00 01")), Is.EqualTo(double.Epsilon), "+double.Epsilon"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 7F FF FF FF FF FF FF FE")), Is.EqualTo(-double.Epsilon), "-double.Epsilon"); - - // all possible variants of NaN should end up equal and normalized to double.NaN - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 00 07 FF FF FF FF FF FF")), Is.EqualTo(double.NaN), "double.NaN"); - Assert.That(FdbTuple.DecodeKey(Slice.FromHexa("21 00 07 FF FF FF FF FF 84")), Is.EqualTo(double.NaN), "double.NaN"); - } - - [Test] - public void Test_FdbTuple_Serialize_Booleans() - { - // Booleans are stored as interger 0 (<14>) for false, and integer 1 (<15><01>) for true - - Slice packed; - - // bool - packed = FdbTuple.EncodeKey(false); - Assert.That(packed.ToString(), Is.EqualTo("<14>")); - packed = FdbTuple.EncodeKey(true); - Assert.That(packed.ToString(), Is.EqualTo("<15><01>")); - - // bool? 
- packed = FdbTuple.EncodeKey(default(bool?)); - Assert.That(packed.ToString(), Is.EqualTo("<00>")); - packed = FdbTuple.EncodeKey((bool?)false); - Assert.That(packed.ToString(), Is.EqualTo("<14>")); - packed = FdbTuple.EncodeKey((bool?)true); - Assert.That(packed.ToString(), Is.EqualTo("<15><01>")); - - // tuple containing bools - packed = FdbTuple.Create(true).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("<15><01>")); - packed = FdbTuple.Create(true, null, false).ToSlice(); - Assert.That(packed.ToString(), Is.EqualTo("<15><01><00><14>")); - } - - [Test] - public void Test_FdbTuple_Deserialize_Booleans() - { - // Null, 0, and empty byte[]/strings are equivalent to False. All others are equivalent to True - - // Falsy... - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<00>")), Is.EqualTo(false), "Null => False"); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<14>")), Is.EqualTo(false), "0 => False"); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<01><00>")), Is.EqualTo(false), "byte[0] => False"); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<02><00>")), Is.EqualTo(false), "String.Empty => False"); - - // Truthy - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<15><01>")), Is.EqualTo(true), "1 => True"); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<13>")), Is.EqualTo(true), "-1 => True"); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<01>Hello<00>")), Is.EqualTo(true), "'Hello' => True"); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<02>Hello<00>")), Is.EqualTo(true), "\"Hello\" => True"); - Assert.That(FdbTuple.DecodeKey(FdbTuple.EncodeKey(123456789)), Is.EqualTo(true), "random int => True"); - - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<02>True<00>")), Is.EqualTo(true), "\"True\" => True"); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<02>False<00>")), Is.EqualTo(true), "\"False\" => True ***"); - // note: even though it would be tempting to convert the string "false" to False, it is not a standard behavior accross all 
bindings - - // When decoded to object, though, they should return 0 and 1 - Assert.That(FdbTuplePackers.DeserializeBoxed(FdbTuple.EncodeKey(false)), Is.EqualTo(0)); - Assert.That(FdbTuplePackers.DeserializeBoxed(FdbTuple.EncodeKey(true)), Is.EqualTo(1)); - } - - [Test] - public void Test_FdbTuple_Serialize_IPAddress() - { - // IP Addresses are stored as a byte array (<01>..<00>), in network order (big-endian) - // They will take from 6 to 10 bytes, depending on the number of '.0' in them. - - Assert.That( - FdbTuple.Create(IPAddress.Loopback).ToSlice().ToHexaString(' '), - Is.EqualTo("01 7F 00 FF 00 FF 01 00") - ); - - Assert.That( - FdbTuple.Create(IPAddress.Any).ToSlice().ToHexaString(' '), - Is.EqualTo("01 00 FF 00 FF 00 FF 00 FF 00") - ); - - Assert.That( - FdbTuple.Create(IPAddress.Parse("1.2.3.4")).ToSlice().ToHexaString(' '), - Is.EqualTo("01 01 02 03 04 00") - ); - - } - - - [Test] - public void Test_FdbTuple_Deserialize_IPAddress() - { - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<01><7F><00><00><01><00>")), Is.EqualTo(IPAddress.Parse("127.0.0.1"))); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<01><00><00><00><00><00>")), Is.EqualTo(IPAddress.Parse("0.0.0.0"))); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<01><01><02><03><04><00>")), Is.EqualTo(IPAddress.Parse("1.2.3.4"))); - - Assert.That(FdbTuple.DecodeKey(FdbTuple.EncodeKey("127.0.0.1")), Is.EqualTo(IPAddress.Loopback)); - - var ip = IPAddress.Parse("192.168.0.1"); - Assert.That(FdbTuple.DecodeKey(FdbTuple.EncodeKey(ip.ToString())), Is.EqualTo(ip)); - Assert.That(FdbTuple.DecodeKey(FdbTuple.EncodeKey(ip.GetAddressBytes())), Is.EqualTo(ip)); - Assert.That(FdbTuple.DecodeKey(FdbTuple.EncodeKey(ip.Address)), Is.EqualTo(ip)); - } - - [Test] - public void Test_FdbTuple_NullableTypes() - { - // Nullable types will either be encoded as <14> for null, or their regular encoding if not null - - // serialize - - Assert.That(FdbTuple.EncodeKey(0), Is.EqualTo(Slice.Unescape("<14>"))); - 
Assert.That(FdbTuple.EncodeKey(123), Is.EqualTo(Slice.Unescape("<15>{"))); - Assert.That(FdbTuple.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>"))); - - Assert.That(FdbTuple.EncodeKey(0L), Is.EqualTo(Slice.Unescape("<14>"))); - Assert.That(FdbTuple.EncodeKey(123L), Is.EqualTo(Slice.Unescape("<15>{"))); - Assert.That(FdbTuple.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>"))); - - Assert.That(FdbTuple.EncodeKey(true), Is.EqualTo(Slice.Unescape("<15><01>"))); - Assert.That(FdbTuple.EncodeKey(false), Is.EqualTo(Slice.Unescape("<14>"))); - Assert.That(FdbTuple.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>")), "Maybe it was File Not Found?"); - - Assert.That(FdbTuple.EncodeKey(Guid.Empty), Is.EqualTo(Slice.Unescape("0<00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>"))); - Assert.That(FdbTuple.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>"))); - - Assert.That(FdbTuple.EncodeKey(TimeSpan.Zero), Is.EqualTo(Slice.Unescape("!<80><00><00><00><00><00><00><00>"))); - Assert.That(FdbTuple.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>"))); - - // deserialize - - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<14>")), Is.EqualTo(0)); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<15>{")), Is.EqualTo(123)); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<00>")), Is.Null); - - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<14>")), Is.EqualTo(0L)); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<15>{")), Is.EqualTo(123L)); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<00>")), Is.Null); - - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<15><01>")), Is.True); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<14>")), Is.False); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<00>")), Is.Null); - - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("0<00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")), Is.EqualTo(Guid.Empty)); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<00>")), Is.Null); - - 
Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<14>")), Is.EqualTo(TimeSpan.Zero)); - Assert.That(FdbTuple.DecodeKey(Slice.Unescape("<00>")), Is.Null); - - } - - [Test] - public void Test_FdbTuple_Serialize_Alias() - { - Assert.That( - FdbTuple.EncodeKey(FdbTupleAlias.System).ToString(), - Is.EqualTo("") - ); - - Assert.That( - FdbTuple.EncodeKey(FdbTupleAlias.Directory).ToString(), - Is.EqualTo("") - ); - - Assert.That( - FdbTuple.EncodeKey(FdbTupleAlias.Zero).ToString(), - Is.EqualTo("<00>") - ); - - } - - [Test] - public void Test_FdbTuple_Deserialize_Alias() - { - Slice slice; - - slice = Slice.Unescape(""); - Assert.That(FdbTuplePackers.DeserializeBoxed(slice), Is.EqualTo(FdbTupleAlias.System)); - - slice = Slice.Unescape(""); - Assert.That(FdbTuplePackers.DeserializeBoxed(slice), Is.EqualTo(FdbTupleAlias.Directory)); - - //note: FdbTupleAlias.Start is <00> and will be deserialized as null - } - - [Test] - public void Test_FdbTuple_Serialize_Embedded_Tuples() - { - Action verify = (t, expected) => - { - var key = t.ToSlice(); - Assert.That(key.ToHexaString(' '), Is.EqualTo(expected)); - Assert.That(FdbTuple.Pack(t), Is.EqualTo(key)); - var t2 = FdbTuple.Unpack(key); - Assert.That(t2, Is.Not.Null); - Assert.That(t2.Count, Is.EqualTo(t.Count), "{0}", t2); - Assert.That(t2, Is.EqualTo(t)); - }; - - // Index composite key - IFdbTuple value = FdbTuple.Create(2014, 11, 6); // Indexing a date value (Y, M, D) - string docId = "Doc123"; - // key would be "(..., value, id)" - - verify( - FdbTuple.Create(42, value, docId), - "15 2A 03 16 07 DE 15 0B 15 06 00 02 44 6F 63 31 32 33 00" - ); - verify( - FdbTuple.Create(new object[] { 42, value, docId }), - "15 2A 03 16 07 DE 15 0B 15 06 00 02 44 6F 63 31 32 33 00" - ); - verify( - FdbTuple.Create(42).Append(value).Append(docId), - "15 2A 03 16 07 DE 15 0B 15 06 00 02 44 6F 63 31 32 33 00" - ); - verify( - FdbTuple.Create(42).Append(value, docId), - "15 2A 03 16 07 DE 15 0B 15 06 00 02 44 6F 63 31 32 33 00" - ); - - // 
multiple depth - verify( - FdbTuple.Create(1, FdbTuple.Create(2, 3), FdbTuple.Create(FdbTuple.Create(4, 5, 6)), 7), - "15 01 03 15 02 15 03 00 03 03 15 04 15 05 15 06 00 00 15 07" - ); - - // corner cases - verify( - FdbTuple.Create(FdbTuple.Empty), - "03 00" // empty tumple should have header and footer - ); - verify( - FdbTuple.Create(FdbTuple.Empty, default(string)), - "03 00 00" // outer null should not be escaped - ); - verify( - FdbTuple.Create(FdbTuple.Create(default(string)), default(string)), - "03 00 FF 00 00" // inner null should be escaped, but not outer - ); - verify( - FdbTuple.Create(FdbTuple.Create(0x100, 0x10000, 0x1000000)), - "03 16 01 00 17 01 00 00 18 01 00 00 00 00" - ); - verify( - FdbTuple.Create(default(string), FdbTuple.Empty, default(string), FdbTuple.Create(default(string)), default(string)), - "00 03 00 00 03 00 FF 00 00" - ); - - } - - [Test] - public void Test_FdbTuple_SameBytes() - { - IFdbTuple t1 = FdbTuple.Create("hello world"); - IFdbTuple t2 = FdbTuple.Create(new object[] { "hello world" }); - - Assert.That(t1.ToSlice(), Is.EqualTo(t2.ToSlice())); - - t1 = FdbTuple.Create("hello world", 1234); - t2 = FdbTuple.Create("hello world").Append(1234); - - Assert.That(t1.ToSlice(), Is.EqualTo(t2.ToSlice())); - - } - - [Test] - public void Test_FdbTuple_Create_ToSlice() - { - Assert.That( - FdbTuple.Create("hello world").ToSlice().ToString(), - Is.EqualTo("<02>hello world<00>") - ); - - Assert.That( - FdbTuple.Create("hello", "world").ToSlice().ToString(), - Is.EqualTo("<02>hello<00><02>world<00>") - ); - - Assert.That( - FdbTuple.Create("hello world", 123).ToSlice().ToString(), - Is.EqualTo("<02>hello world<00><15>{") - ); - - Assert.That( - FdbTuple.Create("hello world", 1234, -1234).ToSlice().ToString(), - Is.EqualTo("<02>hello world<00><16><04><12>-") - ); - - Assert.That( - FdbTuple.Create("hello world", 123, false).ToSlice().ToString(), - Is.EqualTo("<02>hello world<00><15>{<14>") - ); - - Assert.That( - FdbTuple.Create("hello 
world", 123, false, new byte[] { 123, 1, 66, 0, 42 }).ToSlice().ToString(), - Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") - ); - - Assert.That( - FdbTuple.Create(new object[] { "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 } }).ToSlice().ToString(), - Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") - ); - - Assert.That( - FdbTuple.FromArray(new object[] { "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 } }, 1, 2).ToSlice().ToString(), - Is.EqualTo("<15>{<14>") - ); - - Assert.That( - FdbTuple.FromEnumerable(new List { "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 } }).ToSlice().ToString(), - Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") - ); - - } - - [Test] - public void Test_FdbTuple_EncodeKey() - { - Assert.That( - FdbTuple.EncodeKey("hello world").ToString(), - Is.EqualTo("<02>hello world<00>") - ); - - Assert.That( - FdbTuple.EncodeKey("hello", "world").ToString(), - Is.EqualTo("<02>hello<00><02>world<00>") - ); - - Assert.That( - FdbTuple.EncodeKey("hello world", 123).ToString(), - Is.EqualTo("<02>hello world<00><15>{") - ); - - Assert.That( - FdbTuple.EncodeKey("hello world", 1234, -1234).ToString(), - Is.EqualTo("<02>hello world<00><16><04><12>-") - ); - - Assert.That( - FdbTuple.EncodeKey("hello world", 123, false).ToString(), - Is.EqualTo("<02>hello world<00><15>{<14>") - ); - - Assert.That( - FdbTuple.EncodeKey("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }).ToString(), - Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") - ); - } - - [Test] - public void Test_FdbTuple_Unpack() - { - - var packed = FdbTuple.Create("hello world").ToSlice(); - Log(packed); - - var tuple = FdbTuple.Unpack(packed); - Assert.That(tuple, Is.Not.Null); - Log(tuple); - Assert.That(tuple.Count, Is.EqualTo(1)); - Assert.That(tuple.Get(0), Is.EqualTo("hello world")); - - packed = FdbTuple.Create("hello world", 123).ToSlice(); - Log(packed); - - tuple = FdbTuple.Unpack(packed); 
- Assert.That(tuple, Is.Not.Null); - Log(tuple); - Assert.That(tuple.Count, Is.EqualTo(2)); - Assert.That(tuple.Get(0), Is.EqualTo("hello world")); - Assert.That(tuple.Get(1), Is.EqualTo(123)); - - packed = FdbTuple.Create(1, 256, 257, 65536, int.MaxValue, long.MaxValue).ToSlice(); - Log(packed); - - tuple = FdbTuple.Unpack(packed); - Assert.That(tuple, Is.Not.Null); - Assert.That(tuple.Count, Is.EqualTo(6)); - Assert.That(tuple.Get(0), Is.EqualTo(1)); - Assert.That(tuple.Get(1), Is.EqualTo(256)); - Assert.That(tuple.Get(2), Is.EqualTo(257), ((FdbSlicedTuple)tuple).GetSlice(2).ToString()); - Assert.That(tuple.Get(3), Is.EqualTo(65536)); - Assert.That(tuple.Get(4), Is.EqualTo(int.MaxValue)); - Assert.That(tuple.Get(5), Is.EqualTo(long.MaxValue)); - - packed = FdbTuple.Create(-1, -256, -257, -65536, int.MinValue, long.MinValue).ToSlice(); - Log(packed); - - tuple = FdbTuple.Unpack(packed); - Assert.That(tuple, Is.Not.Null); - Assert.That(tuple, Is.InstanceOf()); - Log(tuple); - Assert.That(tuple.Count, Is.EqualTo(6)); - Assert.That(tuple.Get(0), Is.EqualTo(-1)); - Assert.That(tuple.Get(1), Is.EqualTo(-256)); - Assert.That(tuple.Get(2), Is.EqualTo(-257), "Slice is " + ((FdbSlicedTuple)tuple).GetSlice(2).ToString()); - Assert.That(tuple.Get(3), Is.EqualTo(-65536)); - Assert.That(tuple.Get(4), Is.EqualTo(int.MinValue)); - Assert.That(tuple.Get(5), Is.EqualTo(long.MinValue)); - } - - [Test] - public void Test_FdbTuple_CreateBoxed() - { - IFdbTuple tuple; - - tuple = FdbTuple.CreateBoxed(default(object)); - Assert.That(tuple.Count, Is.EqualTo(1)); - Assert.That(tuple[0], Is.Null); - - tuple = FdbTuple.CreateBoxed(1); - Assert.That(tuple.Count, Is.EqualTo(1)); - Assert.That(tuple[0], Is.EqualTo(1)); - - tuple = FdbTuple.CreateBoxed(1L); - Assert.That(tuple.Count, Is.EqualTo(1)); - Assert.That(tuple[0], Is.EqualTo(1L)); - - tuple = FdbTuple.CreateBoxed(false); - Assert.That(tuple.Count, Is.EqualTo(1)); - Assert.That(tuple[0], Is.EqualTo(false)); - - tuple = 
FdbTuple.CreateBoxed("hello"); - Assert.That(tuple.Count, Is.EqualTo(1)); - Assert.That(tuple[0], Is.EqualTo("hello")); - - tuple = FdbTuple.CreateBoxed(new byte[] { 1, 2, 3 }); - Assert.That(tuple.Count, Is.EqualTo(1)); - Assert.That(tuple[0], Is.EqualTo(Slice.Create(new byte[] { 1, 2, 3 }))); - } - - [Test] - public void Test_FdbTuple_EncodeKey_Boxed() - { - Slice slice; - - slice = FdbTuple.EncodeKey(default(object)); - Assert.That(slice.ToString(), Is.EqualTo("<00>")); - - slice = FdbTuple.EncodeKey(1); - Assert.That(slice.ToString(), Is.EqualTo("<15><01>")); - - slice = FdbTuple.EncodeKey(1L); - Assert.That(slice.ToString(), Is.EqualTo("<15><01>")); - - slice = FdbTuple.EncodeKey(1U); - Assert.That(slice.ToString(), Is.EqualTo("<15><01>")); - - slice = FdbTuple.EncodeKey(1UL); - Assert.That(slice.ToString(), Is.EqualTo("<15><01>")); - - slice = FdbTuple.EncodeKey(false); - Assert.That(slice.ToString(), Is.EqualTo("<14>")); - - slice = FdbTuple.EncodeKey(new byte[] { 4, 5, 6 }); - Assert.That(slice.ToString(), Is.EqualTo("<01><04><05><06><00>")); - - slice = FdbTuple.EncodeKey("hello"); - Assert.That(slice.ToString(), Is.EqualTo("<02>hello<00>")); - } - - [Test] - public void Test_FdbTuple_Numbers_Are_Sorted_Lexicographically() - { - // pick two numbers 'x' and 'y' at random, and check that the order of 'x' compared to 'y' is the same as 'pack(tuple(x))' compared to 'pack(tuple(y))' - - // ie: ensure that x.CompareTo(y) always has the same sign as Tuple(x).CompareTo(Tuple(y)) - - const int N = 1 * 1000 * 1000; - var rnd = new Random(); - var sw = Stopwatch.StartNew(); - - for (int i = 0; i < N; i++) - { - int x = rnd.Next() - 1073741824; - int y = x; - while (y == x) - { - y = rnd.Next() - 1073741824; - } - - var t1 = FdbTuple.Create(x).ToSlice(); - var t2 = FdbTuple.Create(y).ToSlice(); - - int dint = x.CompareTo(y); - int dtup = t1.CompareTo(t2); - - if (dtup == 0) Assert.Fail("Tuples for x={0} and y={1} should not have the same packed value", x, y); - - // 
compare signs - if (Math.Sign(dint) != Math.Sign(dtup)) - { - Assert.Fail("Tuples for x={0} and y={1} are not sorted properly ({2} / {3}): t(x)='{4}' and t(y)='{5}'", x, y, dint, dtup, t1.ToString(), t2.ToString()); - } - } - sw.Stop(); - Log("Checked {0:N0} tuples in {1:N1} ms", N, sw.ElapsedMilliseconds); - - } - - [Test] - public void Test_FdbTuple_Serialize_ITupleFormattable() - { - // types that implement ITupleFormattable should be packed by calling ToTuple() and then packing the returned tuple - - Slice packed; - - packed = FdbTuplePacker.Serialize(new Thing { Foo = 123, Bar = "hello" }); - Assert.That(packed.ToString(), Is.EqualTo("<03><15>{<02>hello<00><00>")); - - packed = FdbTuplePacker.Serialize(new Thing()); - Assert.That(packed.ToString(), Is.EqualTo("<03><14><00><00>")); - - packed = FdbTuplePacker.Serialize(default(Thing)); - Assert.That(packed.ToString(), Is.EqualTo("<00>")); - - } - - [Test] - public void Test_FdbTuple_Deserialize_ITupleFormattable() - { - Slice slice; - Thing thing; - - slice = Slice.Unescape("<03><16><01><02>world<00><00>"); - thing = FdbTuplePackers.DeserializeFormattable(slice); - Assert.That(thing, Is.Not.Null); - Assert.That(thing.Foo, Is.EqualTo(456)); - Assert.That(thing.Bar, Is.EqualTo("world")); - - slice = Slice.Unescape("<03><14><00><00>"); - thing = FdbTuplePackers.DeserializeFormattable(slice); - Assert.That(thing, Is.Not.Null); - Assert.That(thing.Foo, Is.EqualTo(0)); - Assert.That(thing.Bar, Is.EqualTo(null)); - - slice = Slice.Unescape("<00>"); - thing = FdbTuplePackers.DeserializeFormattable(slice); - Assert.That(thing, Is.Null); - } - - [Test] - public void Test_FdbTuple_BatchPack_Of_Tuples() - { - Slice[] slices; - var tuples = new IFdbTuple[] { - FdbTuple.Create("hello"), - FdbTuple.Create(123), - FdbTuple.Create(false), - FdbTuple.Create("world", 456, true) - }; - - // array version - slices = FdbTuple.Pack(tuples); - Assert.That(slices, Is.Not.Null); - Assert.That(slices.Length, Is.EqualTo(tuples.Length)); - 
Assert.That(slices, Is.EqualTo(tuples.Select(t => t.ToSlice()).ToArray())); - - // IEnumerable version that is passed an array - slices = FdbTuple.Pack((IEnumerable)tuples); - Assert.That(slices, Is.Not.Null); - Assert.That(slices.Length, Is.EqualTo(tuples.Length)); - Assert.That(slices, Is.EqualTo(tuples.Select(t => t.ToSlice()).ToArray())); - - // IEnumerable version but with a "real" enumerable - slices = FdbTuple.Pack(tuples.Select(t => t)); - Assert.That(slices, Is.Not.Null); - Assert.That(slices.Length, Is.EqualTo(tuples.Length)); - Assert.That(slices, Is.EqualTo(tuples.Select(t => t.ToSlice()).ToArray())); - } - - [Test] - public void Test_FdbTuple_EncodeKeys_Of_T() - { - Slice[] slices; - - #region PackRange(Tuple, ...) - - var tuple = FdbTuple.Create("hello"); - int[] items = new int[] { 1, 2, 3, 123, -1, int.MaxValue }; - - // array version - slices = FdbTuple.EncodePrefixedKeys(tuple, items); - Assert.That(slices, Is.Not.Null); - Assert.That(slices.Length, Is.EqualTo(items.Length)); - Assert.That(slices, Is.EqualTo(items.Select(x => tuple.Append(x).ToSlice()).ToArray())); - - // IEnumerable version that is passed an array - slices = FdbTuple.EncodePrefixedKeys(tuple, (IEnumerable)items); - Assert.That(slices, Is.Not.Null); - Assert.That(slices.Length, Is.EqualTo(items.Length)); - Assert.That(slices, Is.EqualTo(items.Select(x => tuple.Append(x).ToSlice()).ToArray())); - - // IEnumerable version but with a "real" enumerable - slices = FdbTuple.EncodePrefixedKeys(tuple, items.Select(t => t)); - Assert.That(slices, Is.Not.Null); - Assert.That(slices.Length, Is.EqualTo(items.Length)); - Assert.That(slices, Is.EqualTo(items.Select(x => tuple.Append(x).ToSlice()).ToArray())); - - #endregion - - #region PackRange(Slice, ...) 
- - string[] words = new string[] { "hello", "world", "très bien", "断トツ", "abc\0def", null, String.Empty }; - - var merged = FdbTuple.EncodePrefixedKeys(Slice.FromByte(42), words); - Assert.That(merged, Is.Not.Null); - Assert.That(merged.Length, Is.EqualTo(words.Length)); - - for (int i = 0; i < words.Length; i++) - { - var expected = Slice.FromByte(42) + FdbTuple.EncodeKey(words[i]); - Assert.That(merged[i], Is.EqualTo(expected)); - - Assert.That(merged[i].Array, Is.SameAs(merged[0].Array), "All slices should be stored in the same buffer"); - if (i > 0) Assert.That(merged[i].Offset, Is.EqualTo(merged[i - 1].Offset + merged[i - 1].Count), "All slices should be contiguous"); - } - - // corner cases - Assert.That(() => FdbTuple.EncodePrefixedKeys(Slice.Empty, default(int[])), Throws.InstanceOf().With.Property("ParamName").EqualTo("keys")); - Assert.That(() => FdbTuple.EncodePrefixedKeys(Slice.Empty, default(IEnumerable)), Throws.InstanceOf().With.Property("ParamName").EqualTo("keys")); - - #endregion - } - - [Test] - public void Test_FdbTuple_EncodeKeys_Boxed() - { - Slice[] slices; - var tuple = FdbTuple.Create("hello"); - object[] items = new object[] { "world", 123, false, Guid.NewGuid(), long.MinValue }; - - // array version - slices = FdbTuple.EncodePrefixedKeys(tuple, items); - Assert.That(slices, Is.Not.Null); - Assert.That(slices.Length, Is.EqualTo(items.Length)); - Assert.That(slices, Is.EqualTo(items.Select(x => tuple.Append(x).ToSlice()).ToArray())); - - // IEnumerable version that is passed an array - slices = FdbTuple.EncodePrefixedKeys(tuple, (IEnumerable)items); - Assert.That(slices, Is.Not.Null); - Assert.That(slices.Length, Is.EqualTo(items.Length)); - Assert.That(slices, Is.EqualTo(items.Select(x => tuple.Append(x).ToSlice()).ToArray())); - - // IEnumerable version but with a "real" enumerable - slices = FdbTuple.EncodePrefixedKeys(tuple, items.Select(t => t)); - Assert.That(slices, Is.Not.Null); - Assert.That(slices.Length, 
Is.EqualTo(items.Length)); - Assert.That(slices, Is.EqualTo(items.Select(x => tuple.Append(x).ToSlice()).ToArray())); - } - - #endregion - - #region FdbTupleParser - - private static string Clean(string value) - { - var sb = new StringBuilder(value.Length + 8); - foreach (var c in value) - { - if (c < ' ') sb.Append("\\x").Append(((int)c).ToString("x2")); else sb.Append(c); - } - return sb.ToString(); - } - - private static void PerformWriterTest(FdbTuplePackers.Encoder action, T value, string expectedResult, string message = null) - { - var writer = new TupleWriter(); - action(ref writer, value); - - Assert.That( - writer.Output.ToSlice().ToHexaString(' '), - Is.EqualTo(expectedResult), - message != null ? "Value {0} ({1}) was not properly packed: {2}" : "Value {0} ({1}) was not properly packed", value == null ? "" : value is string ? Clean(value as string) : value.ToString(), (value == null ? "null" : value.GetType().Name), message); - } - - [Test] - public void Test_FdbTupleParser_WriteInt64() - { - var test = new FdbTuplePackers.Encoder(FdbTupleParser.WriteInt64); - - PerformWriterTest(test, 0L, "14"); - - PerformWriterTest(test, 1L, "15 01"); - PerformWriterTest(test, 2L, "15 02"); - PerformWriterTest(test, 123L, "15 7B"); - PerformWriterTest(test, 255L, "15 FF"); - PerformWriterTest(test, 256L, "16 01 00"); - PerformWriterTest(test, 257L, "16 01 01"); - PerformWriterTest(test, 65535L, "16 FF FF"); - PerformWriterTest(test, 65536L, "17 01 00 00"); - PerformWriterTest(test, 65537L, "17 01 00 01"); - - PerformWriterTest(test, -1L, "13 FE"); - PerformWriterTest(test, -123L, "13 84"); - PerformWriterTest(test, -255L, "13 00"); - PerformWriterTest(test, -256L, "12 FE FF"); - PerformWriterTest(test, -65535L, "12 00 00"); - PerformWriterTest(test, -65536L, "11 FE FF FF"); - - PerformWriterTest(test, (1L << 24) - 1, "17 FF FF FF"); - PerformWriterTest(test, 1L << 24, "18 01 00 00 00"); - - PerformWriterTest(test, (1L << 32) - 1, "18 FF FF FF FF"); - 
PerformWriterTest(test, (1L << 32), "19 01 00 00 00 00"); - - PerformWriterTest(test, long.MaxValue, "1C 7F FF FF FF FF FF FF FF"); - PerformWriterTest(test, long.MinValue, "0C 7F FF FF FF FF FF FF FF"); - PerformWriterTest(test, long.MaxValue - 1, "1C 7F FF FF FF FF FF FF FE"); - PerformWriterTest(test, long.MinValue + 1, "0C 80 00 00 00 00 00 00 00"); - - } - - [Test] - public void Test_FdbTupleParser_WriteInt64_Respects_Ordering() - { - var list = new List>(); - - Action test = (x) => - { - var writer = new TupleWriter(); - FdbTupleParser.WriteInt64(ref writer, x); - var res = new KeyValuePair(x, writer.Output.ToSlice()); - list.Add(res); - Log("{0,20} : {0:x16} {1}", res.Key, res.Value.ToString()); - }; - - // We can't test 2^64 values, be we are interested at what happens around powers of two (were size can change) - - // negatives - for (int i = 63; i >= 3; i--) - { - long x = -(1L << i); - - if (i < 63) - { - test(x - 2); - test(x - 1); - } - test(x + 0); - test(x + 1); - test(x + 2); - } - - test(-2); - test(0); - test(+1); - test(+2); - - // positives - for (int i = 3; i <= 63; i++) - { - long x = (1L << i); - - test(x - 2); - test(x - 1); - if (i < 63) - { - test(x + 0); - test(x + 1); - test(x + 2); - } - } - - KeyValuePair previous = list[0]; - for (int i = 1; i < list.Count; i++) - { - KeyValuePair current = list[i]; - - Assert.That(current.Key, Is.GreaterThan(previous.Key)); - Assert.That(current.Value, Is.GreaterThan(previous.Value), "Expect {0} > {1}", current.Key, previous.Key); - - previous = current; - } - } - - [Test] - public void Test_FdbTupleParser_WriteUInt64() - { - var test = new FdbTuplePackers.Encoder(FdbTupleParser.WriteUInt64); - - PerformWriterTest(test, 0UL, "14"); - - PerformWriterTest(test, 1UL, "15 01"); - PerformWriterTest(test, 123UL, "15 7B"); - PerformWriterTest(test, 255UL, "15 FF"); - PerformWriterTest(test, 256UL, "16 01 00"); - PerformWriterTest(test, 257UL, "16 01 01"); - PerformWriterTest(test, 65535UL, "16 FF FF"); - 
PerformWriterTest(test, 65536UL, "17 01 00 00"); - PerformWriterTest(test, 65537UL, "17 01 00 01"); - - PerformWriterTest(test, (1UL << 24) - 1, "17 FF FF FF"); - PerformWriterTest(test, 1UL << 24, "18 01 00 00 00"); - - PerformWriterTest(test, (1UL << 32) - 1, "18 FF FF FF FF"); - PerformWriterTest(test, (1UL << 32), "19 01 00 00 00 00"); - - PerformWriterTest(test, ulong.MaxValue, "1C FF FF FF FF FF FF FF FF"); - PerformWriterTest(test, ulong.MaxValue-1, "1C FF FF FF FF FF FF FF FE"); - - } - - [Test] - public void Test_FdbTupleParser_WriteUInt64_Respects_Ordering() - { - var list = new List>(); - - Action test = (x) => - { - var writer = new TupleWriter(); - FdbTupleParser.WriteUInt64(ref writer, x); - var res = new KeyValuePair(x, writer.Output.ToSlice()); - list.Add(res); -#if DEBUG - Log("{0,20} : {0:x16} {1}", res.Key, res.Value); -#endif - }; - - // We can't test 2^64 values, be we are interested at what happens around powers of two (were size can change) - - test(0); - test(1); - - // positives - for (int i = 3; i <= 63; i++) - { - ulong x = (1UL << i); - - test(x - 2); - test(x - 1); - test(x + 0); - test(x + 1); - test(x + 2); - } - test(ulong.MaxValue - 2); - test(ulong.MaxValue - 1); - test(ulong.MaxValue); - - KeyValuePair previous = list[0]; - for (int i = 1; i < list.Count; i++) - { - KeyValuePair current = list[i]; - - Assert.That(current.Key, Is.GreaterThan(previous.Key)); - Assert.That(current.Value, Is.GreaterThan(previous.Value), "Expect {0} > {1}", current.Key, previous.Key); - - previous = current; - } - } - - [Test] - public void Test_FdbTupleParser_WriteString() - { - string s; - var test = new FdbTuplePackers.Encoder(FdbTupleParser.WriteString); - Func encodeSimple = (value) => "02 " + Slice.Create(Encoding.UTF8.GetBytes(value)).ToHexaString(' ') + " 00"; - Func encodeWithZeroes = (value) => "02 " + Slice.Create(Encoding.UTF8.GetBytes(value)).ToHexaString(' ').Replace("00", "00 FF") + " 00"; - - PerformWriterTest(test, null, "00"); - 
PerformWriterTest(test, String.Empty, "02 00"); - PerformWriterTest(test, "A", "02 41 00"); - PerformWriterTest(test, "\x80", "02 C2 80 00"); - PerformWriterTest(test, "\xFF", "02 C3 BF 00"); - PerformWriterTest(test, "\xFFFE", "02 EF BF BE 00"); // UTF-8 BOM - - PerformWriterTest(test, "ASCII", "02 41 53 43 49 49 00"); - PerformWriterTest(test, "héllø le 世界", "02 68 C3 A9 6C 6C C3 B8 20 6C 65 20 E4 B8 96 E7 95 8C 00"); - - // Must escape '\0' contained in the string as '\x00\xFF' - PerformWriterTest(test, "\0", "02 00 FF 00"); - PerformWriterTest(test, "A\0", "02 41 00 FF 00"); - PerformWriterTest(test, "\0A", "02 00 FF 41 00"); - PerformWriterTest(test, "A\0\0A", "02 41 00 FF 00 FF 41 00"); - PerformWriterTest(test, "A\0B\0\xFF", "02 41 00 FF 42 00 FF C3 BF 00"); - - // random human text samples - - s = "This is a long string that has more than 1024 chars to force the encoder to use multiple chunks, and with some random UNICODE at the end so that it can not be optimized as ASCII-only." + new string('A', 1024) + "ಠ_ಠ"; - PerformWriterTest(test, s, encodeSimple(s)); - - s = "String of exactly 1024 ASCII chars !"; s += new string('A', 1024 - s.Length); - PerformWriterTest(test, s, encodeSimple(s)); - - s = "Ceci est une chaîne de texte qui contient des caractères UNICODE supérieurs à 0x7F mais inférieurs à 0x800"; // n'est-il pas ? - PerformWriterTest(test, s, encodeSimple(s)); - - s = "色は匂へど 散りぬるを 我が世誰そ 常ならむ 有為の奥山 今日越えて 浅き夢見じ 酔ひもせず"; // iroha! 
- PerformWriterTest(test, s, encodeSimple(s)); - - s = "String that ends with funny UTF-32 chars like \xDFFF\xDBFF"; // supposed to be 0x10FFFF encoded in UTF-16 - PerformWriterTest(test, s, encodeSimple(s)); - - // strings with random non-zero UNICODE chars - var rnd = new Random(); - for (int k = 0; k < 100; k++) - { - int size = 1 + rnd.Next(10000); - var chars = new char[size]; - for (int i = 0; i < chars.Length; i++) - { - // 1..0xFFFF - switch (rnd.Next(3)) - { - case 0: chars[i] = (char)rnd.Next(1, 0x80); break; - case 1: chars[i] = (char)rnd.Next(0x80, 0x800); break; - case 2: chars[i] = (char)rnd.Next(0x800, 0xFFFF); break; - } - } - s = new string(chars); - PerformWriterTest(test, s, encodeSimple(s), "Random string with non-zero unicode chars (from 1 to 0xFFFF)"); - } - - // random strings with zeroes - for (int k = 0; k < 100; k++) - { - int size = 1 + rnd.Next(10000); - var chars = new char[size]; - for (int i = 0; i < chars.Length; i++) - { - switch(rnd.Next(4)) - { - case 0: chars[i] = '\0'; break; - case 1: chars[i] = (char)rnd.Next(1, 0x80); break; - case 2: chars[i] = (char)rnd.Next(0x80, 0x800); break; - case 3: chars[i] = (char)rnd.Next(0x800, 0xFFFF); break; - } - } - s = new string(chars); - PerformWriterTest(test, s, encodeWithZeroes(s), "Random string with zeros "); - } - - } - - [Test] - public void Test_FdbTupleParser_WriteChar() - { - var test = new FdbTuplePackers.Encoder(FdbTupleParser.WriteChar); - - // 1 bytes - PerformWriterTest(test, 'A', "02 41 00", "Unicode chars in the ASCII table take only one byte in UTF-8"); - PerformWriterTest(test, '\0', "02 00 FF 00", "\\0 must be escaped as 00 FF"); - PerformWriterTest(test, '\x7F', "02 7F 00", "1..127 take ony 1 bytes"); - // 2 bytes - PerformWriterTest(test, '\x80', "02 C2 80 00", "128 needs 2 bytes"); - PerformWriterTest(test, '\xFF', "02 C3 BF 00", "ASCII chars above 128 take at least 2 bytes in UTF-8"); - PerformWriterTest(test, 'é', "02 C3 A9 00", "0x00E9, LATIN SMALL LETTER E WITH 
ACUTE"); - PerformWriterTest(test, 'ø', "02 C3 B8 00", "0x00F8, LATIN SMALL LETTER O WITH STROKE"); - PerformWriterTest(test, '\x07FF', "02 DF BF 00"); - // 3 bytes - PerformWriterTest(test, '\x0800', "02 E0 A0 80 00", "0x800 takes at least 3 bytes"); - PerformWriterTest(test, 'ಠ', "02 E0 B2 A0 00", "KANNADA LETTER TTHA"); - PerformWriterTest(test, '世', "02 E4 B8 96 00", "0x4E16, CJK Ideograph"); - PerformWriterTest(test, '界', "02 E7 95 8C 00", "0x754C, CJK Ideoghaph"); - PerformWriterTest(test, '\xFFFE', "02 EF BF BE 00", "Unicode BOM becomes EF BF BE in UTF-8"); - PerformWriterTest(test, '\xFFFF', "02 EF BF BF 00", "Maximum UTF-16 character"); - - // check all the unicode chars - for (int i = 1; i <= 65535; i++) - { - char c = (char)i; - var writer = new TupleWriter(); - FdbTupleParser.WriteChar(ref writer, c); - string s = new string(c, 1); - Assert.That(writer.Output.ToSlice().ToString(), Is.EqualTo("<02>" + Slice.Create(Encoding.UTF8.GetBytes(s)).ToString() + "<00>"), "{0} '{1}'", i, c); - } - } - - #endregion - - #region Equality / Comparison - - private static void AssertEquality(IFdbTuple x, IFdbTuple y) - { - Assert.That(x.Equals(y), Is.True, "x.Equals(y)"); - Assert.That(x.Equals((object)y), Is.True, "x.Equals((object)y)"); - Assert.That(y.Equals(x), Is.True, "y.Equals(x)"); - Assert.That(y.Equals((object)x), Is.True, "y.Equals((object)y"); - } - - private static void AssertInequality(IFdbTuple x, IFdbTuple y) - { - Assert.That(x.Equals(y), Is.False, "!x.Equals(y)"); - Assert.That(x.Equals((object)y), Is.False, "!x.Equals((object)y)"); - Assert.That(y.Equals(x), Is.False, "!y.Equals(x)"); - Assert.That(y.Equals((object)x), Is.False, "!y.Equals((object)y"); - } - - [Test] - public void Test_FdbTuple_Equals() - { - var t1 = FdbTuple.Create(1, 2); - // self equality - AssertEquality(t1, t1); - - var t2 = FdbTuple.Create(1, 2); - // same type equality - AssertEquality(t1, t2); - - var t3 = FdbTuple.Create(new object[] { 1, 2 }); - // other tuple type equality 
- AssertEquality(t1, t3); - - var t4 = FdbTuple.Create(1).Append(2); - // multi step - AssertEquality(t1, t4); - } - - [Test] - public void Test_FdbTuple_Similar() - { - var t1 = FdbTuple.Create(1, 2); - var t2 = FdbTuple.Create((long)1, (short)2); - var t3 = FdbTuple.Create("1", "2"); - var t4 = FdbTuple.Create(new object[] { 1, 2L }); - var t5 = FdbTuple.Unpack(Slice.Unescape("<02>1<00><15><02>")); - - AssertEquality(t1, t1); - AssertEquality(t1, t2); - AssertEquality(t1, t3); - AssertEquality(t1, t4); - AssertEquality(t1, t5); - AssertEquality(t2, t2); - AssertEquality(t2, t3); - AssertEquality(t2, t4); - AssertEquality(t2, t5); - AssertEquality(t3, t3); - AssertEquality(t3, t4); - AssertEquality(t3, t5); - AssertEquality(t4, t4); - AssertEquality(t4, t5); - AssertEquality(t5, t5); - } - - [Test] - public void Test_FdbTuple_Not_Equal() - { - var t1 = FdbTuple.Create(1, 2); - - var x1 = FdbTuple.Create(2, 1); - var x2 = FdbTuple.Create("11", "22"); - var x3 = FdbTuple.Create(1, 2, 3); - var x4 = FdbTuple.Unpack(Slice.Unescape("<15><01>")); - - AssertInequality(t1, x1); - AssertInequality(t1, x2); - AssertInequality(t1, x3); - AssertInequality(t1, x4); - - AssertInequality(x1, x2); - AssertInequality(x1, x3); - AssertInequality(x1, x4); - AssertInequality(x2, x3); - AssertInequality(x2, x4); - AssertInequality(x3, x4); - } - - [Test] - public void Test_FdbTuple_Substring_Equality() - { - var x = FdbTuple.FromArray(new [] { "A", "C" }); - var y = FdbTuple.FromArray(new[] { "A", "B", "C" }); - - Assert.That(x.Substring(0, 1), Is.EqualTo(y.Substring(0, 1))); - Assert.That(x.Substring(1, 1), Is.EqualTo(y.Substring(2, 1))); - - var aa = FdbTuple.Create("A"); - var bb = FdbTuple.Create("A"); - Assert.That(aa == bb, Is.True); - - var a = x.Substring(0, 1); - var b = y.Substring(0, 1); - Assert.That(a.Equals((IFdbTuple)b), Is.True); - Assert.That(a.Equals((object)b), Is.True); - Assert.That(object.Equals(a, b), Is.True); - Assert.That(FdbTuple.Equals(a, b), Is.True); - 
Assert.That(FdbTuple.Equivalent(a, b), Is.True); - - // this is very unfortunate, but 'a == b' does NOT work because IFdbTuple is an interface, and there is no known way to make it work :( - //Assert.That(a == b, Is.True); - } - - [Test] - public void Test_FdbTuple_String_AutoCast() - { - // 'a' ~= "A" - AssertEquality(FdbTuple.Create("A"), FdbTuple.Create('A')); - AssertInequality(FdbTuple.Create("A"), FdbTuple.Create('B')); - AssertInequality(FdbTuple.Create("A"), FdbTuple.Create('a')); - - // ASCII ~= Unicode - AssertEquality(FdbTuple.Create("ABC"), FdbTuple.Create(Slice.FromAscii("ABC"))); - AssertInequality(FdbTuple.Create("ABC"), FdbTuple.Create(Slice.FromAscii("DEF"))); - AssertInequality(FdbTuple.Create("ABC"), FdbTuple.Create(Slice.FromAscii("abc"))); - - // 'a' ~= ASCII 'a' - AssertEquality(FdbTuple.Create(Slice.FromAscii("A")), FdbTuple.Create('A')); - AssertInequality(FdbTuple.Create(Slice.FromAscii("A")), FdbTuple.Create('B')); - AssertInequality(FdbTuple.Create(Slice.FromAscii("A")), FdbTuple.Create('a')); - } - - #endregion - - #region Formatters - - [Test] - public void Test_Default_FdbTupleFormatter_For_Common_Types() - { - - // common simple types - Assert.That(FdbTupleFormatter.Default, Is.InstanceOf>()); - Assert.That(FdbTupleFormatter.Default, Is.InstanceOf>()); - Assert.That(FdbTupleFormatter.Default, Is.InstanceOf>()); - - // corner cases - Assert.That(FdbTupleFormatter.Default, Is.InstanceOf>()); - Assert.That(FdbTupleFormatter.Default, Is.InstanceOf>()); - - // ITupleFormattable types - Assert.That(FdbTupleFormatter.Default, Is.InstanceOf>()); - } - - [Test] - public void Test_Format_Common_Types() - { - Assert.That(FdbTupleFormatter.Default.ToTuple(123), Is.EqualTo(FdbTuple.Create(123))); - Assert.That(FdbTupleFormatter.Default.FromTuple(FdbTuple.Create(123)), Is.EqualTo(123)); - - Assert.That(FdbTupleFormatter.Default.ToTuple(true), Is.EqualTo(FdbTuple.Create(true))); - 
Assert.That(FdbTupleFormatter.Default.FromTuple(FdbTuple.Create(true)), Is.True); - - Assert.That(FdbTupleFormatter.Default.ToTuple("hello"), Is.EqualTo(FdbTuple.Create("hello"))); - Assert.That(FdbTupleFormatter.Default.FromTuple(FdbTuple.Create("hello")), Is.EqualTo("hello")); - - var t = FdbTuple.Create(new object[] { "hello", 123, false }); - Assert.That(FdbTupleFormatter.Default.ToTuple(t), Is.SameAs(t)); - Assert.That(FdbTupleFormatter.Default.FromTuple(t), Is.SameAs(t)); - - var thing = new Thing { Foo = 123, Bar = "hello" }; - Assert.That(FdbTupleFormatter.Default.ToTuple(thing), Is.EqualTo(FdbTuple.Create(123, "hello"))); - - var thing2 = FdbTupleFormatter.Default.FromTuple(FdbTuple.Create(456, "world")); - Assert.That(thing2, Is.Not.Null); - Assert.That(thing2.Foo, Is.EqualTo(456)); - Assert.That(thing2.Bar, Is.EqualTo("world")); - - } - - [Test] - public void Test_Create_Appender_Formatter() - { - // create an appender formatter that will always add the values after the same prefix - - var fmtr = FdbTupleFormatter.CreateAppender(FdbTuple.Create("hello", "world")); - Assert.That(fmtr, Is.InstanceOf>()); - - Assert.That(fmtr.ToTuple(123), Is.EqualTo(FdbTuple.Create("hello", "world", 123))); - Assert.That(fmtr.ToTuple(456), Is.EqualTo(FdbTuple.Create("hello", "world", 456))); - Assert.That(fmtr.ToTuple(-1), Is.EqualTo(FdbTuple.Create("hello", "world", -1))); - - Assert.That(fmtr.FromTuple(FdbTuple.Create("hello", "world", 42)), Is.EqualTo(42)); - Assert.That(fmtr.FromTuple(FdbTuple.Create("hello", "world", -1)), Is.EqualTo(-1)); - - Assert.That(() => fmtr.FromTuple(null), Throws.InstanceOf()); - Assert.That(() => fmtr.FromTuple(FdbTuple.Empty), Throws.InstanceOf()); - Assert.That(() => fmtr.FromTuple(FdbTuple.Create("hello", "world", 42, 77)), Throws.InstanceOf(), "Too many values"); - Assert.That(() => fmtr.FromTuple(FdbTuple.Create("hello_world", 42)), Throws.InstanceOf(), "not enough values"); - Assert.That(() => fmtr.FromTuple(FdbTuple.Create("world", 
"hello", "42")), Throws.InstanceOf(), "incorrect type"); - Assert.That(() => fmtr.FromTuple(FdbTuple.Create(42)), Throws.InstanceOf(), "missing prefix"); - Assert.That(() => fmtr.FromTuple(FdbTuple.Create("extra", "hello", "world", 42)), Throws.InstanceOf(), "prefix must match exactly"); - Assert.That(() => fmtr.FromTuple(FdbTuple.Create("Hello", "World", 42)), Throws.InstanceOf(), "case sensitive"); - } - - #endregion - - #region Bench.... - - [Test] - public void Bench_FdbTuple_Unpack_Random() - { - const int N = 100 * 1000; - - Slice FUNKY_ASCII = Slice.FromAscii("bonjour\x00le\x00\xFFmonde"); - string FUNKY_STRING = "hello\x00world"; - string UNICODE_STRING = "héllø 世界"; - - Console.Write("Creating {0:N0} random tuples", N); - var tuples = new List(N); - var rnd = new Random(777); - var guids = Enumerable.Range(0, 10).Select(_ => Guid.NewGuid()).ToArray(); - var uuid128s = Enumerable.Range(0, 10).Select(_ => Uuid128.NewUuid()).ToArray(); - var uuid64s = Enumerable.Range(0, 10).Select(_ => Uuid64.NewUuid()).ToArray(); - var fuzz = new byte[1024 + 1000]; rnd.NextBytes(fuzz); - var sw = Stopwatch.StartNew(); - for (int i = 0; i < N; i++) - { - IFdbTuple tuple = FdbTuple.Empty; - int s = 1 + (int)Math.Sqrt(rnd.Next(128)); - if (i % (N / 100) == 0) Console.Write("."); - for (int j = 0; j < s; j++) - { - switch (rnd.Next(17)) - { - case 0: tuple = tuple.Append(rnd.Next(255)); break; - case 1: tuple = tuple.Append(-1 - rnd.Next(255)); break; - case 2: tuple = tuple.Append(256 + rnd.Next(65536 - 256)); break; - case 3: tuple = tuple.Append(rnd.Next(int.MaxValue)); break; - case 4: tuple = tuple.Append((rnd.Next(int.MaxValue) << 32) | rnd.Next(int.MaxValue)); break; - case 5: tuple = tuple.Append(new string('A', 1 + rnd.Next(16))); break; - case 6: tuple = tuple.Append(new string('B', 8 + (int)Math.Sqrt(rnd.Next(1024)))); break; - case 7: tuple = tuple.Append(UNICODE_STRING); break; - case 8: tuple = tuple.Append(FUNKY_STRING); break; - case 9: tuple = 
tuple.Append(FUNKY_ASCII); break; - case 10: tuple = tuple.Append(guids[rnd.Next(10)]); break; - case 11: tuple = tuple.Append(uuid128s[rnd.Next(10)]); break; - case 12: tuple = tuple.Append(uuid64s[rnd.Next(10)]); break; - case 13: tuple = tuple.Append(Slice.Create(fuzz, rnd.Next(1000), 1 + (int)Math.Sqrt(rnd.Next(1024)))); break; - case 14: tuple = tuple.Append(default(string)); break; - case 15: tuple = tuple.Append("hello"); break; - case 16: tuple = tuple.Append(rnd.Next(2) == 0); break; - } - } - tuples.Add(tuple); - } - sw.Stop(); - Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); - Log(" > {0:N0} items", tuples.Sum(x => x.Count)); - Log(" > {0}", tuples[42]); - Log(); - - Console.Write("Packing tuples..."); - sw.Restart(); - var slices = FdbTuple.Pack(tuples); - sw.Stop(); - Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); - Log(" > {0:N0} tps", N / sw.Elapsed.TotalSeconds); - Log(" > {0:N0} bytes", slices.Sum(x => x.Count)); - Log(" > {0}", slices[42]); - Log(); - - Console.Write("Unpacking tuples..."); - sw.Restart(); - var unpacked = slices.Select(slice => FdbTuple.Unpack(slice)).ToList(); - sw.Stop(); - Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); - Log(" > {0:N0} tps", N / sw.Elapsed.TotalSeconds); - Log(" > {0}", unpacked[42]); - Log(); - - Console.Write("Comparing ..."); - sw.Restart(); - tuples.Zip(unpacked, (x, y) => x.Equals(y)).All(b => b); - sw.Stop(); - Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); - Log(); - - Console.Write("Tuples.ToString ..."); - sw.Restart(); - var strings = tuples.Select(x => x.ToString()).ToList(); - sw.Stop(); - Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); - Log(" > {0:N0} chars", strings.Sum(x => x.Length)); - Log(" > {0}", strings[42]); - Log(); - - Console.Write("Unpacked.ToString ..."); - sw.Restart(); - strings = unpacked.Select(x => x.ToString()).ToList(); - sw.Stop(); - Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); - Log(" > {0:N0} chars", strings.Sum(x => x.Length)); - Log(" 
> {0}", strings[42]); - Log(); - - Console.Write("Memoizing ..."); - sw.Restart(); - var memoized = tuples.Select(x => x.Memoize()).ToList(); - sw.Stop(); - Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); - } - - #endregion - - private class Thing : ITupleFormattable - { - public Thing() - { } - - public int Foo { get; set; } - public string Bar { get; set; } - - IFdbTuple ITupleFormattable.ToTuple() - { - return FdbTuple.Create(this.Foo, this.Bar); - } - - void ITupleFormattable.FromTuple(IFdbTuple tuple) - { - this.Foo = tuple.Get(0); - this.Bar = tuple.Get(1); - } - } - - } - - -} diff --git a/FoundationDB.Tests/Layers/VectorFacts.cs b/FoundationDB.Tests/Layers/VectorFacts.cs index 6a9e8a6a7..87f288f24 100644 --- a/FoundationDB.Tests/Layers/VectorFacts.cs +++ b/FoundationDB.Tests/Layers/VectorFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,17 +28,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Collections.Tests { - using FoundationDB.Client; - using FoundationDB.Client.Tests; - using FoundationDB.Layers.Tuples; - using NUnit.Framework; using System; - using System.Collections.Generic; - using System.Diagnostics; - using System.Linq; using System.Text; - using System.Threading; using System.Threading.Tasks; + using Doxense.Serialization.Encoders; + using FoundationDB.Client; + using FoundationDB.Client.Tests; + using NUnit.Framework; [TestFixture] [Obsolete] @@ -56,11 +52,11 @@ public async Task Test_Vector_Fast() using (var tr = db.BeginTransaction(this.Cancellation)) { - Console.WriteLine("Clearing any previous values in the vector"); + Log("Clearing any previous values in the vector"); vector.Clear(tr); - Console.WriteLine(); - Console.WriteLine("MODIFIERS"); + Log(); + Log("MODIFIERS"); // Set + Push vector.Set(tr, 0, Slice.FromInt32(1)); @@ -73,96 +69,96 @@ public async Task Test_Vector_Fast() await PrintVector(vector, tr); // Pop - Console.WriteLine("> Popped: " + await vector.PopAsync(tr)); + Log("> Popped: " + await vector.PopAsync(tr)); await PrintVector(vector, tr); // Clear vector.Clear(tr); - Console.WriteLine("> Pop empty: " + await vector.PopAsync(tr)); + Log("> Pop empty: " + await vector.PopAsync(tr)); await PrintVector(vector, tr); - await vector.PushAsync(tr, Slice.FromAscii("foo")); - Console.WriteLine("> Pop size 1: " + await vector.PopAsync(tr)); + await vector.PushAsync(tr, Slice.FromString("foo")); + Log("> Pop size 1: " + await vector.PopAsync(tr)); await PrintVector(vector, tr); - Console.WriteLine(); - Console.WriteLine("CAPACITY OPERATIONS"); + Log(); + Log("CAPACITY OPERATIONS"); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); - Console.WriteLine("> Empty: " + await vector.EmptyAsync(tr)); + Log("> Size: " + await vector.SizeAsync(tr)); + Log("> Empty: " + await vector.EmptyAsync(tr)); - Console.WriteLine("> Resizing to length 5"); + Log("> Resizing 
to length 5"); await vector.ResizeAsync(tr, 5); await PrintVector(vector, tr); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); - - Console.WriteLine("Settings values"); - vector.Set(tr, 0, Slice.FromAscii("Portez")); - vector.Set(tr, 1, Slice.FromAscii("ce vieux")); - vector.Set(tr, 2, Slice.FromAscii("whisky")); - vector.Set(tr, 3, Slice.FromAscii("au juge")); - vector.Set(tr, 4, Slice.FromAscii("blond qui")); - vector.Set(tr, 5, Slice.FromAscii("fume")); + Log("> Size: " + await vector.SizeAsync(tr)); + + Log("Settings values"); + vector.Set(tr, 0, Slice.FromString("Portez")); + vector.Set(tr, 1, Slice.FromString("ce vieux")); + vector.Set(tr, 2, Slice.FromString("whisky")); + vector.Set(tr, 3, Slice.FromString("au juge")); + vector.Set(tr, 4, Slice.FromString("blond qui")); + vector.Set(tr, 5, Slice.FromString("fume")); await PrintVector(vector, tr); - Console.WriteLine("FRONT"); - Console.WriteLine("> " + await vector.FrontAsync(tr)); + Log("FRONT"); + Log("> " + await vector.FrontAsync(tr)); - Console.WriteLine("BACK"); - Console.WriteLine("> " + await vector.BackAsync(tr)); + Log("BACK"); + Log("> " + await vector.BackAsync(tr)); - Console.WriteLine(); - Console.WriteLine("ELEMENT ACCESS"); - Console.WriteLine("> Index 0: " + await vector.GetAsync(tr, 0)); - Console.WriteLine("> Index 5: " + await vector.GetAsync(tr, 5)); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); + Log(); + Log("ELEMENT ACCESS"); + Log("> Index 0: " + await vector.GetAsync(tr, 0)); + Log("> Index 5: " + await vector.GetAsync(tr, 5)); + Log("> Size: " + await vector.SizeAsync(tr)); - Console.WriteLine(); - Console.WriteLine("RESIZING"); - Console.WriteLine("> Resizing to 3"); + Log(); + Log("RESIZING"); + Log("> Resizing to 3"); await vector.ResizeAsync(tr, 3); await PrintVector(vector, tr); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); + Log("> Size: " + await vector.SizeAsync(tr)); - Console.WriteLine("> Resizing to 3 again"); + Log("> 
Resizing to 3 again"); await vector.ResizeAsync(tr, 3); await PrintVector(vector, tr); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); + Log("> Size: " + await vector.SizeAsync(tr)); - Console.WriteLine("> Resizing to 6"); + Log("> Resizing to 6"); await vector.ResizeAsync(tr, 6); await PrintVector(vector, tr); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); + Log("> Size: " + await vector.SizeAsync(tr)); - Console.WriteLine(); - Console.WriteLine("SPARSE TEST"); + Log(); + Log("SPARSE TEST"); - Console.WriteLine("> Popping sparse vector"); + Log("> Popping sparse vector"); await vector.PopAsync(tr); await PrintVector(vector, tr); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); + Log("> Size: " + await vector.SizeAsync(tr)); - Console.WriteLine("> Resizing to 4"); + Log("> Resizing to 4"); await vector.ResizeAsync(tr, 4); await PrintVector(vector, tr); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); + Log("> Size: " + await vector.SizeAsync(tr)); - Console.WriteLine("> Adding 'word' to index 10, resize to 25"); - vector.Set(tr, 10, Slice.FromAscii("word")); + Log("> Adding 'word' to index 10, resize to 25"); + vector.Set(tr, 10, Slice.FromString("word")); await vector.ResizeAsync(tr, 25); await PrintVector(vector, tr); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); + Log("> Size: " + await vector.SizeAsync(tr)); - Console.WriteLine("> Swapping with sparse element"); + Log("> Swapping with sparse element"); await vector.SwapAsync(tr, 10, 15); await PrintVector(vector, tr); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); + Log("> Size: " + await vector.SizeAsync(tr)); - Console.WriteLine("> Swapping sparse elements"); + Log("> Swapping sparse elements"); await vector.SwapAsync(tr, 12, 13); await PrintVector(vector, tr); - Console.WriteLine("> Size: " + await vector.SizeAsync(tr)); + Log("> Size: " + await vector.SizeAsync(tr)); } } } @@ -175,10 +171,10 @@ private static async Task 
PrintVector(FdbVector vector, IFdbReadOnlyTransa await tr.GetRange(vector.Subspace.Keys.ToRange()).ForEachAsync((kvp) => { if (!first) sb.Append(", "); else first = false; - sb.Append(vector.Subspace.Keys.DecodeLast(kvp.Key) + ":" + kvp.Value.ToAsciiOrHexaString()); + sb.Append($"{vector.Subspace.Keys.DecodeLast(kvp.Key)}:{kvp.Value:P}"); }); - Console.WriteLine("> Vector: (" + sb.ToString() + ")"); + Log("> Vector: (" + sb.ToString() + ")"); } } diff --git a/FoundationDB.Tests/Linq/FdbAsyncEnumerableFacts.cs b/FoundationDB.Tests/Linq/AsyncEnumerableFacts.cs similarity index 85% rename from FoundationDB.Tests/Linq/FdbAsyncEnumerableFacts.cs rename to FoundationDB.Tests/Linq/AsyncEnumerableFacts.cs index 825a9e9e0..0a54272f2 100644 --- a/FoundationDB.Tests/Linq/FdbAsyncEnumerableFacts.cs +++ b/FoundationDB.Tests/Linq/AsyncEnumerableFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,12 +26,9 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion +// ReSharper disable AccessToDisposedClosure namespace FoundationDB.Linq.Tests { - using FoundationDB.Async; - using FoundationDB.Client.Tests; - using FoundationDB.Layers.Tuples; - using NUnit.Framework; using System; using System.Collections.Generic; using System.Diagnostics; @@ -39,9 +36,16 @@ namespace FoundationDB.Linq.Tests using System.Runtime.ExceptionServices; using System.Threading; using System.Threading.Tasks; + using Doxense; + using Doxense.Async; + using Doxense.Collections.Tuples; + using Doxense.Linq; + using Doxense.Linq.Async.Iterators; + using FoundationDB.Client.Tests; + using NUnit.Framework; [TestFixture] - public class FdbAsyncEnumerableFacts : FdbTest + public class AsyncEnumerableFacts : FdbTest { [Test] @@ -53,9 +57,9 @@ public async Task Test_Can_Convert_Enumerable_To_AsyncEnumerable() Assert.That(source, Is.Not.Null); var results = new List(); - using (var iterator = source.GetEnumerator()) + using (var iterator = source.GetEnumerator(this.Cancellation, AsyncIterationHint.Default)) { - while (await iterator.MoveNext(CancellationToken.None)) + while (await iterator.MoveNextAsync()) { Assert.That(results.Count, Is.LessThan(10)); results.Add(iterator.Current); @@ -78,9 +82,9 @@ public async Task Test_Can_Convert_Enumerable_To_AsyncEnumerable_With_Async_Tran Assert.That(source, Is.Not.Null); var results = new List(); - using (var iterator = source.GetEnumerator()) + using (var iterator = source.GetEnumerator(this.Cancellation, AsyncIterationHint.Default)) { - while (await iterator.MoveNext(CancellationToken.None)) + while (await iterator.MoveNextAsync()) { Assert.That(results.Count, Is.LessThan(10)); results.Add(iterator.Current); @@ -130,7 +134,7 @@ public async Task Test_Can_ToArrayAsync_Big() [Test] public async Task Test_Empty() { - var empty = FdbAsyncEnumerable.Empty(); + var empty = AsyncEnumerable.Empty(); Assert.That(empty, Is.Not.Null); var results = await empty.ToListAsync(); @@ 
-143,13 +147,13 @@ public async Task Test_Empty() Assert.That(none, Is.True); int count = await empty.CountAsync(); - Assert.That(count, Is.EqualTo(0)); + Assert.That(count, Is.Zero); } [Test] public async Task Test_Singleton() { - var singleton = FdbAsyncEnumerable.Singleton(42); + var singleton = AsyncEnumerable.Singleton(42); Assert.That(singleton, Is.Not.Null); var results = await singleton.ToListAsync(); @@ -170,15 +174,15 @@ public async Task Test_Producer_Single() { // Func - var singleton = FdbAsyncEnumerable.Single(() => 42); + var singleton = AsyncEnumerable.Single(() => 42); Assert.That(singleton, Is.Not.Null); - using(var iterator = singleton.GetEnumerator()) + using(var iterator = singleton.GetEnumerator(this.Cancellation, AsyncIterationHint.Default)) { - var res = await iterator.MoveNext(this.Cancellation); + var res = await iterator.MoveNextAsync(); Assert.That(res, Is.True); Assert.That(iterator.Current, Is.EqualTo(42)); - res = await iterator.MoveNext(this.Cancellation); + res = await iterator.MoveNextAsync(); Assert.That(res, Is.False); } @@ -196,15 +200,15 @@ public async Task Test_Producer_Single() // Func> - singleton = FdbAsyncEnumerable.Single(() => Task.Delay(50).ContinueWith(_ => 42)); + singleton = AsyncEnumerable.Single(() => Task.Delay(50).ContinueWith(_ => 42)); Assert.That(singleton, Is.Not.Null); - using (var iterator = singleton.GetEnumerator()) + using (var iterator = singleton.GetEnumerator(this.Cancellation, AsyncIterationHint.Default)) { - var res = await iterator.MoveNext(this.Cancellation); + var res = await iterator.MoveNextAsync(); Assert.That(res, Is.True); Assert.That(iterator.Current, Is.EqualTo(42)); - res = await iterator.MoveNext(this.Cancellation); + res = await iterator.MoveNextAsync(); Assert.That(res, Is.False); } @@ -222,15 +226,15 @@ public async Task Test_Producer_Single() // Func> - singleton = FdbAsyncEnumerable.Single((ct) => Task.Delay(50, ct).ContinueWith(_ => 42)); + singleton = AsyncEnumerable.Single((ct) 
=> Task.Delay(50, ct).ContinueWith(_ => 42)); Assert.That(singleton, Is.Not.Null); - using (var iterator = singleton.GetEnumerator()) + using (var iterator = singleton.GetEnumerator(this.Cancellation, AsyncIterationHint.Default)) { - var res = await iterator.MoveNext(this.Cancellation); + var res = await iterator.MoveNextAsync(); Assert.That(res, Is.True); Assert.That(iterator.Current, Is.EqualTo(42)); - res = await iterator.MoveNext(this.Cancellation); + res = await iterator.MoveNextAsync(); Assert.That(res, Is.False); } @@ -254,7 +258,7 @@ public async Task Test_Can_Select_Sync() var selected = source.Select(x => x + 1); Assert.That(selected, Is.Not.Null); - Assert.That(selected, Is.InstanceOf>()); + Assert.That(selected, Is.InstanceOf>()); var results = await selected.ToListAsync(); Assert.That(results, Is.EqualTo(new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })); @@ -271,7 +275,7 @@ public async Task Test_Can_Select_Async() return x + 1; }); Assert.That(selected, Is.Not.Null); - Assert.That(selected, Is.InstanceOf>()); + Assert.That(selected, Is.InstanceOf>()); var results = await selected.ToListAsync(); Assert.That(results, Is.EqualTo(new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })); @@ -284,11 +288,11 @@ public async Task Test_Can_Select_Multiple_Times() var squares = source.Select(x => (long)x * x); Assert.That(squares, Is.Not.Null); - Assert.That(squares, Is.InstanceOf>()); + Assert.That(squares, Is.InstanceOf>()); var roots = squares.Select(x => Math.Sqrt(x)); Assert.That(roots, Is.Not.Null); - Assert.That(roots, Is.InstanceOf>()); + Assert.That(roots, Is.InstanceOf>()); var results = await roots.ToListAsync(); Assert.That(results, Is.EqualTo(new double[] { 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0 })); @@ -301,11 +305,11 @@ public async Task Test_Can_Select_Async_Multiple_Times() var squares = source.Select(x => Task.FromResult((long)x * x)); Assert.That(squares, Is.Not.Null); - Assert.That(squares, Is.InstanceOf>()); + Assert.That(squares, 
Is.InstanceOf>()); var roots = squares.Select(x => Task.FromResult(Math.Sqrt(x))); Assert.That(roots, Is.Not.Null); - Assert.That(roots, Is.InstanceOf>()); + Assert.That(roots, Is.InstanceOf>()); var results = await roots.ToListAsync(); Assert.That(results, Is.EqualTo(new double[] { 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0 })); @@ -330,7 +334,7 @@ public async Task Test_Can_Take() var query = source.Take(10); Assert.That(query, Is.Not.Null); - Assert.That(query, Is.InstanceOf>()); + Assert.That(query, Is.InstanceOf>()); var results = await query.ToListAsync(); Assert.That(results, Is.EqualTo(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 })); @@ -346,7 +350,7 @@ public async Task Test_Can_Where_And_Take() .Where(x => x % 2 == 1) .Take(10); Assert.That(query, Is.Not.Null); - Assert.That(query, Is.InstanceOf>()); + Assert.That(query, Is.InstanceOf>()); var results = await query.ToListAsync(); Assert.That(results, Is.EqualTo(new int[] { 1, 3, 5, 7, 9, 11, 13, 15, 17, 19 })); @@ -361,7 +365,7 @@ public async Task Test_Can_Take_And_Where() .Take(10) .Where(x => x % 2 == 1); Assert.That(query, Is.Not.Null); - Assert.That(query, Is.InstanceOf>()); + Assert.That(query, Is.InstanceOf>()); var results = await query.ToListAsync(); Assert.That(results, Is.EqualTo(new int[] { 1, 3, 5, 7, 9 })); @@ -376,7 +380,7 @@ public async Task Test_Can_Combine_Where_Clauses() .Where(x => x % 2 == 1) .Where(x => x % 3 == 0); Assert.That(query, Is.Not.Null); - Assert.That(query, Is.InstanceOf>()); // should have been optimized + Assert.That(query, Is.InstanceOf>()); // should have been optimized var results = await query.ToListAsync(); Assert.That(results, Is.EqualTo(new int[] { 3, 9, 15, 21, 27, 33, 39 })); @@ -391,7 +395,7 @@ public async Task Test_Can_Skip_And_Where() .Skip(21) .Where(x => x % 2 == 1); Assert.That(query, Is.Not.Null); - Assert.That(query, Is.InstanceOf>()); + Assert.That(query, Is.InstanceOf>()); var results = await query.ToListAsync(); Assert.That(results, 
Is.EqualTo(new int[] { 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41 })); @@ -406,7 +410,7 @@ public async Task Test_Can_Where_And_Skip() .Where(x => x % 2 == 1) .Skip(15); Assert.That(query, Is.Not.Null); - Assert.That(query, Is.InstanceOf>()); // should be optimized + Assert.That(query, Is.InstanceOf>()); // should be optimized var results = await query.ToListAsync(); Assert.That(results, Is.EqualTo(new int[] { 31, 33, 35, 37, 39, 41 })); @@ -431,7 +435,7 @@ public async Task Test_Can_Get_First() int first = await source.FirstAsync(); Assert.That(first, Is.EqualTo(42)); - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); Assert.That(() => source.FirstAsync().GetAwaiter().GetResult(), Throws.InstanceOf()); } @@ -443,7 +447,7 @@ public async Task Test_Can_Get_FirstOrDefault() int first = await source.FirstOrDefaultAsync(); Assert.That(first, Is.EqualTo(42)); - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); first = await source.FirstOrDefaultAsync(); Assert.That(first, Is.EqualTo(0)); @@ -456,7 +460,7 @@ public async Task Test_Can_Get_Single() int first = await source.SingleAsync(); Assert.That(first, Is.EqualTo(42)); - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); Assert.That(() => source.SingleAsync().GetAwaiter().GetResult(), Throws.InstanceOf()); source = Enumerable.Range(42, 3).ToAsyncEnumerable(); @@ -471,7 +475,7 @@ public async Task Test_Can_Get_SingleOrDefault() int first = await source.SingleOrDefaultAsync(); Assert.That(first, Is.EqualTo(42)); - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); first = await source.SingleOrDefaultAsync(); Assert.That(first, Is.EqualTo(0)); @@ -486,7 +490,7 @@ public async Task Test_Can_Get_Last() int first = await source.LastAsync(); Assert.That(first, Is.EqualTo(44)); - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); Assert.That(() => source.LastAsync().GetAwaiter().GetResult(), Throws.InstanceOf()); } 
@@ -498,7 +502,7 @@ public async Task Test_Can_Get_LastOrDefault() int first = await source.LastOrDefaultAsync(); Assert.That(first, Is.EqualTo(44)); - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); first = await source.LastOrDefaultAsync(); Assert.That(first, Is.EqualTo(0)); @@ -522,7 +526,7 @@ public async Task Test_Can_Get_ElementAt() Assert.That(() => source.ElementAtAsync(10).GetAwaiter().GetResult(), Throws.InstanceOf()); - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); Assert.That(() => source.ElementAtAsync(0).GetAwaiter().GetResult(), Throws.InstanceOf()); } @@ -545,7 +549,7 @@ public async Task Test_Can_Get_ElementAtOrDefault() item = await source.ElementAtOrDefaultAsync(10); Assert.That(item, Is.EqualTo(0)); - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); item = await source.ElementAtOrDefaultAsync(0); Assert.That(item, Is.EqualTo(0)); item = await source.ElementAtOrDefaultAsync(42); @@ -627,7 +631,7 @@ public async Task Test_Can_Any() any = await source.AnyAsync(); Assert.That(any, Is.True); - any = await FdbAsyncEnumerable.Empty().AnyAsync(); + any = await AsyncEnumerable.Empty().AnyAsync(); Assert.That(any, Is.False); } @@ -642,7 +646,7 @@ public async Task Test_Can_Any_With_Predicate() any = await source.AnyAsync(x => x < 0); Assert.That(any, Is.False); - any = await FdbAsyncEnumerable.Empty().AnyAsync(x => x == 42); + any = await AsyncEnumerable.Empty().AnyAsync(x => x == 42); Assert.That(any, Is.False); } @@ -657,7 +661,7 @@ public async Task Test_Can_None() none = await source.NoneAsync(); Assert.That(none, Is.False); - none = await FdbAsyncEnumerable.Empty().NoneAsync(); + none = await AsyncEnumerable.Empty().NoneAsync(); Assert.That(none, Is.True); } @@ -672,7 +676,7 @@ public async Task Test_Can_None_With_Predicate() any = await source.NoneAsync(x => x < 0); Assert.That(any, Is.True); - any = await FdbAsyncEnumerable.Empty().NoneAsync(x => x == 42); + any = await 
AsyncEnumerable.Empty().NoneAsync(x => x == 42); Assert.That(any, Is.True); } @@ -719,7 +723,7 @@ public async Task Test_Can_Min() Assert.That(min, Is.EqualTo(items.Min())); // empty should fail - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); Assert.That(() => source.MinAsync().GetAwaiter().GetResult(), Throws.InstanceOf()); } @@ -746,7 +750,7 @@ public async Task Test_Can_Max() Assert.That(max, Is.EqualTo(items.Max())); // empty should fail - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); Assert.That(() => source.MaxAsync().GetAwaiter().GetResult(), Throws.InstanceOf()); } @@ -763,7 +767,7 @@ public async Task Test_Can_Sum_Signed() Assert.That(sum, Is.EqualTo(expected)); // empty should return 0 - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); sum = await source.SumAsync(); Assert.That(sum, Is.EqualTo(0)); } @@ -781,7 +785,7 @@ public async Task Test_Can_Sum_Unsigned() Assert.That(sum, Is.EqualTo(expected)); // empty should return 0 - source = FdbAsyncEnumerable.Empty(); + source = AsyncEnumerable.Empty(); sum = await source.SumAsync(); Assert.That(sum, Is.EqualTo(0)); } @@ -894,7 +898,7 @@ public async Task Test_Can_Batch() // empty - query = FdbAsyncEnumerable.Empty().Batch(20); + query = AsyncEnumerable.Empty().Batch(20); Assert.That(query, Is.Not.Null); results = await query.ToListAsync(); @@ -907,7 +911,7 @@ public async Task Test_Can_Window() // generate a source that stalls every 13 items, from 0 to 49 - var source = new FdbAnonymousAsyncGenerator((index, ct) => + var source = new AnonymousAsyncGenerator((index, ct) => { if (index >= 50) return Task.FromResult(Maybe.Nothing()); if (index % 13 == 0) return Task.Delay(100).ContinueWith((_) => Maybe.Return((int)index)); @@ -951,14 +955,14 @@ public async Task Test_Can_Prefetch_On_Constant_Latency_Source() int called = 0; var sw = new Stopwatch(); - Console.WriteLine("CONSTANT LATENCY GENERATOR:"); + Log("CONSTANT LATENCY 
GENERATOR:"); // this iterator waits on each item produced - var source = new FdbAnonymousAsyncGenerator((index, ct) => + var source = new AnonymousAsyncGenerator((index, ct) => { Interlocked.Increment(ref called); if (index >= 10) return Task.FromResult(Maybe.Nothing()); - return Task.Delay(15).ContinueWith((_) => Maybe.Return((int)index)); + return Task.Delay(15, ct).ContinueWith((_) => Maybe.Return((int)index), ct); }); var results = await source.ToListAsync(); @@ -967,32 +971,32 @@ public async Task Test_Can_Prefetch_On_Constant_Latency_Source() // record the timing and call history to ensure that inner is called at least twice before the first item gets out - Func> record = (x) => FdbTuple.Create(x, Volatile.Read(ref called)); + Func record = (x) => STuple.Create(x, Volatile.Read(ref called)); // without prefetching, the number of calls should match for the producer and the consumer called = 0; sw.Restart(); var withoutPrefetching = await source.Select(record).ToListAsync(this.Cancellation); - Console.WriteLine("P0: {0}", String.Join(", ", withoutPrefetching)); - Assert.That(withoutPrefetching.Select(x => x.Item1), Is.EqualTo(Enumerable.Range(0, 10))); - Assert.That(withoutPrefetching.Select(x => x.Item2), Is.EqualTo(Enumerable.Range(1, 10))); + Log("P0: {0}", String.Join(", ", withoutPrefetching)); + Assert.That(withoutPrefetching.Select(x => x.Value), Is.EqualTo(Enumerable.Range(0, 10))); + Assert.That(withoutPrefetching.Select(x => x.Called), Is.EqualTo(Enumerable.Range(1, 10))); // with prefetching, the consumer should always have one item in advance called = 0; sw.Restart(); var withPrefetching1 = await source.Prefetch().Select(record).ToListAsync(this.Cancellation); - Console.WriteLine("P1: {0}", String.Join(", ", withPrefetching1)); - Assert.That(withPrefetching1.Select(x => x.Item1), Is.EqualTo(Enumerable.Range(0, 10))); - Assert.That(withPrefetching1.Select(x => x.Item2), Is.EqualTo(Enumerable.Range(2, 10))); + Log("P1: {0}", String.Join(", ", 
withPrefetching1)); + Assert.That(withPrefetching1.Select(x => x.Value), Is.EqualTo(Enumerable.Range(0, 10))); + Assert.That(withPrefetching1.Select(x => x.Called), Is.EqualTo(Enumerable.Range(2, 10))); // prefetching more than 1 item on a consumer that is not buffered should not change the picture (since we can only read one ahead anyway) //REVIEW: maybe we should change the implementation of the operator so that it still prefetch items in the background if the rest of the query is lagging a bit? called = 0; sw.Restart(); var withPrefetching2 = await source.Prefetch(2).Select(record).ToListAsync(this.Cancellation); - Console.WriteLine("P2: {0}", String.Join(", ", withPrefetching2)); - Assert.That(withPrefetching2.Select(x => x.Item1), Is.EqualTo(Enumerable.Range(0, 10))); - Assert.That(withPrefetching2.Select(x => x.Item2), Is.EqualTo(Enumerable.Range(2, 10))); + Log("P2: {0}", String.Join(", ", withPrefetching2)); + Assert.That(withPrefetching2.Select(x => x.Value), Is.EqualTo(Enumerable.Range(0, 10))); + Assert.That(withPrefetching2.Select(x => x.Called), Is.EqualTo(Enumerable.Range(2, 10))); } [Test] @@ -1001,10 +1005,10 @@ public async Task Test_Can_Prefetch_On_Bursty_Source() int called = 0; var sw = new Stopwatch(); - Console.WriteLine("BURSTY GENERATOR:"); + Log("BURSTY GENERATOR:"); // this iterator produce burst of items - var source = new FdbAnonymousAsyncGenerator((index, ct) => + var source = new AnonymousAsyncGenerator((index, ct) => { Interlocked.Increment(ref called); if (index >= 10) return Task.FromResult(Maybe.Nothing()); @@ -1012,9 +1016,9 @@ public async Task Test_Can_Prefetch_On_Bursty_Source() return Task.FromResult(Maybe.Return((int)index)); }); - Func> record = (x) => + Func> record = (x) => { - var res = FdbTuple.Create(x, Volatile.Read(ref called), sw.Elapsed); + var res = STuple.Create(x, Volatile.Read(ref called), sw.Elapsed); sw.Restart(); return res; }; @@ -1023,7 +1027,7 @@ public async Task Test_Can_Prefetch_On_Bursty_Source() 
called = 0; sw.Restart(); var withoutPrefetching = await source.Select(record).ToListAsync(this.Cancellation); - Console.WriteLine("P0: {0}", String.Join(", ", withoutPrefetching)); + Log("P0: {0}", String.Join(", ", withoutPrefetching)); Assert.That(withoutPrefetching.Select(x => x.Item1), Is.EqualTo(Enumerable.Range(0, 10))); // with prefetching K, the consumer should always have K items in advance @@ -1033,7 +1037,7 @@ public async Task Test_Can_Prefetch_On_Bursty_Source() called = 0; sw.Restart(); var withPrefetchingK = await source.Prefetch(K).Select(record).ToListAsync(this.Cancellation); - Console.WriteLine("P{0}: {1}", K, String.Join(", ", withPrefetchingK)); + Log("P{0}: {1}", K, String.Join(", ", withPrefetchingK)); Assert.That(withPrefetchingK.Select(x => x.Item1), Is.EqualTo(Enumerable.Range(0, 10))); Assert.That(withPrefetchingK[0].Item2, Is.EqualTo(K + 1), "Generator must have {0} call(s) in advance!", K); Assert.That(withPrefetchingK.Select(x => x.Item2), Is.All.LessThanOrEqualTo(11)); @@ -1043,7 +1047,7 @@ public async Task Test_Can_Prefetch_On_Bursty_Source() called = 0; sw.Restart(); var withPrefetching5 = await source.Prefetch(5).Select(record).ToListAsync(this.Cancellation); - Console.WriteLine("P5: {0}", String.Join(", ", withPrefetching5)); + Log("P5: {0}", String.Join(", ", withPrefetching5)); Assert.That(withPrefetching5.Select(x => x.Item1), Is.EqualTo(Enumerable.Range(0, 10))); Assert.That(withPrefetching5[0].Item2, Is.EqualTo(5), "Generator must have only 4 calls in advance because it only produces 4 items at a time!"); Assert.That(withPrefetching5.Select(x => x.Item2), Is.All.LessThanOrEqualTo(11)); @@ -1116,21 +1120,20 @@ public async Task Test_Exceptions_Are_Propagated_To_Caller() return x; }); - using (var iterator = query.GetEnumerator()) + using (var iterator = query.GetEnumerator(this.Cancellation, AsyncIterationHint.Default)) { // first move next should succeed - bool res = await iterator.MoveNext(CancellationToken.None); + bool 
res = await iterator.MoveNextAsync(); Assert.That(res, Is.True); // second move next should fail - var x = Assert.Throws(async () => await iterator.MoveNext(CancellationToken.None), "Should have failed"); - Assert.That(x.Message, Is.EqualTo("KABOOM")); + Assert.That(async () => await iterator.MoveNextAsync(), Throws.InstanceOf().With.Message.EqualTo("KABOOM"), "Should have failed"); // accessing current should rethrow the exception Assert.That(() => iterator.Current, Throws.InstanceOf()); // another attempt at MoveNext should fail immediately but with a different error - Assert.Throws(async () => await iterator.MoveNext(CancellationToken.None)); + Assert.That(async () => await iterator.MoveNextAsync(), Throws.InstanceOf()); } } @@ -1167,14 +1170,14 @@ public async Task Test_Parallel_Select_Async() try { Assert.That(n, Is.LessThanOrEqualTo(MAX_CONCURRENCY)); - Console.WriteLine("** " + sw.Elapsed + " start " + x + " (" + n + ")"); + Log("** " + sw.Elapsed + " start " + x + " (" + n + ")"); #if DEBUG_STACK_TRACES - Console.WriteLine("> " + new StackTrace().ToString().Replace("\r\n", "\r\n> ")); + Log("> " + new StackTrace().ToString().Replace("\r\n", "\r\n> ")); #endif int ms; lock (rnd) { ms = rnd.Next(25) + 50; } await Task.Delay(ms); - Console.WriteLine("** " + sw.Elapsed + " stop " + x + " (" + Volatile.Read(ref concurrent) + ")"); + Log("** " + sw.Elapsed + " stop " + x + " (" + Volatile.Read(ref concurrent) + ")"); return x * x; } @@ -1190,13 +1193,13 @@ public async Task Test_Parallel_Select_Async() throw; } }, - new FdbParallelQueryOptions { MaxConcurrency = MAX_CONCURRENCY } + new ParallelAsyncQueryOptions { MaxConcurrency = MAX_CONCURRENCY } ); var results = await query.ToListAsync(token); Assert.That(Volatile.Read(ref concurrent), Is.EqualTo(0)); - Console.WriteLine("Results: " + string.Join(", ", results)); + Log("Results: " + string.Join(", ", results)); Assert.That(results, Is.EqualTo(Enumerable.Range(1, N).Select(x => x * x).ToArray())); } @@ -1221,24 
+1224,24 @@ public async Task Test_FdbAsyncBuffer() { while (!token.IsCancellationRequested) { - Console.WriteLine("[consumer] start receiving next..."); + Log("[consumer] start receiving next..."); var msg = await buffer.ReceiveAsync(token); #if DEBUG_STACK_TRACES - Console.WriteLine("[consumer] > " + new StackTrace().ToString().Replace("\r\n", "\r\n[consumer] > ")); + Log("[consumer] > " + new StackTrace().ToString().Replace("\r\n", "\r\n[consumer] > ")); #endif if (msg.HasValue) { - Console.WriteLine("[consumer] Got value " + msg.Value); + Log("[consumer] Got value " + msg.Value); } else if (msg.HasValue) { - Console.WriteLine("[consumer] Got error: " + msg.Error); + Log("[consumer] Got error: " + msg.Error); msg.ThrowForNonSuccess(); break; } else { - Console.WriteLine("[consumer] Done!"); + Log("[consumer] Done!"); break; } @@ -1251,25 +1254,25 @@ public async Task Test_FdbAsyncBuffer() // first 5 calls to enqueue should already be completed while (!token.IsCancellationRequested && i < MAX_CAPACITY * 10) { - Console.WriteLine("[PRODUCER] Publishing " + i); + Log("[PRODUCER] Publishing " + i); #if DEBUG_STACK_TRACES - Console.WriteLine("[PRODUCER] > " + new StackTrace().ToString().Replace("\r\n", "\r\n[PRODUCER] > ")); + Log("[PRODUCER] > " + new StackTrace().ToString().Replace("\r\n", "\r\n[PRODUCER] > ")); #endif await buffer.OnNextAsync(i, token); ++i; - Console.WriteLine("[PRODUCER] Published"); + Log("[PRODUCER] Published"); #if DEBUG_STACK_TRACES - Console.WriteLine("[PRODUCER] > " + new StackTrace().ToString().Replace("\r\n", "\r\n[PRODUCER] > ")); + Log("[PRODUCER] > " + new StackTrace().ToString().Replace("\r\n", "\r\n[PRODUCER] > ")); #endif if (rnd.Next(10) < 2) { - Console.WriteLine("[PRODUCER] Thinking " + i); + Log("[PRODUCER] Thinking " + i); await Task.Delay(10); } } - Console.WriteLine("[PRODUCER] COMPLETED!"); + Log("[PRODUCER] COMPLETED!"); buffer.OnCompleted(); var t = await Task.WhenAny(pump, Task.Delay(TimeSpan.FromSeconds(10), token)); @@ 
-1295,7 +1298,7 @@ public async Task Test_FdbASyncIteratorPump() { await Task.Delay(15); } - Console.WriteLine("[PRODUCER] publishing " + x + " at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); + Log("[PRODUCER] publishing " + x + " at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); return x; }); @@ -1311,40 +1314,40 @@ public async Task Test_FdbASyncIteratorPump() var queue = AsyncHelpers.CreateTarget( onNextAsync: (x, ct) => { - Console.WriteLine("[consumer] onNextAsync(" + x + ") at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); + Log("[consumer] onNextAsync(" + x + ") at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); #if DEBUG_STACK_TRACES - Console.WriteLine("> " + new StackTrace().ToString().Replace("\r\n", "\r\n> ")); + Log("> " + new StackTrace().ToString().Replace("\r\n", "\r\n> ")); #endif ct.ThrowIfCancellationRequested(); items.Add(x); - return TaskHelpers.CompletedTask; + return Task.CompletedTask; }, onCompleted: () => { - Console.WriteLine("[consumer] onCompleted() at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); + Log("[consumer] onCompleted() at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); #if DEBUG_STACK_TRACES - Console.WriteLine("> " + new StackTrace().ToString().Replace("\r\n", "\r\n> ")); + Log("> " + new StackTrace().ToString().Replace("\r\n", "\r\n> ")); #endif done = true; }, onError: (x) => { - Console.WriteLine("[consumer] onError() at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); - Console.WriteLine("[consumer] > " + x); + Log("[consumer] onError() at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); + Log("[consumer] > " + x); error = x; go.Cancel(); } ); - using(var inner = source.GetEnumerator()) + using(var inner = 
source.GetEnumerator(this.Cancellation, AsyncIterationHint.Default)) { - var pump = new FdbAsyncIteratorPump(inner, queue); + var pump = new AsyncIteratorPump(inner, queue); - Console.WriteLine("[PUMP] Start pumping on #" + Thread.CurrentThread.ManagedThreadId); + Log("[PUMP] Start pumping on #" + Thread.CurrentThread.ManagedThreadId); sw.Start(); await pump.PumpAsync(token); sw.Stop(); - Console.WriteLine("[PUMP] Pumping completed! at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); + Log("[PUMP] Pumping completed! at " + sw.Elapsed.TotalMilliseconds + " on #" + Thread.CurrentThread.ManagedThreadId); // We should have N items, plus 1 message for the completion Assert.That(items.Count, Is.EqualTo(N)); @@ -1392,7 +1395,7 @@ private static async Task VerifyResult(Func> asyncQuery, Func refe } catch(AssertionException x) { - Console.WriteLine("FAIL: " + witness.Expression + "\r\n > " + x.Message); + Log("FAIL: " + witness.Expression + "\r\n > " + x.Message); } } @@ -1430,7 +1433,7 @@ private static async Task VerifySequence(Func> asyncQuery, F } catch (AssertionException x) { - Console.WriteLine("FAIL: " + witness.Expression + "\r\n > " + x.Message); + Log("FAIL: " + witness.Expression + "\r\n > " + x.Message); } } @@ -1456,7 +1459,7 @@ public async Task Test_AsyncLinq_vs_LinqToObject() for(int i=0;i query = SourceOfInts.ToAsyncEnumerable(); + IAsyncEnumerable query = SourceOfInts.ToAsyncEnumerable(); IEnumerable reference = SourceOfInts; IQueryable witness = Queryable.AsQueryable(SourceOfInts); @@ -1652,14 +1655,14 @@ public async Task Test_Record_Items() .Observe((x) => after.Add(x)) .Select((x) => x + 1); - Console.WriteLine("query: " + query); + Log("query: " + query); var results = await query.ToListAsync(); - Console.WriteLine("input : " + String.Join(", ", items)); - Console.WriteLine("before: " + String.Join(", ", before)); - Console.WriteLine("after : " + String.Join(", ", after)); - Console.WriteLine("output: " + 
String.Join(", ", results)); + Log("input : " + String.Join(", ", items)); + Log("before: " + String.Join(", ", before)); + Log("after : " + String.Join(", ", after)); + Log("output: " + String.Join(", ", results)); Assert.That(before, Is.EqualTo(Enumerable.Range(0, 10).ToList())); Assert.That(after, Is.EqualTo(Enumerable.Range(0, 10).Where(x => x % 2 == 1).ToList())); diff --git a/FoundationDB.Tests/Linq/FdbAsyncQueryableFacts.cs b/FoundationDB.Tests/Linq/AsyncQueryableFacts.cs similarity index 85% rename from FoundationDB.Tests/Linq/FdbAsyncQueryableFacts.cs rename to FoundationDB.Tests/Linq/AsyncQueryableFacts.cs index 836913402..ec87f4034 100644 --- a/FoundationDB.Tests/Linq/FdbAsyncQueryableFacts.cs +++ b/FoundationDB.Tests/Linq/AsyncQueryableFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,21 +28,18 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Linq.Tests { + using System; + using System.Collections.Generic; + using System.Threading.Tasks; using FoundationDB.Client; using FoundationDB.Client.Tests; using FoundationDB.Layers.Indexing; - using FoundationDB.Layers.Tuples; using FoundationDB.Linq.Expressions; using FoundationDB.Linq.Providers; using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Linq.Expressions; - using System.Threading; - using System.Threading.Tasks; [TestFixture] - public class FdbAsyncQueryableFacts : FdbTest + public class AsyncQueryableFacts : FdbTest { [Test] @@ -62,25 +59,25 @@ await db.WriteAsync((tr) => tr.Set(location.Keys.Encode("Narf"), Slice.FromString("Zort")); }, this.Cancellation); - var range = db.Query().RangeStartsWith(location.Key); + var range = db.Query().RangeStartsWith(location.GetPrefix()); Assert.That(range, Is.InstanceOf>>()); Assert.That(range.Expression, Is.InstanceOf()); - Console.WriteLine(range.Expression.DebugView); + Log(range.Expression.DebugView); var projection = range.Select(kvp => kvp.Value.ToString()); Assert.That(projection, Is.InstanceOf>()); Assert.That(projection.Expression, Is.InstanceOf, string>>()); - Console.WriteLine(projection.Expression.DebugView); + Log(projection.Expression.DebugView); var results = await projection.ToListAsync(); - Console.WriteLine("ToListAsync() => [ " + String.Join(", ", results) + " ]"); + Log("ToListAsync() => [ " + String.Join(", ", results) + " ]"); var count = await projection.CountAsync(); - Console.WriteLine("CountAsync() => " + count); + Log("CountAsync() => " + count); Assert.That(count, Is.EqualTo(2)); var first = await projection.FirstAsync(); - Console.WriteLine("FirstAsync() => " + first); + Log("FirstAsync() => " + first); Assert.That(first, Is.EqualTo("World!")); } } @@ -110,10 +107,10 @@ await db.WriteAsync((tr) => Assert.That(lookup, Is.InstanceOf>()); Assert.That(lookup.Expression, Is.InstanceOf>()); - 
Console.WriteLine(lookup.Expression.DebugView); + Log(lookup.Expression.DebugView); var ids = await lookup.ToListAsync(); - Console.WriteLine("=> [ " + String.Join(", ", ids) + " ]"); + Log("=> [ " + String.Join(", ", ids) + " ]"); } @@ -149,19 +146,19 @@ await db.WriteAsync((tr) => var lookup = index.Query(db).Lookup(x => x <= 100); Assert.That(lookup, Is.InstanceOf>()); Assert.That(lookup.Expression, Is.InstanceOf>()); - Console.WriteLine(lookup.Expression.DebugView); + Log(lookup.Expression.DebugView); var ids = await lookup.ToListAsync(); - Console.WriteLine("=> [ " + String.Join(", ", ids) + " ]"); + Log("=> [ " + String.Join(", ", ids) + " ]"); // find all that are over nine thousand lookup = index.Query(db).Lookup(x => x >= 9000); Assert.That(lookup, Is.InstanceOf>()); Assert.That(lookup.Expression, Is.InstanceOf>()); - Console.WriteLine(lookup.Expression.DebugView); + Log(lookup.Expression.DebugView); ids = await lookup.ToListAsync(); - Console.WriteLine("=> [ " + String.Join(", ", ids) + " ]"); + Log("=> [ " + String.Join(", ", ids) + " ]"); } diff --git a/FoundationDB.Tests/Linq/FdbQueryExpressionFacts.cs b/FoundationDB.Tests/Linq/FdbQueryExpressionFacts.cs index 93e38037d..7106ad15f 100644 --- a/FoundationDB.Tests/Linq/FdbQueryExpressionFacts.cs +++ b/FoundationDB.Tests/Linq/FdbQueryExpressionFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,183 +26,187 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Linq.Expressions.Tests + namespace FoundationDB.Linq.Expressions.Tests { - using FoundationDB.Client; - using FoundationDB.Layers.Indexing; - using FoundationDB.Layers.Tuples; - using NUnit.Framework; using System; using System.Collections.Generic; using System.Linq.Expressions; + using Doxense.Collections.Tuples; + using Doxense.Linq; + using FoundationDB.Client; + using FoundationDB.Client.Tests; + using FoundationDB.Layers.Indexing; + using NUnit.Framework; [TestFixture] - public class FdbQueryExpressionFacts + public class FdbQueryExpressionFacts : FdbTest + + { + + private readonly FdbIndex FooBarIndex = new FdbIndex("Foos.ByBar", KeySubspace.FromKey(TuPack.EncodeKey("Foos", 1))); + + private readonly FdbIndex FooBazIndex = new FdbIndex("Foos.ByBaz", KeySubspace.FromKey(TuPack.EncodeKey("Foos", 2))); + + [Test] + public void Test_FdbQueryIndexLookupExpression() + { + var expr = FdbQueryIndexLookupExpression.Lookup( + FooBarIndex, + ExpressionType.Equal, + Expression.Constant("world") + ); + Log(expr); + + Assert.That(expr, Is.Not.Null); + Assert.That(expr.Index, Is.SameAs(FooBarIndex)); //TODO: .Index.Index does not look very nice + Assert.That(expr.Operator, Is.EqualTo(ExpressionType.Equal)); + Assert.That(expr.Value, Is.Not.Null); + Assert.That(expr.Value, Is.InstanceOf().With.Property("Value").EqualTo("world")); + + Assert.That(expr.Type, Is.EqualTo(typeof(IAsyncEnumerable))); + Assert.That(expr.ElementType, Is.EqualTo(typeof(int))); + + Log(FdbQueryExpressions.ExplainSequence(expr)); + + } + + [Test] + public void Test_FdbQueryIndexLookupExpression_From_Lambda() + { + var expr = FdbQueryIndexLookupExpression.Lookup( + FooBarIndex, + (bar) => bar == "world" + ); + Log(expr); + + Assert.That(expr, Is.Not.Null); + Assert.That(expr.Index, Is.SameAs(FooBarIndex)); //TODO: .Index.Index does not look very nice + Assert.That(expr.Operator, Is.EqualTo(ExpressionType.Equal)); + 
Assert.That(expr.Value, Is.Not.Null); + Assert.That(expr.Value, Is.InstanceOf().With.Property("Value").EqualTo("world")); + + Assert.That(expr.Type, Is.EqualTo(typeof(IAsyncEnumerable))); + Assert.That(expr.ElementType, Is.EqualTo(typeof(int))); + + Log(FdbQueryExpressions.ExplainSequence(expr)); + + } + + [Test] + public void Test_FdbQueryRangeExpression() + { + var expr = FdbQueryExpressions.Range( + KeySelectorPair.Create(TuPack.ToKeyRange("Foo")) + ); + Log(expr); + + Assert.That(expr, Is.Not.Null); + Assert.That(expr.Range.Begin.Key.ToString(), Is.EqualTo("<02>Foo<00><00>")); + Assert.That(expr.Range.End.Key.ToString(), Is.EqualTo("<02>Foo<00>")); + + Assert.That(expr.Type, Is.EqualTo(typeof(IAsyncEnumerable>))); + Assert.That(expr.ElementType, Is.EqualTo(typeof(KeyValuePair))); + + Log(FdbQueryExpressions.ExplainSequence(expr)); + } + + [Test] + public void Test_FdbQueryIntersectExpression() + { + var expr1 = FdbQueryIndexLookupExpression.Lookup( + FooBarIndex, + (x) => x == "world" + ); + var expr2 = FdbQueryIndexLookupExpression.Lookup( + FooBazIndex, + (x) => x == 1234L + ); + + var expr = FdbQueryExpressions.Intersect( + expr1, + expr2 + ); + Log(expr); + + Assert.That(expr, Is.Not.Null); + Assert.That(expr.Terms, Is.Not.Null); + Assert.That(expr.Terms.Count, Is.EqualTo(2)); + Assert.That(expr.Terms[0], Is.SameAs(expr1)); + Assert.That(expr.Terms[1], Is.SameAs(expr2)); + + Assert.That(expr.Type, Is.EqualTo(typeof(IAsyncEnumerable))); + Assert.That(expr.ElementType, Is.EqualTo(typeof(int))); + + Log(FdbQueryExpressions.ExplainSequence(expr)); + } + + [Test] + public void Test_FdbQueryUnionExpression() { + var expr1 = FdbQueryIndexLookupExpression.Lookup( + FooBarIndex, + (x) => x == "world" + ); + var expr2 = FdbQueryIndexLookupExpression.Lookup( + FooBazIndex, + (x) => x == 1234L + ); + + var expr = FdbQueryExpressions.Union( + expr1, + expr2 + ); + Log(expr); + + Assert.That(expr, Is.Not.Null); + Assert.That(expr.Terms, Is.Not.Null); + 
Assert.That(expr.Terms.Count, Is.EqualTo(2)); + Assert.That(expr.Terms[0], Is.SameAs(expr1)); + Assert.That(expr.Terms[1], Is.SameAs(expr2)); + + Assert.That(expr.Type, Is.EqualTo(typeof(IAsyncEnumerable))); + Assert.That(expr.ElementType, Is.EqualTo(typeof(int))); + + Log(FdbQueryExpressions.ExplainSequence(expr)); + } - private FdbIndex FooBarIndex = new FdbIndex("Foos.ByBar", FdbSubspace.Create(FdbTuple.Create("Foos", 1))); - private FdbIndex FooBazIndex = new FdbIndex("Foos.ByBaz", FdbSubspace.Create(FdbTuple.Create("Foos", 2))); - - [Test] - public void Test_FdbQueryIndexLookupExpression() - { - var expr = FdbQueryIndexLookupExpression.Lookup( - FooBarIndex, - ExpressionType.Equal, - Expression.Constant("world") - ); - Console.WriteLine(expr); - - Assert.That(expr, Is.Not.Null); - Assert.That(expr.Index, Is.SameAs(FooBarIndex)); //TODO: .Index.Index does not look very nice - Assert.That(expr.Operator, Is.EqualTo(ExpressionType.Equal)); - Assert.That(expr.Value, Is.Not.Null); - Assert.That(expr.Value, Is.InstanceOf().With.Property("Value").EqualTo("world")); - - Assert.That(expr.Type, Is.EqualTo(typeof(IFdbAsyncEnumerable))); - Assert.That(expr.ElementType, Is.EqualTo(typeof(int))); - - Console.WriteLine(FdbQueryExpressions.ExplainSequence(expr)); - - } - - [Test] - public void Test_FdbQueryIndexLookupExpression_From_Lambda() - { - var expr = FdbQueryIndexLookupExpression.Lookup( - FooBarIndex, - (bar) => bar == "world" - ); - Console.WriteLine(expr); - - Assert.That(expr, Is.Not.Null); - Assert.That(expr.Index, Is.SameAs(FooBarIndex)); //TODO: .Index.Index does not look very nice - Assert.That(expr.Operator, Is.EqualTo(ExpressionType.Equal)); - Assert.That(expr.Value, Is.Not.Null); - Assert.That(expr.Value, Is.InstanceOf().With.Property("Value").EqualTo("world")); - - Assert.That(expr.Type, Is.EqualTo(typeof(IFdbAsyncEnumerable))); - Assert.That(expr.ElementType, Is.EqualTo(typeof(int))); - - Console.WriteLine(FdbQueryExpressions.ExplainSequence(expr)); - - } 
- - [Test] - public void Test_FdbQueryRangeExpression() - { - var expr = FdbQueryExpressions.Range( - FdbTuple.Create("Foo").ToSelectorPair() - ); - Console.WriteLine(expr); - - Assert.That(expr, Is.Not.Null); - Assert.That(expr.Range.Begin.Key.ToString(), Is.EqualTo("<02>Foo<00>")); - Assert.That(expr.Range.End.Key.ToString(), Is.EqualTo("<02>Foo<01>")); - - Assert.That(expr.Type, Is.EqualTo(typeof(IFdbAsyncEnumerable>))); - Assert.That(expr.ElementType, Is.EqualTo(typeof(KeyValuePair))); - - Console.WriteLine(FdbQueryExpressions.ExplainSequence(expr)); - } - - [Test] - public void Test_FdbQueryIntersectExpression() - { - var expr1 = FdbQueryIndexLookupExpression.Lookup( - FooBarIndex, - (x) => x == "world" - ); - var expr2 = FdbQueryIndexLookupExpression.Lookup( - FooBazIndex, - (x) => x == 1234L - ); - - var expr = FdbQueryExpressions.Intersect( - expr1, - expr2 - ); - Console.WriteLine(expr); - - Assert.That(expr, Is.Not.Null); - Assert.That(expr.Terms, Is.Not.Null); - Assert.That(expr.Terms.Count, Is.EqualTo(2)); - Assert.That(expr.Terms[0], Is.SameAs(expr1)); - Assert.That(expr.Terms[1], Is.SameAs(expr2)); - - Assert.That(expr.Type, Is.EqualTo(typeof(IFdbAsyncEnumerable))); - Assert.That(expr.ElementType, Is.EqualTo(typeof(int))); - - Console.WriteLine(FdbQueryExpressions.ExplainSequence(expr)); - } - - [Test] - public void Test_FdbQueryUnionExpression() - { - var expr1 = FdbQueryIndexLookupExpression.Lookup( - FooBarIndex, - (x) => x == "world" - ); - var expr2 = FdbQueryIndexLookupExpression.Lookup( - FooBazIndex, - (x) => x == 1234L - ); - - var expr = FdbQueryExpressions.Union( - expr1, - expr2 - ); - Console.WriteLine(expr); - - Assert.That(expr, Is.Not.Null); - Assert.That(expr.Terms, Is.Not.Null); - Assert.That(expr.Terms.Count, Is.EqualTo(2)); - Assert.That(expr.Terms[0], Is.SameAs(expr1)); - Assert.That(expr.Terms[1], Is.SameAs(expr2)); - - Assert.That(expr.Type, Is.EqualTo(typeof(IFdbAsyncEnumerable))); - Assert.That(expr.ElementType, 
Is.EqualTo(typeof(int))); - - Console.WriteLine(FdbQueryExpressions.ExplainSequence(expr)); - } - - [Test] - public void Test_FdbQueryTransformExpression() - { - var expr = FdbQueryExpressions.Transform( - FdbQueryExpressions.RangeStartsWith(FdbTuple.Create("Hello", "World")), - (kvp) => kvp.Value.ToUnicode() - ); - Console.WriteLine(expr); - - Assert.That(expr, Is.Not.Null); - Assert.That(expr.Source, Is.Not.Null.And.InstanceOf()); - Assert.That(expr.Transform, Is.Not.Null); - - Assert.That(expr.Type, Is.EqualTo(typeof(IFdbAsyncEnumerable))); - Assert.That(expr.ElementType, Is.EqualTo(typeof(string))); - - Console.WriteLine(FdbQueryExpressions.ExplainSequence(expr)); - } - - [Test] - public void Test_FdbQueryFilterExpression() - { - var expr = FdbQueryExpressions.Filter( - FdbQueryExpressions.RangeStartsWith(FdbTuple.Create("Hello", "World")), - (kvp) => kvp.Value.ToInt32() % 2 == 0 - ); - Console.WriteLine(expr); - - Assert.That(expr, Is.Not.Null); - Assert.That(expr.Source, Is.Not.Null.And.InstanceOf()); - Assert.That(expr.Filter, Is.Not.Null); - - Assert.That(expr.Type, Is.EqualTo(typeof(IFdbAsyncEnumerable>))); - Assert.That(expr.ElementType, Is.EqualTo(typeof(KeyValuePair))); - - Console.WriteLine(FdbQueryExpressions.ExplainSequence(expr)); - } + [Test] + public void Test_FdbQueryTransformExpression() + { + var expr = FdbQueryExpressions.Transform( + FdbQueryExpressions.RangeStartsWith(TuPack.EncodeKey("Hello", "World")), + (kvp) => kvp.Value.ToUnicode() + ); + Log(expr); + + Assert.That(expr, Is.Not.Null); + Assert.That(expr.Source, Is.Not.Null.And.InstanceOf()); + Assert.That(expr.Transform, Is.Not.Null); + + Assert.That(expr.Type, Is.EqualTo(typeof(IAsyncEnumerable))); + Assert.That(expr.ElementType, Is.EqualTo(typeof(string))); + + Log(FdbQueryExpressions.ExplainSequence(expr)); + } + + [Test] + public void Test_FdbQueryFilterExpression() + { + var expr = FdbQueryExpressions.Filter( + FdbQueryExpressions.RangeStartsWith(TuPack.EncodeKey("Hello", 
"World")), + (kvp) => kvp.Value.ToInt32() % 2 == 0 + ); + Log(expr); + + Assert.That(expr, Is.Not.Null); + Assert.That(expr.Source, Is.Not.Null.And.InstanceOf()); + Assert.That(expr.Filter, Is.Not.Null); + + Assert.That(expr.Type, Is.EqualTo(typeof(IAsyncEnumerable>))); + Assert.That(expr.ElementType, Is.EqualTo(typeof(KeyValuePair))); + + Log(FdbQueryExpressions.ExplainSequence(expr)); + } } diff --git a/FoundationDB.Tests/Properties/AssemblyInfo.cs b/FoundationDB.Tests/Properties/AssemblyInfo.cs index 7c27100a0..28fd55b2b 100644 --- a/FoundationDB.Tests/Properties/AssemblyInfo.cs +++ b/FoundationDB.Tests/Properties/AssemblyInfo.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/FoundationDB.Tests/RangeQueryFacts.cs b/FoundationDB.Tests/RangeQueryFacts.cs index b5848c27d..13a4525e6 100644 --- a/FoundationDB.Tests/RangeQueryFacts.cs +++ b/FoundationDB.Tests/RangeQueryFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -25,20 +25,20 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #endregion - + namespace FoundationDB.Client.Tests { - using FoundationDB.Filters.Logging; - using FoundationDB.Layers.Tuples; - using FoundationDB.Layers.Directories; - using FoundationDB.Linq; - using NUnit.Framework; using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; - using System.Text; using System.Threading.Tasks; + using Doxense.Collections.Tuples; + using Doxense.Linq; + using Doxense.Linq.Async.Iterators; + using Doxense.Serialization.Encoders; + using FoundationDB.Layers.Directories; + using NUnit.Framework; [TestFixture] public class RangeQueryFacts : FdbTest @@ -170,10 +170,10 @@ await db.WriteAsync((tr) => Assert.That(res.Value, Is.EqualTo(Slice.FromInt32(9))); // should fail because there is more than one - Assert.Throws(async () => await query.SingleOrDefaultAsync(), "SingleOrDefaultAsync should throw if the range returns more than 1 result"); + Assert.That(async () => await query.SingleOrDefaultAsync(), Throws.InstanceOf(), "SingleOrDefaultAsync should throw if the range returns more than 1 result"); // should fail because there is more than one - Assert.Throws(async () => await query.SingleAsync(), "SingleAsync should throw if the range returns more than 1 result"); + Assert.That(async () => await query.SingleAsync(), Throws.InstanceOf(), "SingleAsync should throw if the range returns more than 1 result"); } // B: exactly one item @@ -223,7 +223,7 @@ await db.WriteAsync((tr) => Assert.That(res.Value, Is.EqualTo(Slice.Nil)); // should return the first one - Assert.Throws(async () => await query.FirstAsync(), "FirstAsync should throw if the range returns nothing"); + Assert.That(async () => await query.FirstAsync(), Throws.InstanceOf(), "FirstAsync should throw if the range returns nothing"); // should return the last one res = await query.LastOrDefaultAsync(); @@ -231,7 +231,7 @@ await db.WriteAsync((tr) => Assert.That(res.Value, Is.EqualTo(Slice.Nil)); // should return the last one - Assert.Throws(async 
() => await query.LastAsync(), "LastAsync should throw if the range returns nothing"); + Assert.That(async () => await query.LastAsync(), Throws.InstanceOf(), "LastAsync should throw if the range returns nothing"); // should fail because there is more than one res = await query.SingleOrDefaultAsync(); @@ -239,7 +239,7 @@ await db.WriteAsync((tr) => Assert.That(res.Value, Is.EqualTo(Slice.Nil)); // should fail because there is none - Assert.Throws(async () => await query.SingleAsync(), "SingleAsync should throw if the range returns nothing"); + Assert.That(async () => await query.SingleAsync(), Throws.InstanceOf(), "SingleAsync should throw if the range returns nothing"); } // A: with a size limit @@ -296,8 +296,8 @@ await db.WriteAsync((tr) => tr.Set(a.Keys.Encode(i), Slice.FromInt32(i)); } // add guard keys - tr.Set(location.Key, Slice.FromInt32(-1)); - tr.Set(location.Key + (byte)255, Slice.FromInt32(-1)); + tr.Set(location.GetPrefix(), Slice.FromInt32(-1)); + tr.Set(location.GetPrefix() + (byte)255, Slice.FromInt32(-1)); }, this.Cancellation); // Take(5) should return the first 5 items @@ -342,11 +342,11 @@ await db.WriteAsync((tr) => { var query = tr.GetRange(a.Keys.ToRange()).Take(0); Assert.That(query, Is.Not.Null); - Assert.That(query.Limit, Is.EqualTo(0)); + Assert.That(query.Limit, Is.Zero); var elements = await query.ToListAsync(); Assert.That(elements, Is.Not.Null); - Assert.That(elements.Count, Is.EqualTo(0)); + Assert.That(elements.Count, Is.Zero); } } @@ -384,11 +384,11 @@ public async Task Test_Can_Skip() // |xxxxxxxxxxxxxxxxxxxxxxxxxxxxx|(100->) res = await query.Skip(100).ToListAsync(); - Assert.That(res.Count, Is.EqualTo(0), "100 --> 99"); + Assert.That(res.Count, Is.Zero, "100 --> 99"); // |xxxxxxxxxxxxxxxxxxxxxxxxxxxxx|_____________(150->) res = await query.Skip(150).ToListAsync(); - Assert.That(res.Count, Is.EqualTo(0), "150 --> 100"); + Assert.That(res.Count, Is.Zero, "150 --> 100"); } // from the end @@ -411,11 +411,11 @@ public async Task 
Test_Can_Skip() // (<- -1)|<<<<<<<<<<<<<<<<<<<<<<<<<<<<<| res = await query.Reverse().Skip(100).ToListAsync(); - Assert.That(res.Count, Is.EqualTo(0), "0 <-- -1"); + Assert.That(res.Count, Is.Zero, "0 <-- -1"); // (<- -51)<<<<<<<<<<<<<|<<<<<<<<<<<<<<<<<<<<<<<<<<<<<| res = await query.Reverse().Skip(100).ToListAsync(); - Assert.That(res.Count, Is.EqualTo(0), "0 <-- -51"); + Assert.That(res.Count, Is.Zero, "0 <-- -51"); } // from both sides @@ -443,7 +443,7 @@ public async Task Test_Original_Range_Does_Not_Overflow() var location = await GetCleanDirectory(db, "Queries", "Range"); // import test data - var data = Enumerable.Range(0, 30).Select(x => new KeyValuePair(location.Keys.Encode(x), Slice.FromFixed32(x))); + var data = Enumerable.Range(0, 30).Select(x => (location.Keys.Encode(x), Slice.FromFixed32(x))); await Fdb.Bulk.WriteAsync(db, data, this.Cancellation); using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) @@ -502,7 +502,7 @@ public async Task Test_Can_MergeSort() { for (int i = 0; i < N; i++) { - tr.Set(lists[k].Keys.Encode((i * K) + k), FdbTuple.EncodeKey(k, i)); + tr.Set(lists[k].Keys.Encode((i * K) + k), TuPack.EncodeKey(k, i)); } await tr.CommitAsync(); } @@ -514,12 +514,12 @@ public async Task Test_Can_MergeSort() using (var tr = db.BeginTransaction(this.Cancellation)) { var merge = tr.MergeSort( - lists.Select(list => FdbKeySelectorPair.Create(list.Keys.ToRange())), + lists.Select(list => KeySelectorPair.Create(list.Keys.ToRange())), kvp => location.Keys.DecodeLast(kvp.Key) ); Assert.That(merge, Is.Not.Null); - Assert.That(merge, Is.InstanceOf, int, KeyValuePair>>()); + Assert.That(merge, Is.InstanceOf, int, KeyValuePair>>()); var results = await merge.ToListAsync(); Assert.That(results, Is.Not.Null); @@ -527,8 +527,8 @@ public async Task Test_Can_MergeSort() for (int i = 0; i < K * N; i++) { - Assert.That(location.ExtractKey(results[i].Key), Is.EqualTo(FdbTuple.EncodeKey(i % K, i))); - Assert.That(results[i].Value, 
Is.EqualTo(FdbTuple.EncodeKey(i % K, i / K))); + Assert.That(location.ExtractKey(results[i].Key), Is.EqualTo(TuPack.EncodeKey(i % K, i))); + Assert.That(results[i].Value, Is.EqualTo(TuPack.EncodeKey(i % K, i / K))); } } } @@ -556,19 +556,19 @@ public async Task Test_Range_Intersect() var series = Enumerable.Range(1, K).Select(k => Enumerable.Range(1, N).Select(x => k * x).ToArray()).ToArray(); //foreach(var serie in series) //{ - // Console.WriteLine(String.Join(", ", serie)); + // Log(String.Join(", ", serie)); //} for (int k = 0; k < K; k++) { - //Console.WriteLine("> k = " + k); + //Log("> k = " + k); using (var tr = db.BeginTransaction(this.Cancellation)) { for (int i = 0; i < N; i++) { var key = lists[k].Keys.Encode(series[k][i]); - var value = FdbTuple.EncodeKey(k, i); - //Console.WriteLine("> " + key + " = " + value); + var value = TuPack.EncodeKey(k, i); + //Log("> " + key + " = " + value); tr.Set(key, value); } await tr.CommitAsync(); @@ -584,12 +584,12 @@ public async Task Test_Range_Intersect() using (var tr = db.BeginTransaction(this.Cancellation)) { var merge = tr.Intersect( - lists.Select(list => FdbKeySelectorPair.Create(list.Keys.ToRange())), + lists.Select(list => KeySelectorPair.Create(list.Keys.ToRange())), kvp => location.Keys.DecodeLast(kvp.Key) ); Assert.That(merge, Is.Not.Null); - Assert.That(merge, Is.InstanceOf, int, KeyValuePair>>()); + Assert.That(merge, Is.InstanceOf, int, KeyValuePair>>()); var results = await merge.ToListAsync(); Assert.That(results, Is.Not.Null); @@ -628,19 +628,19 @@ public async Task Test_Range_Except() var series = Enumerable.Range(1, K).Select(k => Enumerable.Range(1, N).Select(x => k * x).ToArray()).ToArray(); //foreach(var serie in series) //{ - // Console.WriteLine(String.Join(", ", serie)); + // Log(String.Join(", ", serie)); //} for (int k = 0; k < K; k++) { - //Console.WriteLine("> k = " + k); + //Log("> k = " + k); using (var tr = db.BeginTransaction(this.Cancellation)) { for (int i = 0; i < N; i++) { var 
key = lists[k].Keys.Encode(series[k][i]); - var value = FdbTuple.EncodeKey(k, i); - //Console.WriteLine("> " + key + " = " + value); + var value = TuPack.EncodeKey(k, i); + //Log("> " + key + " = " + value); tr.Set(key, value); } await tr.CommitAsync(); @@ -656,12 +656,12 @@ public async Task Test_Range_Except() using (var tr = db.BeginTransaction(this.Cancellation)) { var merge = tr.Except( - lists.Select(list => FdbKeySelectorPair.Create(list.Keys.ToRange())), + lists.Select(list => KeySelectorPair.Create(list.Keys.ToRange())), kvp => location.Keys.DecodeLast(kvp.Key) ); Assert.That(merge, Is.Not.Null); - Assert.That(merge, Is.InstanceOf, int, KeyValuePair>>()); + Assert.That(merge, Is.InstanceOf, int, KeyValuePair>>()); var results = await merge.ToListAsync(); Assert.That(results, Is.Not.Null); @@ -688,20 +688,20 @@ public async Task Test_Range_Except_Composite_Key() var location = await GetCleanDirectory(db, "Queries", "ExceptComposite"); // Items contains a list of all ("user", id) that were created - var locItems = await location.CreateOrOpenAsync(db, "Items", this.Cancellation); + var locItems = (await location.CreateOrOpenAsync(db, "Items", this.Cancellation)).AsTyped(); // Processed contain the list of all ("user", id) that were processed - var locProcessed = await location.CreateOrOpenAsync(db, "Processed", this.Cancellation); + var locProcessed = (await location.CreateOrOpenAsync(db, "Processed", this.Cancellation)).AsTyped(); // the goal is to have a query that returns the list of all unprocessed items (ie: in Items but not in Processed) await db.WriteAsync((tr) => { // Items - tr.Set(locItems.Keys.Encode("userA", 10093), Slice.Empty); - tr.Set(locItems.Keys.Encode("userA", 19238), Slice.Empty); - tr.Set(locItems.Keys.Encode("userB", 20003), Slice.Empty); + tr.Set(locItems.Keys["userA", 10093], Slice.Empty); + tr.Set(locItems.Keys["userA", 19238], Slice.Empty); + tr.Set(locItems.Keys["userB", 20003], Slice.Empty); // Processed - 
tr.Set(locProcessed.Keys.Encode("userA", 19238), Slice.Empty); + tr.Set(locProcessed.Keys["userA", 19238], Slice.Empty); }, this.Cancellation); // the query (Items ∩ Processed) should return (userA, 10093) and (userB, 20003) @@ -712,13 +712,13 @@ await db.WriteAsync((tr) => { var query = tr.Except( new[] { locItems.Keys.ToRange(), locProcessed.Keys.ToRange() }, - (kv) => FdbTuple.Unpack(kv.Key).Substring(-2), // note: keys come from any of the two ranges, so we must only keep the last 2 elements of the tuple - FdbTupleComparisons.Composite() // compares t[0] as a string, and t[1] as an int + (kv) => TuPack.Unpack(kv.Key).Substring(-2), // note: keys come from any of the two ranges, so we must only keep the last 2 elements of the tuple + TupleComparisons.Composite() // compares t[0] as a string, and t[1] as an int ); // problem: Except() still returns the original (Slice,Slice) pairs from the first range, // meaning that we still need to unpack agin the key (this time knowing the location) - return query.Select(kv => locItems.Keys.Unpack(kv.Key)); + return query.Select(kv => locItems.Keys.Decode(kv.Key)); }, this.Cancellation); foreach(var r in results) @@ -726,8 +726,8 @@ await db.WriteAsync((tr) => Trace.WriteLine(r); } Assert.That(results.Count, Is.EqualTo(2)); - Assert.That(results[0], Is.EqualTo(FdbTuple.Create("userA", 10093))); - Assert.That(results[1], Is.EqualTo(FdbTuple.Create("userB", 20003))); + Assert.That(results[0], Is.EqualTo(("userA", 10093))); + Assert.That(results[1], Is.EqualTo(("userB", 20003))); // Second Method: pre-parse the queries, and merge on the results directly Trace.WriteLine("Method 2:"); @@ -735,14 +735,14 @@ await db.WriteAsync((tr) => { var items = tr .GetRange(locItems.Keys.ToRange()) - .Select(kv => locItems.Keys.Unpack(kv.Key)); + .Select(kv => locItems.Keys.Decode(kv.Key)); var processed = tr .GetRange(locProcessed.Keys.ToRange()) - .Select(kv => locProcessed.Keys.Unpack(kv.Key)); + .Select(kv => 
locProcessed.Keys.Decode(kv.Key)); // items and processed are lists of (string, int) tuples, we can compare them directly - var query = items.Except(processed, FdbTupleComparisons.Composite()); + var query = items.Except(processed, TupleComparisons.Composite()); // query is already a list of tuples, nothing more to do return query; @@ -753,8 +753,8 @@ await db.WriteAsync((tr) => Trace.WriteLine(r); } Assert.That(results.Count, Is.EqualTo(2)); - Assert.That(results[0], Is.EqualTo(FdbTuple.Create("userA", 10093))); - Assert.That(results[1], Is.EqualTo(FdbTuple.Create("userB", 20003))); + Assert.That(results[0], Is.EqualTo(("userA", 10093))); + Assert.That(results[1], Is.EqualTo(("userB", 20003))); } diff --git a/FoundationDB.Tests/SubspaceFacts.cs b/FoundationDB.Tests/SubspaceFacts.cs index fb7b54de9..0804e37ea 100644 --- a/FoundationDB.Tests/SubspaceFacts.cs +++ b/FoundationDB.Tests/SubspaceFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,56 +28,49 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Layers.Tuples.Tests { + using System; + using Doxense.Collections.Tuples; + using Doxense.Serialization.Encoders; using FoundationDB.Client; - using FoundationDB.Layers.Tuples; + using FoundationDB.Client.Tests; using NUnit.Framework; - using System; [TestFixture] - public class SubspaceFacts + public class SubspaceFacts : FdbTest { [Test] public void Test_Empty_Subspace_Is_Empty() { - var subspace = FdbSubspace.Empty; + var subspace = KeySubspace.FromKey(Slice.Empty); Assert.That(subspace, Is.Not.Null, "FdbSubspace.Empty should not return null"); - Assert.That(FdbSubspace.Empty, Is.SameAs(subspace), "FdbSubspace.Empty is a singleton"); - - Assert.That(subspace.Key.Count, Is.EqualTo(0), "FdbSubspace.Empty.Key should be equal to Slice.Empty"); - Assert.That(subspace.Key.HasValue, Is.True, "FdbSubspace.Empty.Key should be equal to Slice.Empty"); - - Assert.That(FdbSubspace.Copy(subspace), Is.Not.SameAs(subspace)); + Assert.That(subspace.GetPrefix(), Is.EqualTo(Slice.Empty), "FdbSubspace.Empty.Key should be equal to Slice.Empty"); + Assert.That(subspace.Copy(), Is.Not.SameAs(subspace)); } [Test] [Category("LocalCluster")] public void Test_Subspace_With_Binary_Prefix() { - var subspace = FdbSubspace.CreateDynamic(Slice.Create(new byte[] { 42, 255, 0, 127 })); + var subspace = KeySubspace.CreateDynamic(new byte[] { 42, 255, 0, 127 }.AsSlice()); - Assert.That(subspace.Key.ToString(), Is.EqualTo("*<00><7F>")); - Assert.That(FdbSubspace.Copy(subspace), Is.Not.SameAs(subspace)); - Assert.That(FdbSubspace.Copy(subspace).Key, Is.EqualTo(subspace.Key)); + Assert.That(subspace.GetPrefix().ToString(), Is.EqualTo("*<00><7F>")); + Assert.That(subspace.Copy(), Is.Not.SameAs(subspace)); + Assert.That(subspace.Copy().GetPrefix(), Is.EqualTo(subspace.GetPrefix())); // concat(Slice) should append the slice to the binary prefix directly - Assert.That(subspace.ConcatKey(Slice.FromInt32(0x01020304)).ToString(), 
Is.EqualTo("*<00><7F><04><03><02><01>")); - Assert.That(subspace.ConcatKey(Slice.FromAscii("hello")).ToString(), Is.EqualTo("*<00><7F>hello")); + Assert.That(subspace[Slice.FromInt32(0x01020304)].ToString(), Is.EqualTo("*<00><7F><04><03><02><01>")); + Assert.That(subspace[Slice.FromStringAscii("hello")].ToString(), Is.EqualTo("*<00><7F>hello")); // pack(...) should use tuple serialization Assert.That(subspace.Keys.Encode(123).ToString(), Is.EqualTo("*<00><7F><15>{")); Assert.That(subspace.Keys.Encode("hello").ToString(), Is.EqualTo("*<00><7F><02>hello<00>")); - Assert.That(subspace.Keys.Encode(Slice.FromAscii("world")).ToString(), Is.EqualTo("*<00><7F><01>world<00>")); - Assert.That(subspace.Keys.Pack(FdbTuple.Create("hello", 123)).ToString(), Is.EqualTo("*<00><7F><02>hello<00><15>{")); - - // if we derive a tuple from this subspace, it should keep the binary prefix when converted to a key - var t = subspace.Keys.Append("world", 123, false); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(3)); - Assert.That(t.Get(0), Is.EqualTo("world")); - Assert.That(t.Get(1), Is.EqualTo(123)); - Assert.That(t.Get(2), Is.False); - var k = t.ToSlice(); + Assert.That(subspace.Keys.Encode(Slice.FromStringAscii("world")).ToString(), Is.EqualTo("*<00><7F><01>world<00>")); + Assert.That(subspace.Keys.Pack(STuple.Create("hello", 123)).ToString(), Is.EqualTo("*<00><7F><02>hello<00><15>{")); + Assert.That(subspace.Keys.Pack(("hello", 123)).ToString(), Is.EqualTo("*<00><7F><02>hello<00><15>{")); + + // if we encode a tuple from this subspace, it should keep the binary prefix when converted to a key + var k = subspace.Keys.Pack(("world", 123, false)); Assert.That(k.ToString(), Is.EqualTo("*<00><7F><02>world<00><15>{<14>")); // if we unpack the key with the binary prefix, we should get a valid tuple @@ -87,17 +80,23 @@ public void Test_Subspace_With_Binary_Prefix() Assert.That(t2.Get(0), Is.EqualTo("world")); Assert.That(t2.Get(1), Is.EqualTo(123)); Assert.That(t2.Get(2), 
Is.False); + + // ValueTuple + Assert.That(subspace.Keys.Pack(ValueTuple.Create("hello")).ToString(), Is.EqualTo("*<00><7F><02>hello<00>")); + Assert.That(subspace.Keys.Pack(("hello", 123)).ToString(), Is.EqualTo("*<00><7F><02>hello<00><15>{")); + Assert.That(subspace.Keys.Pack(("hello", 123, "world")).ToString(), Is.EqualTo("*<00><7F><02>hello<00><15>{<02>world<00>")); + Assert.That(subspace.Keys.Pack(("hello", 123, "world", 456)).ToString(), Is.EqualTo("*<00><7F><02>hello<00><15>{<02>world<00><16><01>")); } [Test] public void Test_Subspace_Copy_Does_Not_Share_Key_Buffer() { - var original = FdbSubspace.Create(Slice.FromString("Hello")); - var copy = FdbSubspace.Copy(original); + var original = KeySubspace.FromKey(Slice.FromString("Hello")); + var copy = original.Copy(); Assert.That(copy, Is.Not.Null); Assert.That(copy, Is.Not.SameAs(original), "Copy should be a new instance"); - Assert.That(copy.Key, Is.EqualTo(original.Key), "Key should be equal"); - Assert.That(copy.Key.Array, Is.Not.SameAs(original.Key.Array), "Key should be a copy of the original"); + Assert.That(copy.GetPrefix(), Is.EqualTo(original.GetPrefix()), "Key should be equal"); + Assert.That(copy.GetPrefix().Array, Is.Not.SameAs(original.GetPrefix().Array), "Key should be a copy of the original"); Assert.That(copy, Is.EqualTo(original), "Copy and original should be considered equal"); Assert.That(copy.ToString(), Is.EqualTo(original.ToString()), "Copy and original should have the same string representation"); @@ -107,49 +106,33 @@ public void Test_Subspace_Copy_Does_Not_Share_Key_Buffer() [Test] public void Test_Cannot_Create_Or_Partition_Subspace_With_Slice_Nil() { - Assert.That(() => new FdbSubspace(Slice.Nil), Throws.ArgumentException); - Assert.That(() => FdbSubspace.Create(Slice.Nil), Throws.ArgumentException); + Assert.That(() => new KeySubspace(Slice.Nil), Throws.ArgumentException); + Assert.That(() => KeySubspace.FromKey(Slice.Nil), Throws.ArgumentException); //FIXME: typed subspaces 
refactoring ! //Assert.That(() => FdbSubspace.Empty.Partition[Slice.Nil], Throws.ArgumentException); //Assert.That(() => FdbSubspace.Create(FdbKey.Directory).Partition[Slice.Nil], Throws.ArgumentException); } - [Test] - public void Test_Cannot_Create_Or_Partition_Subspace_With_Null_Tuple() - { - Assert.That(() => FdbSubspace.Create(default(IFdbTuple)), Throws.InstanceOf()); - //FIXME: typed subspaces refactoring ! - //Assert.That(() => FdbSubspace.Empty.Partition[default(IFdbTuple)], Throws.InstanceOf()); - //Assert.That(() => FdbSubspace.Create(FdbKey.Directory).Partition[default(IFdbTuple)], Throws.InstanceOf()); - } - [Test] [Category("LocalCluster")] public void Test_Subspace_With_Tuple_Prefix() { - var subspace = FdbSubspace.CreateDynamic(FdbTuple.Create("hello")); + var subspace = KeySubspace.CreateDynamic(TuPack.EncodeKey("hello")); - Assert.That(subspace.Key.ToString(), Is.EqualTo("<02>hello<00>")); - Assert.That(FdbSubspace.Copy(subspace), Is.Not.SameAs(subspace)); - Assert.That(FdbSubspace.Copy(subspace).Key, Is.EqualTo(subspace.Key)); + Assert.That(subspace.GetPrefix().ToString(), Is.EqualTo("<02>hello<00>")); + Assert.That(subspace.Copy(), Is.Not.SameAs(subspace)); + Assert.That(subspace.Copy().GetPrefix(), Is.EqualTo(subspace.GetPrefix())); // concat(Slice) should append the slice to the tuple prefix directly - Assert.That(subspace.ConcatKey(Slice.FromInt32(0x01020304)).ToString(), Is.EqualTo("<02>hello<00><04><03><02><01>")); - Assert.That(subspace.ConcatKey(Slice.FromAscii("world")).ToString(), Is.EqualTo("<02>hello<00>world")); + Assert.That(subspace[Slice.FromInt32(0x01020304)].ToString(), Is.EqualTo("<02>hello<00><04><03><02><01>")); + Assert.That(subspace[Slice.FromStringAscii("world")].ToString(), Is.EqualTo("<02>hello<00>world")); // pack(...) 
should use tuple serialization Assert.That(subspace.Keys.Encode(123).ToString(), Is.EqualTo("<02>hello<00><15>{")); Assert.That(subspace.Keys.Encode("world").ToString(), Is.EqualTo("<02>hello<00><02>world<00>")); // even though the subspace prefix is a tuple, appending to it will only return the new items - var t = subspace.Keys.Append("world", 123, false); - Assert.That(t, Is.Not.Null); - Assert.That(t.Count, Is.EqualTo(3)); - Assert.That(t.Get(0), Is.EqualTo("world")); - Assert.That(t.Get(1), Is.EqualTo(123)); - Assert.That(t.Get(2), Is.False); - // but ToSlice() should include the prefix - var k = t.ToSlice(); + var k = subspace.Keys.Pack(("world", 123, false)); Assert.That(k.ToString(), Is.EqualTo("<02>hello<00><02>world<00><15>{<14>")); // if we unpack the key with the binary prefix, we should get a valid tuple @@ -166,59 +149,197 @@ public void Test_Subspace_With_Tuple_Prefix() public void Test_Subspace_Partitioning_With_Binary_Suffix() { // start from a parent subspace - var parent = FdbSubspace.Empty.Using(TypeSystem.Tuples); - Assert.That(parent.Key.ToString(), Is.EqualTo("")); + var parent = KeySubspace.CreateDynamic(Slice.Empty); + Assert.That(parent.GetPrefix().ToString(), Is.EqualTo("")); // create a child subspace using a tuple var child = parent.Partition[FdbKey.Directory]; Assert.That(child, Is.Not.Null); - Assert.That(child.Key.ToString(), Is.EqualTo("")); + Assert.That(child.GetPrefix().ToString(), Is.EqualTo("")); // create a key from this child subspace - var key = child.ConcatKey(Slice.FromFixed32(0x01020304)); + var key = child[Slice.FromFixed32(0x01020304)]; Assert.That(key.ToString(), Is.EqualTo("<04><03><02><01>")); // create another child - var grandChild = child.Partition[Slice.FromAscii("hello")]; + var grandChild = child.Partition[Slice.FromStringAscii("hello")]; Assert.That(grandChild, Is.Not.Null); - Assert.That(grandChild.Key.ToString(), Is.EqualTo("hello")); + Assert.That(grandChild.GetPrefix().ToString(), Is.EqualTo("hello")); - 
key = grandChild.ConcatKey(Slice.FromFixed32(0x01020304)); + key = grandChild[Slice.FromFixed32(0x01020304)]; Assert.That(key.ToString(), Is.EqualTo("hello<04><03><02><01>")); // cornercase - Assert.That(child.Partition[Slice.Empty].Key, Is.EqualTo(child.Key)); + Assert.That(child.Partition[Slice.Empty].GetPrefix(), Is.EqualTo(child.GetPrefix())); } [Test] - [Category("LocalCluster")] - public void Test_Subspace_Partitioning_With_Tuple_Suffix() + public void Test_DynamicKeySpace_API() { - // start from a parent subspace - var parent = FdbSubspace.CreateDynamic(Slice.FromByte(254), TypeSystem.Tuples); - Assert.That(parent.Key.ToString(), Is.EqualTo("")); + var location = KeySubspace.CreateDynamic(Slice.FromString("PREFIX")); + + Assert.That(location[Slice.FromString("SUFFIX")].ToString(), Is.EqualTo("PREFIXSUFFIX")); + + // Encode(...) + Assert.That(location.Keys.Encode("hello").ToString(), Is.EqualTo("PREFIX<02>hello<00>")); + Assert.That(location.Keys.Encode("hello", 123).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{")); + Assert.That(location.Keys.Encode("hello", 123, "world").ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00>")); + Assert.That(location.Keys.Encode("hello", 123, "world", 456).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01>")); + Assert.That(location.Keys.Encode("hello", 123, "world", 456, "!").ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00>")); + Assert.That(location.Keys.Encode("hello", 123, "world", 456, "!", 789).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00><16><03><15>")); + + // Pack(ITuple) + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello")).ToString(), Is.EqualTo("PREFIX<02>hello<00>")); + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello", 123)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{")); + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello", 123, "world")).ToString(), 
Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00>")); + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello", 123, "world", 456)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01>")); + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello", 123, "world", 456, "!")).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00>")); + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello", 123, "world", 456, "!", 789)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00><16><03><15>")); + + // Pack(ValueTuple) + Assert.That(location.Keys.Pack(ValueTuple.Create("hello")).ToString(), Is.EqualTo("PREFIX<02>hello<00>")); + Assert.That(location.Keys.Pack(("hello", 123)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{")); + Assert.That(location.Keys.Pack(("hello", 123, "world")).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00>")); + Assert.That(location.Keys.Pack(("hello", 123, "world", 456)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01>")); + Assert.That(location.Keys.Pack(("hello", 123, "world", 456, "!")).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00>")); + Assert.That(location.Keys.Pack(("hello", 123, "world", 456, "!", 789)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00><16><03><15>")); + + // ITuple Unpack(Slice) + Assert.That(location.Keys.Unpack(Slice.Unescape("PREFIX<02>hello<00>")), Is.EqualTo(STuple.Create("hello"))); + Assert.That(location.Keys.Unpack(Slice.Unescape("PREFIX<02>hello<00><15>{")), Is.EqualTo(STuple.Create("hello", 123))); + Assert.That(location.Keys.Unpack(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00>")), Is.EqualTo(STuple.Create("hello", 123, "world"))); + Assert.That(location.Keys.Unpack(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01>")), Is.EqualTo(STuple.Create("hello", 123, "world", 456))); + 
Assert.That(location.Keys.Unpack(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00>")), Is.EqualTo(STuple.Create("hello", 123, "world", 456, "!"))); + Assert.That(location.Keys.Unpack(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00><16><03><15>")), Is.EqualTo(STuple.Create("hello", 123, "world", 456, "!", 789))); + + // STuple Decode(Slice) + Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00>")), Is.EqualTo("hello")); + Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{")), Is.EqualTo(("hello", 123))); + Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00>")), Is.EqualTo(("hello", 123, "world"))); + Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01>")), Is.EqualTo(("hello", 123, "world", 456))); + Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00>")), Is.EqualTo(("hello", 123, "world", 456, "!"))); + Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00><16><03><15>")), Is.EqualTo(("hello", 123, "world", 456, "!", 789))); + + // DecodeFirst/DecodeLast + Assert.That(location.Keys.DecodeFirst(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00><16><03><15>")), Is.EqualTo("hello")); + Assert.That(location.Keys.DecodeLast(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01><02>!<00><16><03><15>")), Is.EqualTo(789)); - // create a child subspace using a tuple - var child = parent.Partition.ByKey("hca"); - Assert.That(child, Is.Not.Null); - Assert.That(child.Key.ToString(), Is.EqualTo("<02>hca<00>")); + } - // create a tuple from this child subspace - var tuple = child.Keys.Append(123); - Assert.That(tuple, Is.Not.Null); - Assert.That(tuple.ToSlice().ToString(), Is.EqualTo("<02>hca<00><15>{")); + [Test] + public void Test_TypedKeySpace_T1() + { + var location = 
KeySubspace.CreateTyped(Slice.FromString("PREFIX")); + Assert.That(location.KeyEncoder, Is.Not.Null, "Should have a Key Encoder"); + Assert.That(location.KeyEncoder.Encoding, Is.SameAs(TuPack.Encoding), "Encoder should use Tuple type system"); - // derive another tuple from this one - var t1 = tuple.Append(false); - Assert.That(t1.ToSlice().ToString(), Is.EqualTo("<02>hca<00><15>{<14>")); + // shortcuts + Assert.That(location[Slice.FromString("SUFFIX")].ToString(), Is.EqualTo("PREFIXSUFFIX")); + Assert.That(location.Keys["hello"].ToString(), Is.EqualTo("PREFIX<02>hello<00>")); + Assert.That(location.Keys[ValueTuple.Create("hello")].ToString(), Is.EqualTo("PREFIX<02>hello<00>")); - // check that we could also create the same tuple starting from the parent subspace - var t2 = parent.Keys.Append("hca", 123, false); - Assert.That(t2.ToSlice(), Is.EqualTo(t1.ToSlice())); + // Encode(...) + Assert.That(location.Keys.Encode("hello").ToString(), Is.EqualTo("PREFIX<02>hello<00>")); - // cornercase - Assert.That(child.Partition[FdbTuple.Empty].Key, Is.EqualTo(child.Key)); + // Pack(ITuple) + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello")).ToString(), Is.EqualTo("PREFIX<02>hello<00>")); + + // Pack(ValueTuple) + Assert.That(location.Keys.Pack(ValueTuple.Create("hello")).ToString(), Is.EqualTo("PREFIX<02>hello<00>")); + + // STuple Decode(...) 
+ Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00>")), Is.EqualTo("hello")); + + // Decode(..., out T) + location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00>"), out string x); + Assert.That(x, Is.EqualTo("hello")); + } + + [Test] + public void Test_TypedKeySpace_T2() + { + var location = KeySubspace.CreateTyped(Slice.FromString("PREFIX")); + + // shortcuts + Assert.That(location[Slice.FromString("SUFFIX")].ToString(), Is.EqualTo("PREFIXSUFFIX")); + Assert.That(location.Keys["hello", 123].ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{")); + Assert.That(location.Keys[("hello", 123)].ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{")); + + // Encode(...) + Assert.That(location.Keys.Encode("hello", 123).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{")); + + // Pack(ITuple) + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello", 123)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{")); + + // Pack(ValueTuple) + Assert.That(location.Keys.Pack(("hello", 123)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{")); + + // STuple Decode(...) + Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{")), Is.EqualTo(("hello", 123))); + + // Decode(..., out T) + location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{"), out string x1, out int x2); + Assert.That(x1, Is.EqualTo("hello")); + Assert.That(x2, Is.EqualTo(123)); + } + + [Test] + public void Test_TypedKeySpace_T3() + { + var location = KeySubspace.CreateTyped(Slice.FromString("PREFIX")); + + // shortcuts + Assert.That(location[Slice.FromString("SUFFIX")].ToString(), Is.EqualTo("PREFIXSUFFIX")); + Assert.That(location.Keys["hello", 123, "world"].ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00>")); + Assert.That(location.Keys[("hello", 123, "world")].ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00>")); + + // Encode(...) 
+ Assert.That(location.Keys.Encode("hello", 123, "world").ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00>")); + + // Pack(ITuple) + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello", 123, "world")).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00>")); + + // Pack(ValueTuple) + Assert.That(location.Keys.Pack(("hello", 123, "world")).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00>")); + + // STuple Decode(...) + Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00>")), Is.EqualTo(("hello", 123, "world"))); + + // Decode(..., out T) + location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00>"), out string x1, out int x2, out string x3); + Assert.That(x1, Is.EqualTo("hello")); + Assert.That(x2, Is.EqualTo(123)); + Assert.That(x3, Is.EqualTo("world")); + } + + [Test] + public void Test_TypedKeySpace_T4() + { + var location = KeySubspace.CreateTyped(Slice.FromString("PREFIX")); + + // shortcuts + Assert.That(location[Slice.FromString("SUFFIX")].ToString(), Is.EqualTo("PREFIXSUFFIX")); + Assert.That(location.Keys["hello", 123, "world", 456].ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01>")); + Assert.That(location.Keys[("hello", 123, "world", 456)].ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01>")); + + // Encode(...) + Assert.That(location.Keys.Encode("hello", 123, "world", 456).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01>")); + + // Pack(ITuple) + Assert.That(location.Keys.Pack((ITuple) STuple.Create("hello", 123, "world", 456)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01>")); + + // Pack(ValueTuple) + Assert.That(location.Keys.Pack(("hello", 123, "world", 456)).ToString(), Is.EqualTo("PREFIX<02>hello<00><15>{<02>world<00><16><01>")); + + // STuple Decode(...) 
+ Assert.That(location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01>")), Is.EqualTo(("hello", 123, "world", 456))); + // Decode(..., out T) + location.Keys.Decode(Slice.Unescape("PREFIX<02>hello<00><15>{<02>world<00><16><01>"), out string x1, out int x2, out string x3, out int x4); + Assert.That(x1, Is.EqualTo("hello")); + Assert.That(x2, Is.EqualTo(123)); + Assert.That(x3, Is.EqualTo("world")); + Assert.That(x4, Is.EqualTo(456)); } } diff --git a/FoundationDB.Tests/TestHelpers.cs b/FoundationDB.Tests/TestHelpers.cs index fa943156e..72bec8f9d 100644 --- a/FoundationDB.Tests/TestHelpers.cs +++ b/FoundationDB.Tests/TestHelpers.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -36,6 +36,7 @@ namespace FoundationDB.Client.Tests using System; using System.Threading; using System.Threading.Tasks; + using Doxense.Collections.Tuples; internal static class TestHelpers { @@ -43,7 +44,7 @@ internal static class TestHelpers public static readonly string TestClusterFile = null; public static readonly string TestDbName = "DB"; - public static readonly Slice TestGlobalPrefix = Slice.FromAscii("T"); + public static readonly Slice TestGlobalPrefix = Slice.FromStringAscii("T"); public static readonly string[] TestPartition = new string[] { "Tests", Environment.MachineName }; public static readonly int DefaultTimeout = 15 * 1000; @@ -52,7 +53,7 @@ internal static class TestHelpers /// Connect to the local test database public static Task OpenTestDatabaseAsync(CancellationToken ct) { - var subspace = new FdbSubspace(TestGlobalPrefix.Memoize()); + var subspace = new KeySubspace(TestGlobalPrefix.Memoize()); return Fdb.OpenAsync(TestClusterFile, TestDbName, subspace, false, ct); } @@ -78,14 +79,14 @@ public static async Task GetCleanDirectory([NotNull] IFdbD // create new var subspace = await 
db.Directory.CreateAsync(path, ct); Assert.That(subspace, Is.Not.Null); - Assert.That(db.GlobalSpace.Contains(subspace.Key), Is.True); + Assert.That(db.GlobalSpace.Contains(subspace.GetPrefix()), Is.True); return subspace; } - public static async Task DumpSubspace([NotNull] IFdbDatabase db, [NotNull] IFdbSubspace subspace, CancellationToken ct) + public static async Task DumpSubspace([NotNull] IFdbDatabase db, [NotNull] IKeySubspace subspace, CancellationToken ct) { Assert.That(db, Is.Not.Null); - Assert.That(db.GlobalSpace.Contains(subspace.ToFoundationDbKey()), Is.True, "Using a location outside of the test database partition!!! This is probably a bug in the test..."); + Assert.That(db.GlobalSpace.Contains(subspace.GetPrefix()), Is.True, "Using a location outside of the test database partition!!! This is probably a bug in the test..."); // do not log db = db.WithoutLogging(); @@ -96,14 +97,14 @@ public static async Task DumpSubspace([NotNull] IFdbDatabase db, [NotNull] IFdbS } } - public static async Task DumpSubspace([NotNull] IFdbReadOnlyTransaction tr, [NotNull] IFdbSubspace subspace) + public static async Task DumpSubspace([NotNull] IFdbReadOnlyTransaction tr, [NotNull] IKeySubspace subspace) { Assert.That(tr, Is.Not.Null); - Console.WriteLine("Dumping content of subspace " + subspace.ToString() + " :"); + FdbTest.Log("Dumping content of subspace " + subspace.ToString() + " :"); int count = 0; await tr - .GetRange(FdbKeyRange.StartsWith(subspace.ToFoundationDbKey())) + .GetRange(KeyRange.StartsWith(subspace.GetPrefix())) .ForEachAsync((kvp) => { var key = subspace.ExtractKey(kvp.Key, boundCheck: true); @@ -112,7 +113,7 @@ await tr try { // attemps decoding it as a tuple - keyDump = key.ToTuple().ToString(); + keyDump = TuPack.Unpack(key).ToString(); } catch (Exception) { @@ -120,13 +121,13 @@ await tr keyDump = "'" + key.ToString() + "'"; } - Console.WriteLine("- " + keyDump + " = " + kvp.Value.ToString()); + FdbTest.Log("- " + keyDump + " = " + 
kvp.Value.ToString()); }); if (count == 0) - Console.WriteLine("> empty !"); + FdbTest.Log("> empty !"); else - Console.WriteLine("> Found " + count + " values"); + FdbTest.Log("> Found " + count + " values"); } public static async Task AssertThrowsFdbErrorAsync([NotNull] Func asyncTest, FdbError expectedCode, string message = null, object[] args = null) diff --git a/FoundationDB.Tests/TransactionFacts.cs b/FoundationDB.Tests/TransactionFacts.cs index de4d4e3cb..2aeb40b7d 100644 --- a/FoundationDB.Tests/TransactionFacts.cs +++ b/FoundationDB.Tests/TransactionFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,7 +28,6 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client.Tests { - using FoundationDB.Layers.Tuples; using NUnit.Framework; using System; using System.Collections.Generic; @@ -43,6 +42,7 @@ public class TransactionFacts : FdbTest { [Test] + public async Task Test_Can_Create_And_Dispose_Transactions() { using (var db = await OpenTestDatabaseAsync()) @@ -56,7 +56,7 @@ public async Task Test_Can_Create_And_Dispose_Transactions() Assert.That(tr.StillAlive, Is.True, "Transaction should be alive"); Assert.That(tr.Handler.IsClosed, Is.False, "Transaction handle should not be closed"); Assert.That(tr.Database, Is.SameAs(db), "Transaction should reference the parent Database"); - Assert.That(tr.Size, Is.EqualTo(0), "Estimated size should be zero"); + Assert.That(tr.Size, Is.Zero, "Estimated size should be zero"); Assert.That(tr.IsReadOnly, Is.False, "Transaction is not read-only"); Assert.That(tr.IsSnapshot, Is.False, "Transaction is not in snapshot mode by default"); @@ -153,8 +153,8 @@ public async Task Test_Creating_Concurrent_Transactions_Are_Independent() } finally { - if (tr1 != null) tr1.Dispose(); - if (tr2 != null) tr2.Dispose(); + tr1?.Dispose(); + tr2?.Dispose(); 
} } @@ -259,7 +259,7 @@ public async Task Test_Cancelling_Transaction_During_Commit_Should_Abort_Task() // Writes about 5 MB of stuff in 100k chunks for (int i = 0; i < 50; i++) { - tr.Set(location.Keys.Encode(i), Slice.Random(rnd, 100 * 1000)); + tr.Set(location.Keys.Encode(i), Slice.Random(rnd, 100_000)); } // start commiting @@ -270,11 +270,7 @@ public async Task Test_Cancelling_Transaction_During_Commit_Should_Abort_Task() Assume.That(t.IsCompleted, Is.False, "Commit task already completed before having a chance to cancel"); tr.Cancel(); - await TestHelpers.AssertThrowsFdbErrorAsync( - () => t, - FdbError.TransactionCancelled, - "Cancelling a transaction that is writing to the server should fail the commit task" - ); + Assert.That(async () => await t, Throws.InstanceOf(), "Cancelling a transaction that is writing to the server should fail the commit task"); } } } @@ -312,7 +308,7 @@ public async Task Test_Cancelling_Token_During_Commit_Should_Abort_Task() Assume.That(t.IsCompleted, Is.False, "Commit task already completed before having a chance to cancel"); cts.Cancel(); - Assert.Throws(async () => await t, "Cancelling a token passed to CommitAsync that is still pending should cancel the task"); + Assert.That(async () => await t, Throws.InstanceOf(), "Cancelling a token passed to CommitAsync that is still pending should cancel the task"); } } } @@ -352,7 +348,7 @@ public async Task Test_Write_And_Read_Simple_Keys() { tr.Set(location.Keys.Encode("hello"), Slice.FromString("World!")); tr.Set(location.Keys.Encode("timestamp"), Slice.FromInt64(ticks)); - tr.Set(location.Keys.Encode("blob"), Slice.Create(new byte[] { 42, 123, 7 })); + tr.Set(location.Keys.Encode("blob"), new byte[] { 42, 123, 7 }.AsSlice()); await tr.CommitAsync(); @@ -393,8 +389,8 @@ public async Task Test_Can_Resolve_Key_Selector() var location = db.Partition.ByKey("keys"); await db.ClearRangeAsync(location, this.Cancellation); - var minKey = location.Key + FdbKey.MinValue; - var maxKey = 
location.Key + FdbKey.MaxValue; + var minKey = location.GetPrefix() + FdbKey.MinValue; + var maxKey = location.GetPrefix() + FdbKey.MaxValue; #region Insert a bunch of keys ... using (var tr = db.BeginTransaction(this.Cancellation)) @@ -415,44 +411,44 @@ public async Task Test_Can_Resolve_Key_Selector() using (var tr = db.BeginTransaction(this.Cancellation)) { - FdbKeySelector sel; + KeySelector sel; // >= 0 - sel = FdbKeySelector.FirstGreaterOrEqual(location.Keys.Encode(0)); + sel = KeySelector.FirstGreaterOrEqual(location.Keys.Encode(0)); Assert.That(await tr.GetKeyAsync(sel), Is.EqualTo(location.Keys.Encode(0)), "fGE(0) should return 0"); Assert.That(await tr.GetKeyAsync(sel - 1), Is.EqualTo(minKey), "fGE(0)-1 should return minKey"); Assert.That(await tr.GetKeyAsync(sel + 1), Is.EqualTo(location.Keys.Encode(1)), "fGE(0)+1 should return 1"); // > 0 - sel = FdbKeySelector.FirstGreaterThan(location.Keys.Encode(0)); + sel = KeySelector.FirstGreaterThan(location.Keys.Encode(0)); Assert.That(await tr.GetKeyAsync(sel), Is.EqualTo(location.Keys.Encode(1)), "fGT(0) should return 1"); Assert.That(await tr.GetKeyAsync(sel - 1), Is.EqualTo(location.Keys.Encode(0)), "fGT(0)-1 should return 0"); Assert.That(await tr.GetKeyAsync(sel + 1), Is.EqualTo(location.Keys.Encode(2)), "fGT(0)+1 should return 2"); // <= 10 - sel = FdbKeySelector.LastLessOrEqual(location.Keys.Encode(10)); + sel = KeySelector.LastLessOrEqual(location.Keys.Encode(10)); Assert.That(await tr.GetKeyAsync(sel), Is.EqualTo(location.Keys.Encode(10)), "lLE(10) should return 10"); Assert.That(await tr.GetKeyAsync(sel - 1), Is.EqualTo(location.Keys.Encode(9)), "lLE(10)-1 should return 9"); Assert.That(await tr.GetKeyAsync(sel + 1), Is.EqualTo(location.Keys.Encode(11)), "lLE(10)+1 should return 11"); // < 10 - sel = FdbKeySelector.LastLessThan(location.Keys.Encode(10)); + sel = KeySelector.LastLessThan(location.Keys.Encode(10)); Assert.That(await tr.GetKeyAsync(sel), Is.EqualTo(location.Keys.Encode(9)), "lLT(10) 
should return 9"); Assert.That(await tr.GetKeyAsync(sel - 1), Is.EqualTo(location.Keys.Encode(8)), "lLT(10)-1 should return 8"); Assert.That(await tr.GetKeyAsync(sel + 1), Is.EqualTo(location.Keys.Encode(10)), "lLT(10)+1 should return 10"); // < 0 - sel = FdbKeySelector.LastLessThan(location.Keys.Encode(0)); + sel = KeySelector.LastLessThan(location.Keys.Encode(0)); Assert.That(await tr.GetKeyAsync(sel), Is.EqualTo(minKey), "lLT(0) should return minKey"); Assert.That(await tr.GetKeyAsync(sel + 1), Is.EqualTo(location.Keys.Encode(0)), "lLT(0)+1 should return 0"); // >= 20 - sel = FdbKeySelector.FirstGreaterOrEqual(location.Keys.Encode(20)); + sel = KeySelector.FirstGreaterOrEqual(location.Keys.Encode(20)); Assert.That(await tr.GetKeyAsync(sel), Is.EqualTo(maxKey), "fGE(20) should return maxKey"); Assert.That(await tr.GetKeyAsync(sel - 1), Is.EqualTo(location.Keys.Encode(19)), "fGE(20)-1 should return 19"); // > 19 - sel = FdbKeySelector.FirstGreaterThan(location.Keys.Encode(19)); + sel = KeySelector.FirstGreaterThan(location.Keys.Encode(19)); Assert.That(await tr.GetKeyAsync(sel), Is.EqualTo(maxKey), "fGT(19) should return maxKey"); Assert.That(await tr.GetKeyAsync(sel - 1), Is.EqualTo(location.Keys.Encode(19)), "fGT(19)-1 should return 19"); } @@ -482,50 +478,50 @@ public async Task Test_Can_Resolve_Key_Selector_Outside_Boundaries() Slice key; // note: we can't have any prefix on the keys, so open the test database in read-only mode - using (var db = await Fdb.OpenAsync(TestHelpers.TestClusterFile, TestHelpers.TestDbName, FdbSubspace.Empty, readOnly: true, cancellationToken: this.Cancellation)) + using (var db = await Fdb.OpenAsync(TestHelpers.TestClusterFile, TestHelpers.TestDbName, KeySubspace.Empty, readOnly: true, ct: this.Cancellation)) { using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) { // before <00> - key = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(FdbKey.MinValue)); + key = await 
tr.GetKeyAsync(KeySelector.LastLessThan(FdbKey.MinValue)); Assert.That(key, Is.EqualTo(Slice.Empty), "lLT(<00>) => ''"); // before the first key in the db - var minKey = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(FdbKey.MinValue)); + var minKey = await tr.GetKeyAsync(KeySelector.FirstGreaterOrEqual(FdbKey.MinValue)); Assert.That(minKey, Is.Not.Null); - key = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(minKey)); + key = await tr.GetKeyAsync(KeySelector.LastLessThan(minKey)); Assert.That(key, Is.EqualTo(Slice.Empty), "lLT(min_key) => ''"); // after the last key in the db - var maxKey = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(FdbKey.MaxValue)); + var maxKey = await tr.GetKeyAsync(KeySelector.LastLessThan(FdbKey.MaxValue)); Assert.That(maxKey, Is.Not.Null); - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(maxKey)); + key = await tr.GetKeyAsync(KeySelector.FirstGreaterThan(maxKey)); Assert.That(key, Is.EqualTo(FdbKey.MaxValue), "fGT(maxKey) => "); // after - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(FdbKey.MaxValue)); + key = await tr.GetKeyAsync(KeySelector.FirstGreaterThan(FdbKey.MaxValue)); Assert.That(key, Is.EqualTo(FdbKey.MaxValue), "fGT() => "); - Assert.That(async () => await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(FdbKey.MaxValue + FdbKey.MaxValue)), Throws.InstanceOf().With.Property("Code").EqualTo(FdbError.KeyOutsideLegalRange)); - Assert.That(async () => await tr.GetKeyAsync(FdbKeySelector.LastLessThan(Fdb.System.MinValue)), Throws.InstanceOf().With.Property("Code").EqualTo(FdbError.KeyOutsideLegalRange)); + Assert.That(async () => await tr.GetKeyAsync(KeySelector.FirstGreaterThan(FdbKey.MaxValue + FdbKey.MaxValue)), Throws.InstanceOf().With.Property("Code").EqualTo(FdbError.KeyOutsideLegalRange)); + Assert.That(async () => await tr.GetKeyAsync(KeySelector.LastLessThan(Fdb.System.MinValue)), Throws.InstanceOf().With.Property("Code").EqualTo(FdbError.KeyOutsideLegalRange)); 
tr.WithReadAccessToSystemKeys(); - var firstSystemKey = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(FdbKey.MaxValue)); + var firstSystemKey = await tr.GetKeyAsync(KeySelector.FirstGreaterThan(FdbKey.MaxValue)); // usually the first key in the system space is /backupDataFormat, but that may change in the future version. Assert.That(firstSystemKey, Is.Not.Null); Assert.That(firstSystemKey, Is.GreaterThan(FdbKey.MaxValue), "key should be between and "); Assert.That(firstSystemKey, Is.LessThan(Fdb.System.MaxValue), "key should be between and "); // with access to system keys, the maximum possible key becomes - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(Fdb.System.MaxValue)); + key = await tr.GetKeyAsync(KeySelector.FirstGreaterOrEqual(Fdb.System.MaxValue)); Assert.That(key, Is.EqualTo(Fdb.System.MaxValue), "fGE() => (with access to system keys)"); - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(Fdb.System.MaxValue)); + key = await tr.GetKeyAsync(KeySelector.FirstGreaterThan(Fdb.System.MaxValue)); Assert.That(key, Is.EqualTo(Fdb.System.MaxValue), "fGT() => (with access to system keys)"); - key = await tr.GetKeyAsync(FdbKeySelector.LastLessThan(Fdb.System.MinValue)); + key = await tr.GetKeyAsync(KeySelector.LastLessThan(Fdb.System.MinValue)); Assert.That(key, Is.EqualTo(maxKey), "lLT(<00>) => max_key (with access to system keys)"); - key = await tr.GetKeyAsync(FdbKeySelector.FirstGreaterThan(maxKey)); + key = await tr.GetKeyAsync(KeySelector.FirstGreaterThan(maxKey)); Assert.That(key, Is.EqualTo(firstSystemKey), "fGT(max_key) => first_system_key (with access to system keys)"); } @@ -585,8 +581,8 @@ public async Task Test_Get_Multiple_Keys() var location = db.Partition.ByKey("keys"); await db.ClearRangeAsync(location, this.Cancellation); - var minKey = location.Key + FdbKey.MinValue; - var maxKey = location.Key + FdbKey.MaxValue; + var minKey = location.GetPrefix() + FdbKey.MinValue; + var maxKey = location.GetPrefix() + 
FdbKey.MaxValue; #region Insert a bunch of keys ... using (var tr = db.BeginTransaction(this.Cancellation)) @@ -607,7 +603,7 @@ public async Task Test_Get_Multiple_Keys() using (var tr = db.BeginTransaction(this.Cancellation)) { - var selectors = Enumerable.Range(0, N).Select((i) => FdbKeySelector.FirstGreaterOrEqual(location.Keys.Encode(i))).ToArray(); + var selectors = Enumerable.Range(0, N).Select((i) => KeySelector.FirstGreaterOrEqual(location.Keys.Encode(i))).ToArray(); // GetKeysAsync([]) var results = await tr.GetKeysAsync(selectors); @@ -619,7 +615,7 @@ public async Task Test_Get_Multiple_Keys() } // GetKeysAsync(cast to enumerable) - var results2 = await tr.GetKeysAsync((IEnumerable)selectors); + var results2 = await tr.GetKeysAsync((IEnumerable)selectors); Assert.That(results2, Is.EqualTo(results)); // GetKeysAsync(real enumerable) @@ -958,7 +954,7 @@ public async Task Test_GetRange_With_Concurrent_Change_Should_Conflict() await db.WriteAsync((tr) => { tr.ClearRange(loc); - tr.Set(loc.Keys.Encode("foo", 50), Slice.FromAscii("fifty")); + tr.Set(loc.Keys.Encode("foo", 50), Slice.FromString("fifty")); }, this.Cancellation); // we will read the first key from [0, 100), expected 50 @@ -976,7 +972,7 @@ await db.WriteAsync((tr) => // 42 < 50 > conflict !!! 
using (var tr2 = db.BeginTransaction(this.Cancellation)) { - tr2.Set(loc.Keys.Encode("foo", 42), Slice.FromAscii("forty-two")); + tr2.Set(loc.Keys.Encode("foo", 42), Slice.FromString("forty-two")); await tr2.CommitAsync(); } @@ -993,7 +989,7 @@ await db.WriteAsync((tr) => await db.WriteAsync((tr) => { tr.ClearRange(loc); - tr.Set(loc.Keys.Encode("foo", 50), Slice.FromAscii("fifty")); + tr.Set(loc.Keys.Encode("foo", 50), Slice.FromString("fifty")); }, this.Cancellation); using (var tr1 = db.BeginTransaction(this.Cancellation)) @@ -1007,7 +1003,7 @@ await db.WriteAsync((tr) => // 77 > 50 => no conflict using (var tr2 = db.BeginTransaction(this.Cancellation)) { - tr2.Set(loc.Keys.Encode("foo", 77), Slice.FromAscii("docm")); + tr2.Set(loc.Keys.Encode("foo", 77), Slice.FromString("docm")); await tr2.CommitAsync(); } @@ -1032,7 +1028,7 @@ public async Task Test_GetKey_With_Concurrent_Change_Should_Conflict() await db.WriteAsync((tr) => { tr.ClearRange(loc); - tr.Set(loc.Keys.Encode("foo", 50), Slice.FromAscii("fifty")); + tr.Set(loc.Keys.Encode("foo", 50), Slice.FromString("fifty")); }, this.Cancellation); // we will ask for the first key from >= 0, expecting 50, but if another transaction inserts something BEFORE 50, our key selector would have returned a different result, causing a conflict @@ -1040,13 +1036,13 @@ await db.WriteAsync((tr) => using (var tr1 = db.BeginTransaction(this.Cancellation)) { // fGE{0} => 50 - var key = await tr1.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(loc.Keys.Encode("foo", 0))); + var key = await tr1.GetKeyAsync(KeySelector.FirstGreaterOrEqual(loc.Keys.Encode("foo", 0))); Assert.That(key, Is.EqualTo(loc.Keys.Encode("foo", 50))); // 42 < 50 => conflict !!! 
using (var tr2 = db.BeginTransaction(this.Cancellation)) { - tr2.Set(loc.Keys.Encode("foo", 42), Slice.FromAscii("forty-two")); + tr2.Set(loc.Keys.Encode("foo", 42), Slice.FromString("forty-two")); await tr2.CommitAsync(); } @@ -1061,19 +1057,19 @@ await db.WriteAsync((tr) => await db.WriteAsync((tr) => { tr.ClearRange(loc); - tr.Set(loc.Keys.Encode("foo", 50), Slice.FromAscii("fifty")); + tr.Set(loc.Keys.Encode("foo", 50), Slice.FromString("fifty")); }, this.Cancellation); using (var tr1 = db.BeginTransaction(this.Cancellation)) { // fGE{0} => 50 - var key = await tr1.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(loc.Keys.Encode("foo", 0))); + var key = await tr1.GetKeyAsync(KeySelector.FirstGreaterOrEqual(loc.Keys.Encode("foo", 0))); Assert.That(key, Is.EqualTo(loc.Keys.Encode("foo", 50))); // 77 > 50 => no conflict using (var tr2 = db.BeginTransaction(this.Cancellation)) { - tr2.Set(loc.Keys.Encode("foo", 77), Slice.FromAscii("docm")); + tr2.Set(loc.Keys.Encode("foo", 77), Slice.FromString("docm")); await tr2.CommitAsync(); } @@ -1089,20 +1085,20 @@ await db.WriteAsync((tr) => await db.WriteAsync((tr) => { tr.ClearRange(loc); - tr.Set(loc.Keys.Encode("foo", 50), Slice.FromAscii("fifty")); - tr.Set(loc.Keys.Encode("foo", 100), Slice.FromAscii("one hundred")); + tr.Set(loc.Keys.Encode("foo", 50), Slice.FromString("fifty")); + tr.Set(loc.Keys.Encode("foo", 100), Slice.FromString("one hundred")); }, this.Cancellation); using (var tr1 = db.BeginTransaction(this.Cancellation)) { // fGE{50} + 1 => 100 - var key = await tr1.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(loc.Keys.Encode("foo", 50)) + 1); + var key = await tr1.GetKeyAsync(KeySelector.FirstGreaterOrEqual(loc.Keys.Encode("foo", 50)) + 1); Assert.That(key, Is.EqualTo(loc.Keys.Encode("foo", 100))); // 77 between 50 and 100 => conflict !!! 
using (var tr2 = db.BeginTransaction(this.Cancellation)) { - tr2.Set(loc.Keys.Encode("foo", 77), Slice.FromAscii("docm")); + tr2.Set(loc.Keys.Encode("foo", 77), Slice.FromString("docm")); await tr2.CommitAsync(); } @@ -1118,20 +1114,20 @@ await db.WriteAsync((tr) => await db.WriteAsync((tr) => { tr.ClearRange(loc); - tr.Set(loc.Keys.Encode("foo", 50), Slice.FromAscii("fifty")); - tr.Set(loc.Keys.Encode("foo", 100), Slice.FromAscii("one hundred")); + tr.Set(loc.Keys.Encode("foo", 50), Slice.FromString("fifty")); + tr.Set(loc.Keys.Encode("foo", 100), Slice.FromString("one hundred")); }, this.Cancellation); using (var tr1 = db.BeginTransaction(this.Cancellation)) { // fGT{50} => 100 - var key = await tr1.GetKeyAsync(FdbKeySelector.FirstGreaterThan(loc.Keys.Encode("foo", 50))); + var key = await tr1.GetKeyAsync(KeySelector.FirstGreaterThan(loc.Keys.Encode("foo", 50))); Assert.That(key, Is.EqualTo(loc.Keys.Encode("foo", 100))); // another transaction changes the VALUE of 50 and 100 (but does not change the fact that they exist nor add keys in between) using (var tr2 = db.BeginTransaction(this.Cancellation)) { - tr2.Set(loc.Keys.Encode("foo", 100), Slice.FromAscii("cent")); + tr2.Set(loc.Keys.Encode("foo", 100), Slice.FromString("cent")); await tr2.CommitAsync(); } @@ -1147,14 +1143,14 @@ await db.WriteAsync((tr) => await db.WriteAsync((tr) => { tr.ClearRange(loc); - tr.Set(loc.Keys.Encode("foo", 50), Slice.FromAscii("fifty")); - tr.Set(loc.Keys.Encode("foo", 100), Slice.FromAscii("one hundred")); + tr.Set(loc.Keys.Encode("foo", 50), Slice.FromString("fifty")); + tr.Set(loc.Keys.Encode("foo", 100), Slice.FromString("one hundred")); }, this.Cancellation); using (var tr1 = db.BeginTransaction(this.Cancellation)) { // lLT{100} => 50 - var key = await tr1.GetKeyAsync(FdbKeySelector.LastLessThan(loc.Keys.Encode("foo", 100))); + var key = await tr1.GetKeyAsync(KeySelector.LastLessThan(loc.Keys.Encode("foo", 100))); Assert.That(key, Is.EqualTo(loc.Keys.Encode("foo", 50))); // 
another transaction changes the VALUE of 50 and 100 (but does not change the fact that they exist nor add keys in between) @@ -1252,17 +1248,17 @@ public async Task Test_Read_Isolation_From_Writes() { // By default: // - Regular reads see the writes made by the transaction itself, but not the writes made by other transactions that committed in between - // - Snapshot reads never see the writes made since the transaction read version, including the writes made by the transaction itself + // - Snapshot reads never see the writes made since the transaction read version, but will see the writes made by the transaction itself using (var db = await OpenTestPartitionAsync()) { var location = db.Partition.ByKey("test"); await db.ClearRangeAsync(location, this.Cancellation); - var a = location.Keys.Encode("A"); - var b = location.Keys.Encode("B"); - var c = location.Keys.Encode("C"); - var d = location.Keys.Encode("D"); + var A = location.Keys.Encode("A"); + var B = location.Keys.Encode("B"); + var C = location.Keys.Encode("C"); + var D = location.Keys.Encode("D"); // Reads (before and after): // - A and B will use regular reads @@ -1273,39 +1269,105 @@ public async Task Test_Read_Isolation_From_Writes() await db.WriteAsync((tr) => { - tr.Set(a, Slice.FromString("a")); - tr.Set(b, Slice.FromString("b")); - tr.Set(c, Slice.FromString("c")); - tr.Set(d, Slice.FromString("d")); + tr.Set(A, Slice.FromString("a")); + tr.Set(B, Slice.FromString("b")); + tr.Set(C, Slice.FromString("c")); + tr.Set(D, Slice.FromString("d")); }, this.Cancellation); + Log("Initial db state:"); + await DumpSubspace(db, location); + using (var tr = db.BeginTransaction(this.Cancellation)) { - var aval = await tr.GetAsync(a); - var bval = await tr.GetAsync(b); - var cval = await tr.Snapshot.GetAsync(c); - var dval = await tr.Snapshot.GetAsync(d); - Assert.That(aval.ToUnicode(), Is.EqualTo("a")); - Assert.That(bval.ToUnicode(), Is.EqualTo("b")); - Assert.That(cval.ToUnicode(), Is.EqualTo("c")); - 
Assert.That(dval.ToUnicode(), Is.EqualTo("d")); - tr.Set(a, Slice.FromString("aa")); - tr.Set(c, Slice.FromString("cc")); + // check initial state + Assert.That((await tr.GetAsync(A)).ToStringUtf8(), Is.EqualTo("a")); + Assert.That((await tr.GetAsync(B)).ToStringUtf8(), Is.EqualTo("b")); + Assert.That((await tr.Snapshot.GetAsync(C)).ToStringUtf8(), Is.EqualTo("c")); + Assert.That((await tr.Snapshot.GetAsync(D)).ToStringUtf8(), Is.EqualTo("d")); + + // mutate (not yet comitted) + tr.Set(A, Slice.FromString("aa")); + tr.Set(C, Slice.FromString("cc")); await db.WriteAsync((tr2) => - { - tr2.Set(b, Slice.FromString("bb")); - tr2.Set(d, Slice.FromString("dd")); + { // have another transaction change B and D under our nose + tr2.Set(B, Slice.FromString("bb")); + tr2.Set(D, Slice.FromString("dd")); + }, this.Cancellation); + + // check what the transaction sees + Assert.That((await tr.GetAsync(A)).ToStringUtf8(), Is.EqualTo("aa"), "The transaction own writes should change the value of regular reads"); + Assert.That((await tr.GetAsync(B)).ToStringUtf8(), Is.EqualTo("b"), "Other transaction writes should not change the value of regular reads"); + Assert.That((await tr.Snapshot.GetAsync(C)).ToStringUtf8(), Is.EqualTo("cc"), "The transaction own writes should be visible in snapshot reads"); + Assert.That((await tr.Snapshot.GetAsync(D)).ToStringUtf8(), Is.EqualTo("d"), "Other transaction writes should not change the value of snapshot reads"); + + //note: committing here would conflict + } + } + } + + [Test] + public async Task Test_Read_Isolation_From_Writes_Pre_300() + { + // By in API v200 and below: + // - Regular reads see the writes made by the transaction itself, but not the writes made by other transactions that committed in between + // - Snapshot reads never see the writes made since the transaction read version, but will see the writes made by the transaction itself + // In API 300, this can be emulated by setting the SnapshotReadYourWriteDisable options + + using 
(var db = await OpenTestPartitionAsync()) + { + var location = db.Partition.ByKey("test"); + await db.ClearRangeAsync(location, this.Cancellation); + + var A = location.Keys.Encode("A"); + var B = location.Keys.Encode("B"); + var C = location.Keys.Encode("C"); + var D = location.Keys.Encode("D"); + + // Reads (before and after): + // - A and B will use regular reads + // - C and D will use snapshot reads + // Writes: + // - A and C will be modified by the transaction itself + // - B and D will be modified by a different transaction + + await db.WriteAsync((tr) => + { + tr.Set(A, Slice.FromString("a")); + tr.Set(B, Slice.FromString("b")); + tr.Set(C, Slice.FromString("c")); + tr.Set(D, Slice.FromString("d")); + }, this.Cancellation); + + Log("Initial db state:"); + await DumpSubspace(db, location); + + using (var tr = db.BeginTransaction(this.Cancellation)) + { + tr.SetOption(FdbTransactionOption.SnapshotReadYourWriteDisable); + + // check initial state + Assert.That((await tr.GetAsync(A)).ToStringUtf8(), Is.EqualTo("a")); + Assert.That((await tr.GetAsync(B)).ToStringUtf8(), Is.EqualTo("b")); + Assert.That((await tr.Snapshot.GetAsync(C)).ToStringUtf8(), Is.EqualTo("c")); + Assert.That((await tr.Snapshot.GetAsync(D)).ToStringUtf8(), Is.EqualTo("d")); + + // mutate (not yet comitted) + tr.Set(A, Slice.FromString("aa")); + tr.Set(C, Slice.FromString("cc")); + await db.WriteAsync((tr2) => + { // have another transaction change B and D under our nose + tr2.Set(B, Slice.FromString("bb")); + tr2.Set(D, Slice.FromString("dd")); }, this.Cancellation); - aval = await tr.GetAsync(a); - bval = await tr.GetAsync(b); - cval = await tr.Snapshot.GetAsync(c); - dval = await tr.Snapshot.GetAsync(d); - Assert.That(aval.ToUnicode(), Is.EqualTo("aa"), "The transaction own writes should change the value of regular reads"); - Assert.That(bval.ToUnicode(), Is.EqualTo("b"), "Other transaction writes should not change the value of regular reads"); - Assert.That(cval.ToUnicode(), 
Is.EqualTo("c"), "The transaction own writes should not change the value of snapshot reads"); - Assert.That(dval.ToUnicode(), Is.EqualTo("d"), "Other transaction writes should not change the value of snapshot reads"); + // check what the transaction sees + Assert.That((await tr.GetAsync(A)).ToStringUtf8(), Is.EqualTo("aa"), "The transaction own writes should change the value of regular reads"); + Assert.That((await tr.GetAsync(B)).ToStringUtf8(), Is.EqualTo("b"), "Other transaction writes should not change the value of regular reads"); + //FAIL: test fails here because we read "CC" ?? + Assert.That((await tr.Snapshot.GetAsync(C)).ToStringUtf8(), Is.EqualTo("c"), "The transaction own writes should not change the value of snapshot reads"); + Assert.That((await tr.Snapshot.GetAsync(D)).ToStringUtf8(), Is.EqualTo("d"), "Other transaction writes should not change the value of snapshot reads"); //note: committing here would conflict } @@ -1442,15 +1504,15 @@ public async Task Test_Has_Access_To_System_Keys() // should fail if access to system keys has not been requested await TestHelpers.AssertThrowsFdbErrorAsync( - () => tr.GetRange(Slice.FromAscii("\xFF"), Slice.FromAscii("\xFF\xFF"), new FdbRangeOptions { Limit = 10 }).ToListAsync(), - FdbError.KeyOutsideLegalRange, + () => tr.GetRange(Slice.FromByteString("\xFF"), Slice.FromByteString("\xFF\xFF"), new FdbRangeOptions { Limit = 10 }).ToListAsync(), + FdbError.KeyOutsideLegalRange, "Should not have access to system keys by default" ); // should succeed once system access has been requested tr.WithReadAccessToSystemKeys(); - var keys = await tr.GetRange(Slice.FromAscii("\xFF"), Slice.FromAscii("\xFF\xFF"), new FdbRangeOptions { Limit = 10 }).ToListAsync(); + var keys = await tr.GetRange(Slice.FromByteString("\xFF"), Slice.FromByteString("\xFF\xFF"), new FdbRangeOptions { Limit = 10 }).ToListAsync(); Assert.That(keys, Is.Not.Null); } @@ -1464,9 +1526,9 @@ public async Task Test_Can_Set_Timeout_And_RetryLimit() { using 
(var tr = db.BeginTransaction(this.Cancellation)) { - Assert.That(tr.Timeout, Is.EqualTo(0), "Timeout (default)"); - Assert.That(tr.RetryLimit, Is.EqualTo(0), "RetryLimit (default)"); - Assert.That(tr.MaxRetryDelay, Is.EqualTo(0), "MaxRetryDelay (default)"); + Assert.That(tr.Timeout, Is.Zero, "Timeout (default)"); + Assert.That(tr.RetryLimit, Is.Zero, "RetryLimit (default)"); + Assert.That(tr.MaxRetryDelay, Is.Zero, "MaxRetryDelay (default)"); tr.Timeout = 1000; // 1 sec max tr.RetryLimit = 5; // 5 retries max @@ -1484,9 +1546,9 @@ public async Task Test_Timeout_And_RetryLimit_Inherits_Default_From_Database() { using (var db = await OpenTestDatabaseAsync()) { - Assert.That(db.DefaultTimeout, Is.EqualTo(0), "db.DefaultTimeout (default)"); - Assert.That(db.DefaultRetryLimit, Is.EqualTo(0), "db.DefaultRetryLimit (default)"); - Assert.That(db.DefaultMaxRetryDelay, Is.EqualTo(0), "db.DefaultMaxRetryDelay (default)"); + Assert.That(db.DefaultTimeout, Is.Zero, "db.DefaultTimeout (default)"); + Assert.That(db.DefaultRetryLimit, Is.Zero, "db.DefaultRetryLimit (default)"); + Assert.That(db.DefaultMaxRetryDelay, Is.Zero, "db.DefaultMaxRetryDelay (default)"); db.DefaultTimeout = 500; db.DefaultRetryLimit = 3; @@ -1533,8 +1595,8 @@ public async Task Test_Transaction_RetryLoop_Respects_DefaultRetryLimit_Value() using (var db = await OpenTestDatabaseAsync()) using (var go = new CancellationTokenSource()) { - Assert.That(db.DefaultTimeout, Is.EqualTo(0), "db.DefaultTimeout (default)"); - Assert.That(db.DefaultRetryLimit, Is.EqualTo(0), "db.DefaultRetryLimit (default)"); + Assert.That(db.DefaultTimeout, Is.Zero, "db.DefaultTimeout (default)"); + Assert.That(db.DefaultRetryLimit, Is.Zero, "db.DefaultRetryLimit (default)"); // By default, a transaction that gets reset or retried, clears the RetryLimit and Timeout settings, which needs to be reset everytime. 
// But if the DefaultRetryLimit and DefaultTimeout are set on the database instance, they should automatically be re-applied inside transaction loops! @@ -1583,19 +1645,19 @@ public async Task Test_Transaction_RetryLoop_Resets_RetryLimit_And_Timeout() // simulate a first error tr.RetryLimit = 10; await tr.OnErrorAsync(FdbError.PastVersion); - Assert.That(tr.RetryLimit, Is.EqualTo(0), "Retry limit should be reset"); + Assert.That(tr.RetryLimit, Is.Zero, "Retry limit should be reset"); // simulate some more errors await tr.OnErrorAsync(FdbError.PastVersion); await tr.OnErrorAsync(FdbError.PastVersion); await tr.OnErrorAsync(FdbError.PastVersion); await tr.OnErrorAsync(FdbError.PastVersion); - Assert.That(tr.RetryLimit, Is.EqualTo(0), "Retry limit should be reset"); + Assert.That(tr.RetryLimit, Is.Zero, "Retry limit should be reset"); // we still haven't failed 10 times.. tr.RetryLimit = 10; await tr.OnErrorAsync(FdbError.PastVersion); - Assert.That(tr.RetryLimit, Is.EqualTo(0), "Retry limit should be reset"); + Assert.That(tr.RetryLimit, Is.Zero, "Retry limit should be reset"); // we already have failed 6 times, so this one should abort tr.RetryLimit = 2; // value is too low @@ -1825,7 +1887,7 @@ public async Task Test_Can_Get_Boundary_Keys() // the datacenter id seems to be at offset 40 var dataCenterId = key.Value.Substring(40, 16).ToHexaString(); - Log("- {0} : ({1}) {2}", key.Key.ToHexaString(), key.Value.Count, key.Value.ToAsciiOrHexaString()); + Log("- {0:X} : ({1}) {2:P}", key.Key, key.Value.Count, key.Value); Log(" > node = {0}", nodeId); Log(" > machine = {0}", machineId); Log(" > datacenter = {0}", dataCenterId); @@ -1843,7 +1905,7 @@ public async Task Test_Can_Get_Boundary_Keys() string[] ids = null; foreach (var key in shards) { - // - the first 12 bytes are some sort of header: + // - the first 12 bytes are some sort of header: // - bytes 0-5 usually are 01 00 01 10 A2 00 // - bytes 6-7 contains 0x0FDB which is the product's signature // - bytes 8-9 
contains the version (02 00 for "2.0"?) @@ -1859,10 +1921,10 @@ public async Task Test_Can_Get_Boundary_Keys() distinctNodes.Add(ids[i]); } replicationFactor = Math.Max(replicationFactor, ids.Length); - + // the node id seems to be at offset 12 - //Console.WriteLine("- " + key.Value.Substring(0, 12).ToAsciiOrHexaString() + " : " + String.Join(", ", ids) + " = " + key.Key); + //Log("- " + key.Value.Substring(0, 12).ToAsciiOrHexaString() + " : " + String.Join(", ", ids) + " = " + key.Key); } Log(); Log("Distinct nodes: {0}", distinctNodes.Count); @@ -1887,10 +1949,10 @@ public async Task Test_Simple_Read_Transaction() { await tr.GetReadVersionAsync(); - var a = location.ConcatKey(Slice.FromString("A")); - var b = location.ConcatKey(Slice.FromString("B")); - var c = location.ConcatKey(Slice.FromString("C")); - var z = location.ConcatKey(Slice.FromString("Z")); + var a = location[Slice.FromString("A")]; + var b = location[Slice.FromString("B")]; + var c = location[Slice.FromString("C")]; + var z = location[Slice.FromString("Z")]; //await tr.GetAsync(location.Concat(Slice.FromString("KEY"))); @@ -1901,20 +1963,20 @@ public async Task Test_Simple_Read_Transaction() tr.Set(b, Slice.FromString("BAZ")); tr.Set(c, Slice.FromString("BAT")); tr.ClearRange(a, c); - + //tr.ClearRange(location.Concat(Slice.FromString("A")), location.Concat(Slice.FromString("Z"))); //tr.Set(location.Concat(Slice.FromString("C")), Slice.Empty); //var slice = await tr.GetRange(location.Concat(Slice.FromString("A")), location.Concat(Slice.FromString("Z"))).FirstOrDefaultAsync(); - //Console.WriteLine(slice); + //Log(slice); //tr.AddReadConflictKey(location.Concat(Slice.FromString("READ_CONFLICT"))); //tr.AddWriteConflictKey(location.Concat(Slice.FromString("WRITE_CONFLICT"))); - //tr.AddReadConflictRange(new FdbKeyRange(location.Concat(Slice.FromString("D")), location.Concat(Slice.FromString("E")))); - //tr.AddReadConflictRange(new FdbKeyRange(location.Concat(Slice.FromString("C")), 
location.Concat(Slice.FromString("G")))); - //tr.AddReadConflictRange(new FdbKeyRange(location.Concat(Slice.FromString("B")), location.Concat(Slice.FromString("F")))); - //tr.AddReadConflictRange(new FdbKeyRange(location.Concat(Slice.FromString("A")), location.Concat(Slice.FromString("Z")))); + //tr.AddReadConflictRange(new KeyRange(location.Concat(Slice.FromString("D")), location.Concat(Slice.FromString("E")))); + //tr.AddReadConflictRange(new KeyRange(location.Concat(Slice.FromString("C")), location.Concat(Slice.FromString("G")))); + //tr.AddReadConflictRange(new KeyRange(location.Concat(Slice.FromString("B")), location.Concat(Slice.FromString("F")))); + //tr.AddReadConflictRange(new KeyRange(location.Concat(Slice.FromString("A")), location.Concat(Slice.FromString("Z")))); await tr.CommitAsync(); @@ -1923,130 +1985,326 @@ public async Task Test_Simple_Read_Transaction() } } - [Test, Category("LongRunning")] - public async Task Test_BadPractice_Future_Fuzzer() + [Test] + public async Task Test_VersionStamps_Share_The_Same_Token_Per_Transaction_Attempt() { - const int DURATION_SEC = 30; - const int R = 100; + // Veryify that we can set versionstamped keys inside a transaction using (var db = await OpenTestDatabaseAsync()) { - var location = db.Partition.ByKey("Fuzzer"); + using (var tr = db.BeginTransaction(this.Cancellation)) + { + // should return a 80-bit incomplete stamp, using a random token + var x = tr.CreateVersionStamp(); + Log($"> x : {x.ToSlice():X} => {x}"); + Assert.That(x.IsIncomplete, Is.True, "Placeholder token should be incomplete"); + Assert.That(x.HasUserVersion, Is.False); + Assert.That(x.UserVersion, Is.Zero); + Assert.That(x.TransactionVersion >> 56, Is.EqualTo(0xFF), "Highest 8 bit of Transaction Version should be set to 1"); + Assert.That(x.TransactionOrder >> 12, Is.EqualTo(0xF), "Hight 4 bits of Transaction Order should be set to 1"); + + // should return a 96-bit incomplete stamp, using a the same random token and user version 0 + var x0 
= tr.CreateVersionStamp(0); + Log($"> x0 : {x0.ToSlice():X} => {x0}"); + Assert.That(x0.IsIncomplete, Is.True, "Placeholder token should be incomplete"); + Assert.That(x0.TransactionVersion, Is.EqualTo(x.TransactionVersion), "All generated stamps by one transaction should share the random token value "); + Assert.That(x0.TransactionOrder, Is.EqualTo(x.TransactionOrder), "All generated stamps by one transaction should share the random token value "); + Assert.That(x0.HasUserVersion, Is.True); + Assert.That(x0.UserVersion, Is.EqualTo(0)); + + // should return a 96-bit incomplete stamp, using a the same random token and user version 1 + var x1 = tr.CreateVersionStamp(1); + Log($"> x1 : {x1.ToSlice():X} => {x1}"); + Assert.That(x1.IsIncomplete, Is.True, "Placeholder token should be incomplete"); + Assert.That(x1.TransactionVersion, Is.EqualTo(x.TransactionVersion), "All generated stamps by one transaction should share the random token value "); + Assert.That(x1.TransactionOrder, Is.EqualTo(x.TransactionOrder), "All generated stamps by one transaction should share the random token value "); + Assert.That(x1.HasUserVersion, Is.True); + Assert.That(x1.UserVersion, Is.EqualTo(1)); + + // should return a 96-bit incomplete stamp, using a the same random token and user version 42 + var x42 = tr.CreateVersionStamp(42); + Log($"> x42: {x42.ToSlice():X} => {x42}"); + Assert.That(x42.IsIncomplete, Is.True, "Placeholder token should be incomplete"); + Assert.That(x42.TransactionVersion, Is.EqualTo(x.TransactionVersion), "All generated stamps by one transaction should share the random token value "); + Assert.That(x42.TransactionOrder, Is.EqualTo(x.TransactionOrder), "All generated stamps by one transaction should share the random token value "); + Assert.That(x42.HasUserVersion, Is.True); + Assert.That(x42.UserVersion, Is.EqualTo(42)); + + // Reset the transaction + // => stamps should use a new value + Log("Reset!"); + tr.Reset(); - var rnd = new Random(); - int seed = 
rnd.Next(); - Log("Using random seeed {0}", seed); - rnd = new Random(seed); + var y = tr.CreateVersionStamp(); + Log($"> y : {y.ToSlice():X} => {y}'"); + Assert.That(y, Is.Not.EqualTo(x), "VersionStamps should change when a transaction is reset"); + + Assert.That(y.IsIncomplete, Is.True, "Placeholder token should be incomplete"); + Assert.That(y.HasUserVersion, Is.False); + Assert.That(y.UserVersion, Is.Zero); + Assert.That(y.TransactionVersion >> 56, Is.EqualTo(0xFF), "Highest 8 bit of Transaction Version should be set to 1"); + Assert.That(y.TransactionOrder >> 12, Is.EqualTo(0xF), "Hight 4 bits of Transaction Order should be set to 1"); + + var y42 = tr.CreateVersionStamp(42); + Log($"> y42: {y42.ToSlice():X} => {y42}"); + Assert.That(y42.IsIncomplete, Is.True, "Placeholder token should be incomplete"); + Assert.That(y42.TransactionVersion, Is.EqualTo(y.TransactionVersion), "All generated stamps by one transaction should share the random token value "); + Assert.That(y42.TransactionOrder, Is.EqualTo(y.TransactionOrder), "All generated stamps by one transaction should share the random token value "); + Assert.That(y42.HasUserVersion, Is.True); + Assert.That(y42.UserVersion, Is.EqualTo(42)); + } + } + } - await db.WriteAsync((tr) => + [Test] + public async Task Test_VersionStamp_Operations() + { + // Veryify that we can set versionstamped keys inside a transaction + + using (var db = await OpenTestDatabaseAsync()) + { + var location = db.Partition.ByKey("versionstamps"); + + await db.ClearRangeAsync(location, this.Cancellation); + + VersionStamp vsActual; // will contain the actual version stamp used by the database + + Log("Inserting keys with version stamps:"); + using (var tr = db.BeginTransaction(this.Cancellation)) + { + + // should return a 80-bit incomplete stamp, using a random token + var vs = tr.CreateVersionStamp(); + Log($"> placeholder stamp: {vs} with token '{vs.ToSlice():X}'"); + + // a single key using the 80-bit stamp + 
tr.SetVersionStampedKey(location.Keys.Encode("foo", vs, 123), Slice.FromString("Hello, World!")); + + // simulate a batch of 3 keys, using 96-bits stamps + tr.SetVersionStampedKey(location.Keys.Encode("bar", tr.CreateVersionStamp(0)), Slice.FromString("Zero")); + tr.SetVersionStampedKey(location.Keys.Encode("bar", tr.CreateVersionStamp(1)), Slice.FromString("One")); + tr.SetVersionStampedKey(location.Keys.Encode("bar", tr.CreateVersionStamp(42)), Slice.FromString("FortyTwo")); + + // value that contain the stamp + var val = Slice.FromString("$$$$$$$$$$Hello World!"); // '$' will be replaced by the stamp + Log($"> {val:X}"); + tr.SetVersionStampedValue(location.Keys.Encode("baz"), val); + + // need to be request BEFORE the commit + var vsTask = tr.GetVersionStampAsync(); + + await tr.CommitAsync(); + Log(tr.GetCommittedVersion()); + + // need to be resolved AFTER the commit + vsActual = await vsTask; + Log($"> actual stamp: {vsActual} with token '{vsActual.ToSlice():X}'"); + } + + await DumpSubspace(db, location); + + Log("Checking database content:"); + using (var tr = db.BeginReadOnlyTransaction(this.Cancellation)) { - for (int i = 0; i < R; i++) { - tr.Set(location.Keys.Encode(i), Slice.FromInt32(i)); + var foo = await tr.GetRange(location.Keys.ToKeyRange("foo")).SingleAsync(); + Log("> Found 1 result under (foo,)"); + Log($"- {location.ExtractKey(foo.Key):K} = {foo.Value:V}"); + Assert.That(foo.Value.ToString(), Is.EqualTo("Hello, World!")); + + var t = location.Keys.Unpack(foo.Key); + Assert.That(t.Get(0), Is.EqualTo("foo")); + Assert.That(t.Get(2), Is.EqualTo(123)); + + var vs = t.Get(1); + Assert.That(vs.IsIncomplete, Is.False); + Assert.That(vs.HasUserVersion, Is.False); + Assert.That(vs.UserVersion, Is.Zero); + Assert.That(vs.TransactionVersion, Is.EqualTo(vsActual.TransactionVersion)); + Assert.That(vs.TransactionOrder, Is.EqualTo(vsActual.TransactionOrder)); } - }, this.Cancellation); - var start = DateTime.UtcNow; - Log("This test will run for {0} 
seconds", DURATION_SEC); + { + var items = await tr.GetRange(location.Keys.ToKeyRange("bar")).ToListAsync(); + Log($"> Found {items.Count} results under (bar,)"); + foreach (var item in items) + { + Log($"- {location.ExtractKey(item.Key):K} = {item.Value:V}"); + } + + Assert.That(items.Count, Is.EqualTo(3), "Should have found 3 keys under 'foo'"); + + Assert.That(items[0].Value.ToString(), Is.EqualTo("Zero")); + var vs0 = location.Keys.DecodeLast(items[0].Key); + Assert.That(vs0.IsIncomplete, Is.False); + Assert.That(vs0.HasUserVersion, Is.True); + Assert.That(vs0.UserVersion, Is.EqualTo(0)); + Assert.That(vs0.TransactionVersion, Is.EqualTo(vsActual.TransactionVersion)); + Assert.That(vs0.TransactionOrder, Is.EqualTo(vsActual.TransactionOrder)); + + Assert.That(items[1].Value.ToString(), Is.EqualTo("One")); + var vs1 = location.Keys.DecodeLast(items[1].Key); + Assert.That(vs1.IsIncomplete, Is.False); + Assert.That(vs1.HasUserVersion, Is.True); + Assert.That(vs1.UserVersion, Is.EqualTo(1)); + Assert.That(vs1.TransactionVersion, Is.EqualTo(vsActual.TransactionVersion)); + Assert.That(vs1.TransactionOrder, Is.EqualTo(vsActual.TransactionOrder)); + + Assert.That(items[2].Value.ToString(), Is.EqualTo("FortyTwo")); + var vs42 = location.Keys.DecodeLast(items[2].Key); + Assert.That(vs42.IsIncomplete, Is.False); + Assert.That(vs42.HasUserVersion, Is.True); + Assert.That(vs42.UserVersion, Is.EqualTo(42)); + Assert.That(vs42.TransactionVersion, Is.EqualTo(vsActual.TransactionVersion)); + Assert.That(vs42.TransactionOrder, Is.EqualTo(vsActual.TransactionOrder)); + } + + { + var baz = await tr.GetAsync(location.Keys.Encode("baz")); + Log($"> {baz:X}"); + // ensure that the first 10 bytes have been overwritten with the stamp + Assert.That(baz.Count, Is.GreaterThan(0), "Key should be present in the database"); + Assert.That(baz.StartsWith(vsActual.ToSlice()), Is.True, "The first 10 bytes should match the resolved stamp"); + Assert.That(baz.Substring(10), 
Is.EqualTo(Slice.FromString("Hello World!")), "The rest of the slice should be untouched");
+ }
+ }
+
+ }
+ }
- int time = 0;
+ [Test, Category("LongRunning")]
+ public async Task Test_BadPractice_Future_Fuzzer()
+ {
+#if DEBUG
+ const int DURATION_SEC = 5;
+#else
+ const int DURATION_SEC = 20;
+#endif
+ const int R = 100;
- List m_alive = new List();
- while (DateTime.UtcNow - start < TimeSpan.FromSeconds(DURATION_SEC))
+ try
+ {
+ using (var db = await OpenTestDatabaseAsync())
{
- switch (rnd.Next(10))
+ var location = db.Partition.ByKey("Fuzzer");
+ var rnd = new Random();
+ int seed = rnd.Next();
+ Log("Using random seed {0}", seed);
+ rnd = new Random(seed);
+
+ await db.WriteAsync((tr) =>
{
- case 0:
+ for (int i = 0; i < R; i++)
+ {
+ tr.Set(location.Keys.Encode(i), Slice.FromInt32(i));
+ }
+ }, this.Cancellation);
+
+ var start = DateTime.UtcNow;
+ Log("This test will run for {0} seconds", DURATION_SEC);
+
+ int time = 0;
+
+ var line = new StringBuilder(256);
+
+ var alive = new List(100);
+ var lastCheck = start;
+ while (DateTime.UtcNow - start < TimeSpan.FromSeconds(DURATION_SEC))
+ {
+ int x = rnd.Next(10);
+
+ if (x == 0)
{ // start a new transaction
- Console.Write('T');
+ line.Append('T');
var tr = db.BeginTransaction(FdbTransactionMode.Default, this.Cancellation);
- m_alive.Add(tr);
- break;
+ alive.Add(tr);
}
- case 1:
+ else if (x == 1)
{ // drop a random transaction
- if (m_alive.Count == 0) continue;
- Console.Write('L');
- int p = rnd.Next(m_alive.Count);
+ if (alive.Count == 0) continue;
+ line.Append('L');
+ int p = rnd.Next(alive.Count);
- m_alive.RemoveAt(p);
- //no dispose
- break;
+ alive.RemoveAt(p);
+ //no dispose!
}
- case 2:
+ else if (x == 2)
{ // dispose a random transaction
- if (m_alive.Count == 0) continue;
- Console.Write('D');
- int p = rnd.Next(m_alive.Count);
+ if (alive.Count == 0) continue;
+ line.Append('D');
+ int p = rnd.Next(alive.Count);
- var tr = m_alive[p];
+ var tr = alive[p];
+ alive.RemoveAt(p);
tr.Dispose();
- m_alive.RemoveAt(p);
- break;
}
- case 3:
- { // GC!
- Console.Write('C');
+ else if (x == 3)
+ { // get read version
+ line.Append('R');
var tr = db.BeginTransaction(FdbTransactionMode.ReadOnly, this.Cancellation);
- m_alive.Add(tr);
- await tr.GetReadVersionAsync();
- break;
+ alive.Add(tr);
+ _ = await tr.GetReadVersionAsync();
}
-
- case 4:
- case 5:
- case 6:
- { // read a random value from a random transaction
- Console.Write('G');
- if (m_alive.Count == 0) break;
- int p = rnd.Next(m_alive.Count);
- var tr = m_alive[p];
-
- int x = rnd.Next(R);
- try
- {
- var res = await tr.GetAsync(location.Keys.Encode(x));
+ else
+ {
+ if (x % 2 == 0)
+ { // read a random value from a random transaction
+ if (alive.Count == 0) continue;
+ line.Append('G');
+ int p = rnd.Next(alive.Count);
+ var tr = alive[p];
+
+ int k = rnd.Next(R);
+ try
+ {
+ await tr.GetAsync(location.Keys.Encode(k));
+ }
+ catch (FdbException)
+ {
+ line.Append('!');
+ alive.RemoveAt(p);
+ tr.Dispose();
+ }
}
- catch (FdbException)
- {
- Console.Write('!');
+ else
+ { // read a random value, but drop the task
+ if (alive.Count == 0) continue;
+ line.Append('g');
+ int p = rnd.Next(alive.Count);
+ var tr = alive[p];
+
+ int k = rnd.Next(R);
+ var t = tr.GetAsync(location.Keys.Encode(k)).ContinueWith((_) => { var err = _.Exception; }, TaskContinuationOptions.OnlyOnFaulted);
+ // => t is not stored
}
- break;
}
- case 7:
- case 8:
- case 9:
- { // read a random value, but drop the task
- Console.Write('g');
- if (m_alive.Count == 0) break;
- int p = rnd.Next(m_alive.Count);
- var tr = m_alive[p];
-
- int x = rnd.Next(R);
- var t = tr.GetAsync(location.Keys.Encode(x)).ContinueWith((_)
=> Console.Write('!'), TaskContinuationOptions.NotOnRanToCompletion); - // => t is not stored - break; + + if ((++time) % 10 == 0 && DateTime.UtcNow - lastCheck >= TimeSpan.FromSeconds(1)) + { + Log(line.ToString()); + line.Clear(); + Log("State: {0}", alive.Count); + //Log("Performing full GC"); + //GC.Collect(2); + //GC.WaitForPendingFinalizers(); + //GC.Collect(2); + lastCheck = DateTime.UtcNow; } - } - if ((time++) % 80 == 0) - { - Console.WriteLine(); - Log("State: {0}", m_alive.Count); - Console.Write('C'); - GC.Collect(); - GC.WaitForPendingFinalizers(); - GC.Collect(); + //await Task.Delay(1); + } + GC.Collect(); + GC.WaitForPendingFinalizers(); + GC.Collect(); } - - GC.Collect(); - GC.WaitForPendingFinalizers(); - GC.Collect(); + } + finally + { + Log("Test methods completed!"); } } } - } diff --git a/FoundationDB.Tests/TransactionalFacts.cs b/FoundationDB.Tests/TransactionalFacts.cs index f3a55e4d3..397ca89bf 100644 --- a/FoundationDB.Tests/TransactionalFacts.cs +++ b/FoundationDB.Tests/TransactionalFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -82,13 +82,13 @@ public async Task Test_Transactionals_Rethrow_Regular_Exceptions() // ReadAsync should return a failed Task, and not bubble up the exception. 
var task = db.ReadAsync((tr) => { - Assert.That(called, Is.EqualTo(0), "ReadAsync should not retry on regular exceptions"); + Assert.That(called, Is.Zero, "ReadAsync should not retry on regular exceptions"); ++called; throw new InvalidOperationException("Boom"); }, this.Cancellation); Assert.That(task, Is.Not.Null); // the exception should be unwrapped (ie: we should not see an AggregateException, but the actual exception) - Assert.Throws(async () => await task, "ReadAsync should rethrow any regular exceptions"); + Assert.That(async () => await task, Throws.InstanceOf(), "ReadAsync should rethrow any regular exceptions"); } } @@ -141,6 +141,7 @@ public async Task Test_Transactionals_Retries_On_Transient_Errors() } [Test][Category("LongRunning")] + [Ignore("This tests a bug in an old version (v2.0.7) and takes a long time to run!")] public async Task Test_Transactionals_Retries_Do_Not_Leak_When_Reading_Too_Much() { // we have a transaction that tries to read too much data, and will always take more than 5 seconds to execute @@ -159,19 +160,19 @@ public async Task Test_Transactionals_Retries_Do_Not_Leak_When_Reading_Too_Much( // insert a good amount of test data var sw = Stopwatch.StartNew(); - Console.WriteLine("Inserting test data (this may take a few minutes)..."); + Log("Inserting test data (this may take a few minutes)..."); var rnd = new Random(); - await Fdb.Bulk.WriteAsync(db, Enumerable.Range(0, 100 * 1000).Select(i => new KeyValuePair(location.Keys.Encode(i), Slice.Random(rnd, 4096))), this.Cancellation); + await Fdb.Bulk.WriteAsync(db, Enumerable.Range(0, 100 * 1000).Select(i => (location.Keys.Encode(i), Slice.Random(rnd, 4096))), this.Cancellation); sw.Stop(); - Console.WriteLine("> done in " + sw.Elapsed); + Log("> done in " + sw.Elapsed); - using (var timer = new System.Threading.Timer((_) => { Console.WriteLine("WorkingSet: {0:N0}, Managed: {1:N0}", Environment.WorkingSet, GC.GetTotalMemory(false)); }, null, 1000, 1000)) + using (var timer = new 
System.Threading.Timer((_) => { Log("WorkingSet: {0:N0}, Managed: {1:N0}", Environment.WorkingSet, GC.GetTotalMemory(false)); }, null, 1000, 1000)) { try { var result = await db.ReadAsync((tr) => { - Console.WriteLine("Retry #" + tr.Context.Retries + " @ " + tr.Context.ElapsedTotal); + Log("Retry #" + tr.Context.Retries + " @ " + tr.Context.ElapsedTotal); return tr.GetRange(location.Keys.ToRange()).ToListAsync(); }, this.Cancellation); @@ -187,18 +188,18 @@ public async Task Test_Transactionals_Retries_Do_Not_Leak_When_Reading_Too_Much( } } // to help see the effect in a profiler, dispose the transaction first, wait 5 sec then do a full GC, and then wait a bit before exiting the process - Console.WriteLine("Transaction destroyed!"); + Log("Transaction destroyed!"); Thread.Sleep(5000); - Console.WriteLine("Cleaning managed memory"); + Log("Cleaning managed memory"); GC.Collect(); GC.WaitForPendingFinalizers(); GC.Collect(); - Console.WriteLine("Waiting..."); + Log("Waiting..."); Thread.Sleep(5000); - Console.WriteLine("byte"); + Log("byte"); } } @@ -217,7 +218,7 @@ public async Task Test_Transactionals_Should_Not_Execute_If_Already_Canceled() var t = db.ReadAsync((tr) => { called = true; - Console.WriteLine("FAILED"); + Log("FAILED"); throw new InvalidOperationException("Failed"); }, go.Token); @@ -252,7 +253,7 @@ public async Task Test_Transactionals_ReadOnly_Should_Deny_Write_Attempts() return Task.FromResult(123); }, this.Cancellation); - Assert.Throws(async () => await t, "Forcing writes on a read-only transaction should fail"); + Assert.That(async () => await t, Throws.InstanceOf(), "Forcing writes on a read-only transaction should fail"); } } diff --git a/FoundationDB.Tests/Utils/ConversionFacts.cs b/FoundationDB.Tests/Utils/ConversionFacts.cs index b964b8f8d..9d116ebfa 100644 --- a/FoundationDB.Tests/Utils/ConversionFacts.cs +++ b/FoundationDB.Tests/Utils/ConversionFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright 
(c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,17 +28,13 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client.Converters.Tests { - using FoundationDB.Client; - using FoundationDB.Client.Utils; - using NUnit.Framework; using System; - using System.Collections.Generic; - using System.Globalization; - using System.Linq; - using System.Text; + using Doxense.Runtime.Converters; + using FoundationDB.Client.Tests; + using NUnit.Framework; [TestFixture] - public class ConversionFacts + public class ConversionFacts : FdbTest { [Test] diff --git a/FoundationDB.Tests/Utils/FdbConvertersFacts.cs b/FoundationDB.Tests/Utils/FdbConvertersFacts.cs deleted file mode 100644 index 9f9cd1863..000000000 --- a/FoundationDB.Tests/Utils/FdbConvertersFacts.cs +++ /dev/null @@ -1,174 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2015, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client.Converters.Tests -{ - using NUnit.Framework; - using System; - - [TestFixture] - public class FdbConvertersFacts - { - - [Test] - public void Test_Can_Convert_Numbers_To_Bool() - { - Assert.That(FdbConverters.Convert(0), Is.False); - Assert.That(FdbConverters.Convert(0), Is.False); - Assert.That(FdbConverters.Convert(0), Is.False); - Assert.That(FdbConverters.Convert(0), Is.False); - Assert.That(FdbConverters.Convert(0), Is.False); - Assert.That(FdbConverters.Convert(0), Is.False); - Assert.That(FdbConverters.Convert(0), Is.False); - Assert.That(FdbConverters.Convert(0), Is.False); - Assert.That(FdbConverters.Convert(0.0f), Is.False); - Assert.That(FdbConverters.Convert(float.NaN), Is.False); - Assert.That(FdbConverters.Convert(0.0d), Is.False); - Assert.That(FdbConverters.Convert(double.NaN), Is.False); - - Assert.That(FdbConverters.Convert(123), Is.True); - Assert.That(FdbConverters.Convert(123), Is.True); - Assert.That(FdbConverters.Convert(123), Is.True); - Assert.That(FdbConverters.Convert(123), Is.True); - Assert.That(FdbConverters.Convert(123), Is.True); - Assert.That(FdbConverters.Convert(123), Is.True); - Assert.That(FdbConverters.Convert(123), Is.True); - Assert.That(FdbConverters.Convert(123), Is.True); - Assert.That(FdbConverters.Convert(123.0f), Is.True); - Assert.That(FdbConverters.Convert(123.0d), Is.True); - } - - [Test] - public void Test_Can_Convert_Numbers_To_Int32() - { - Assert.That(FdbConverters.Convert(123), 
Is.EqualTo(123)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123)); - Assert.That(FdbConverters.Convert(123.0f), Is.EqualTo(123)); - Assert.That(FdbConverters.Convert(123.0d), Is.EqualTo(123)); - } - - [Test] - public void Test_Can_Convert_Numbers_To_UInt32() - { - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123U)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123U)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123U)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123U)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123U)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123U)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123U)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123U)); - Assert.That(FdbConverters.Convert(123.0f), Is.EqualTo(123U)); - Assert.That(FdbConverters.Convert(123.0d), Is.EqualTo(123U)); - } - - [Test] - public void Test_Can_Convert_Numbers_To_Int64() - { - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123L)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123L)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123L)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123L)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123L)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123L)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123L)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123L)); - Assert.That(FdbConverters.Convert(123.0f), Is.EqualTo(123L)); - Assert.That(FdbConverters.Convert(123.0d), Is.EqualTo(123L)); - } - - [Test] - public void 
Test_Can_Convert_Numbers_To_UInt64() - { - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123UL)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123UL)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123UL)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123UL)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123UL)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123UL)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123UL)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123UL)); - Assert.That(FdbConverters.Convert(123.0f), Is.EqualTo(123UL)); - Assert.That(FdbConverters.Convert(123.0d), Is.EqualTo(123UL)); - } - - [Test] - public void Test_Can_Convert_Numbers_To_Single() - { - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123f)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123f)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123f)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123f)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123f)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123f)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123f)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123f)); - Assert.That(FdbConverters.Convert(123.0f), Is.EqualTo(123f)); - Assert.That(FdbConverters.Convert(123.0d), Is.EqualTo(123f)); - } - - [Test] - public void Test_Can_Convert_Numbers_To_Double() - { - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123d)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123d)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123d)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123d)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123d)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123d)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123d)); - Assert.That(FdbConverters.Convert(123), Is.EqualTo(123d)); - Assert.That(FdbConverters.Convert(123.0f), Is.EqualTo(123d)); - 
Assert.That(FdbConverters.Convert(123.0d), Is.EqualTo(123d)); - } - - [Test] - public void Test_Can_Convert_Numbers_To_String() - { - Assert.That(FdbConverters.Convert(123), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123.0f), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123.4f), Is.EqualTo("123.4")); - Assert.That(FdbConverters.Convert(123.0d), Is.EqualTo("123")); - Assert.That(FdbConverters.Convert(123.4d), Is.EqualTo("123.4")); - } - - } -} diff --git a/FoundationDB.Tests/Utils/SliceComparerFacts.cs b/FoundationDB.Tests/Utils/SliceComparerFacts.cs index 499fcfcd1..9e4daec6e 100644 --- a/FoundationDB.Tests/Utils/SliceComparerFacts.cs +++ b/FoundationDB.Tests/Utils/SliceComparerFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,10 +28,9 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client.Tests { - using FoundationDB.Client; - using NUnit.Framework; using System; using System.Text; + using NUnit.Framework; [TestFixture] public class SliceComparerFacts : FdbTest @@ -42,9 +41,9 @@ public class SliceComparerFacts : FdbTest [Test] public void Test_SliceComparer_Equals() { - var cmp = SliceComparer.Default; + var cmp = Slice.Comparer.Default; Assert.That(cmp, Is.Not.Null); - Assert.That(SliceComparer.Default, Is.SameAs(cmp)); + Assert.That(Slice.Comparer.Default, Is.SameAs(cmp)); Assert.That(cmp.Equals(Slice.Nil, Slice.Nil), Is.True); Assert.That(cmp.Equals(Slice.Empty, Slice.Empty), Is.True); @@ -52,26 +51,26 @@ public void Test_SliceComparer_Equals() Assert.That(cmp.Equals(Slice.Empty, Slice.Nil), Is.False); Assert.That(cmp.Equals(Slice.FromByte(42), Slice.FromByte(42)), Is.True); - Assert.That(cmp.Equals(Slice.FromByte(42), Slice.Create(new byte[] { 42 })), Is.True); + Assert.That(cmp.Equals(Slice.FromByte(42), new byte[] { 42 }.AsSlice()), Is.True); Assert.That(cmp.Equals(Slice.FromByte(42), Slice.FromByte(77)), Is.False); - Assert.That(cmp.Equals(Slice.Create(new byte[] { 65, 66, 67 }), Slice.FromString("ABC")), Is.True); - Assert.That(cmp.Equals(Slice.Create(new byte[] { 65, 66, 67, 68 }), Slice.FromString("ABC")), Is.False); + Assert.That(cmp.Equals(new byte[] { 65, 66, 67 }.AsSlice(), Slice.FromString("ABC")), Is.True); + Assert.That(cmp.Equals(new byte[] { 65, 66, 67, 68 }.AsSlice(), Slice.FromString("ABC")), Is.False); var buf1 = Encoding.ASCII.GetBytes("ABBAABA"); var buf2 = Encoding.ASCII.GetBytes("ABBAABA"); - Assert.That(cmp.Equals(Slice.Create(buf1, 0, 2), Slice.Create(buf1, 0, 2)), Is.True); - Assert.That(cmp.Equals(Slice.Create(buf1, 0, 2), Slice.Create(buf1, 0, 3)), Is.False); - Assert.That(cmp.Equals(Slice.Create(buf1, 0, 2), Slice.Create(buf1, 4, 2)), Is.True); - Assert.That(cmp.Equals(Slice.Create(buf1, 0, 3), Slice.Create(buf1, 4, 3)), Is.False); - 
Assert.That(cmp.Equals(Slice.Create(buf1, 0, 2), Slice.Create(buf2, 4, 2)), Is.True); - Assert.That(cmp.Equals(Slice.Create(buf1, 0, 3), Slice.Create(buf2, 4, 3)), Is.False); + Assert.That(cmp.Equals(buf1.AsSlice(0, 2), buf1.AsSlice(0, 2)), Is.True); + Assert.That(cmp.Equals(buf1.AsSlice(0, 2), buf1.AsSlice(0, 3)), Is.False); + Assert.That(cmp.Equals(buf1.AsSlice(0, 2), buf1.AsSlice(4, 2)), Is.True); + Assert.That(cmp.Equals(buf1.AsSlice(0, 3), buf1.AsSlice(4, 3)), Is.False); + Assert.That(cmp.Equals(buf1.AsSlice(0, 2), buf2.AsSlice(4, 2)), Is.True); + Assert.That(cmp.Equals(buf1.AsSlice(0, 3), buf2.AsSlice(4, 3)), Is.False); } [Test] public void Test_SliceComparer_GetHashCode_Should_Return_Same_As_Slice() { - var cmp = SliceComparer.Default; + var cmp = Slice.Comparer.Default; Assert.That(cmp, Is.Not.Null); Assert.That(cmp.GetHashCode(Slice.Nil), Is.EqualTo(Slice.Nil.GetHashCode())); @@ -89,16 +88,16 @@ public void Test_SliceComparer_GetHashCode_Should_Return_Same_As_Slice() [Test] public void Test_SliceComparer_Compare() { - var cmp = SliceComparer.Default; + var cmp = Slice.Comparer.Default; Assert.That(cmp, Is.Not.Null); - Assert.That(cmp.Compare(Slice.Nil, Slice.Nil), Is.EqualTo(0)); - Assert.That(cmp.Compare(Slice.Empty, Slice.Empty), Is.EqualTo(0)); - Assert.That(cmp.Compare(Slice.FromByte(42), Slice.FromByte(42)), Is.EqualTo(0)); + Assert.That(cmp.Compare(Slice.Nil, Slice.Nil), Is.Zero); + Assert.That(cmp.Compare(Slice.Empty, Slice.Empty), Is.Zero); + Assert.That(cmp.Compare(Slice.FromByte(42), Slice.FromByte(42)), Is.Zero); //REVIEW: Inconsistency: compare(nil, empty) == 0, but Equals(nil, empty) == false - Assert.That(cmp.Compare(Slice.Nil, Slice.Empty), Is.EqualTo(0), "Nil and Empty are considered similar regarding ordering"); - Assert.That(cmp.Compare(Slice.Empty, Slice.Nil), Is.EqualTo(0), "Nil and Empty are considered similar regarding ordering"); + Assert.That(cmp.Compare(Slice.Nil, Slice.Empty), Is.Zero, "Nil and Empty are considered similar 
regarding ordering"); + Assert.That(cmp.Compare(Slice.Empty, Slice.Nil), Is.Zero, "Nil and Empty are considered similar regarding ordering"); Assert.That(cmp.Compare(Slice.FromByte(42), Slice.FromByte(77)), Is.LessThan(0)); Assert.That(cmp.Compare(Slice.FromByte(42), Slice.FromByte(21)), Is.GreaterThan(0)); diff --git a/FoundationDB.Tests/Utils/SliceFacts.cs b/FoundationDB.Tests/Utils/SliceFacts.cs index 448554d92..8dec93914 100644 --- a/FoundationDB.Tests/Utils/SliceFacts.cs +++ b/FoundationDB.Tests/Utils/SliceFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,21 +26,42 @@ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client.Tests +namespace Doxense.Memory.Tests { - using FoundationDB.Client; + //README:IMPORTANT! This source file is expected to be stored as UTF-8! If the encoding is changed, some tests below may fail because they rely on specific code points! 
+ using NUnit.Framework; using System; using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; using System.IO; using System.Linq; using System.Text; - using System.Threading.Tasks; + using FoundationDB.Client.Tests; [TestFixture] public class SliceFacts : FdbTest { +#if MEASURE + [TestFixtureTearDown] + public void DumpStats() + { + Log("# MemCopy:"); + for (int i = 0; i < SliceHelpers.CopyHistogram.Length; i++) + { + if (SliceHelpers.CopyHistogram[i] == 0) continue; + Log("# {0} : {1:N0} ({2:N1} ns, {3:N3} ns/byte)", i, SliceHelpers.CopyHistogram[i], SliceHelpers.CopyDurations[i] / SliceHelpers.CopyHistogram[i], SliceHelpers.CopyDurations[i] / (SliceHelpers.CopyHistogram[i] * i)); + } + Log("# MemCompare:"); + for (int i = 0; i < SliceHelpers.CompareHistogram.Length; i++) + { + if (SliceHelpers.CompareHistogram[i] == 0) continue; + Log("# {0} : {1:N0} ({2:N1} ns, {3:N3} ns/byte)", i, SliceHelpers.CompareHistogram[i], SliceHelpers.CompareDurations[i] / SliceHelpers.CompareHistogram[i], SliceHelpers.CompareDurations[i] / (SliceHelpers.CompareHistogram[i] * i)); + } + } +#endif + [Test] public void Test_Slice_Nil() { @@ -57,9 +78,10 @@ public void Test_Slice_Nil() Assert.That(Slice.Nil.IsPresent, Is.False); Assert.That(Slice.Nil.GetBytes(), Is.Null); - Assert.That(Slice.Nil.ToAscii(), Is.Null); + Assert.That(Slice.Nil.GetBytesOrEmpty(), Is.Not.Null.And.Length.EqualTo(0)); + Assert.That(Slice.Nil.ToByteString(), Is.Null); Assert.That(Slice.Nil.ToUnicode(), Is.Null); - Assert.That(Slice.Nil.ToAsciiOrHexaString(), Is.EqualTo(String.Empty)); + Assert.That(Slice.Nil.PrettyPrint(), Is.EqualTo(String.Empty)); } [Test] @@ -70,7 +92,7 @@ public void Test_Slice_Empty() Assert.That(Slice.Empty.Count, Is.EqualTo(0)); Assert.That(Slice.Empty.Offset, Is.EqualTo(0)); Assert.That(Slice.Empty.Array, Is.Not.Null); - Assert.That(Slice.Empty.Array.Length, Is.EqualTo(0)); + Assert.That(Slice.Empty.Array.Length, Is.GreaterThan(0), "The backing array for Slice.Empty 
should not be empty, in order to work properly with the fixed() operator!"); Assert.That(Slice.Empty.IsNull, Is.False); Assert.That(Slice.Empty.HasValue, Is.True); @@ -78,16 +100,17 @@ public void Test_Slice_Empty() Assert.That(Slice.Empty.IsNullOrEmpty, Is.True); Assert.That(Slice.Empty.IsPresent, Is.False); - Assert.That(Slice.Empty.GetBytes(), Is.EqualTo(new byte[0])); - Assert.That(Slice.Empty.ToAscii(), Is.EqualTo(String.Empty)); + Assert.That(Slice.Empty.GetBytes(), Is.Not.Null.And.Length.EqualTo(0)); + Assert.That(Slice.Empty.GetBytesOrEmpty(), Is.Not.Null.And.Length.EqualTo(0)); + Assert.That(Slice.Empty.ToByteString(), Is.EqualTo(String.Empty)); Assert.That(Slice.Empty.ToUnicode(), Is.EqualTo(String.Empty)); - Assert.That(Slice.Empty.ToAsciiOrHexaString(), Is.EqualTo("''")); + Assert.That(Slice.Empty.PrettyPrint(), Is.EqualTo("''")); } [Test] public void Test_Slice_With_Content() { - Slice slice = Slice.FromAscii("ABC"); + Slice slice = Slice.FromStringAscii("ABC"); Assert.That(slice.Count, Is.EqualTo(3)); Assert.That(slice.Offset, Is.EqualTo(0)); @@ -101,9 +124,10 @@ public void Test_Slice_With_Content() Assert.That(slice.IsPresent, Is.True); Assert.That(slice.GetBytes(), Is.EqualTo(new byte[3] { 65, 66, 67 })); - Assert.That(slice.ToAscii(), Is.EqualTo("ABC")); + Assert.That(slice.GetBytesOrEmpty(), Is.EqualTo(new byte[3] { 65, 66, 67 })); + Assert.That(slice.ToByteString(), Is.EqualTo("ABC")); Assert.That(slice.ToUnicode(), Is.EqualTo("ABC")); - Assert.That(slice.ToAsciiOrHexaString(), Is.EqualTo("'ABC'")); + Assert.That(slice.PrettyPrint(), Is.EqualTo("'ABC'")); } [Test] @@ -118,13 +142,13 @@ public void Test_Slice_Create_With_Capacity() [Test] public void Test_Slice_Create_With_Byte_Array() { - Assert.That(Slice.Create(default(byte[])).GetBytes(), Is.EqualTo(null)); - Assert.That(Slice.Create(new byte[0]).GetBytes(), Is.EqualTo(new byte[0])); - Assert.That(Slice.Create(new byte[] { 1, 2, 3 }).GetBytes(), Is.EqualTo(new byte[] { 1, 2, 3 })); + 
Assert.That(default(byte[]).AsSlice().GetBytes(), Is.EqualTo(null)); + Assert.That(new byte[0].AsSlice().GetBytes(), Is.EqualTo(new byte[0])); + Assert.That(new byte[] { 1, 2, 3 }.AsSlice().GetBytes(), Is.EqualTo(new byte[] { 1, 2, 3 })); // the array return by GetBytes() should not be the same array that was passed to Create ! byte[] tmp = Guid.NewGuid().ToByteArray(); // create a 16-byte array - var slice = Slice.Create(tmp); + var slice = tmp.AsSlice(); Assert.That(slice.Array, Is.SameAs(tmp)); Assert.That(slice.Offset, Is.EqualTo(0)); Assert.That(slice.Count, Is.EqualTo(tmp.Length)); @@ -133,7 +157,7 @@ public void Test_Slice_Create_With_Byte_Array() Assert.That(slice.GetBytes(), Is.Not.SameAs(tmp)); // create from a slice of the array - slice = Slice.Create(tmp, 4, 7); + slice = tmp.AsSlice(4, 7); Assert.That(slice.Array, Is.SameAs(tmp)); Assert.That(slice.Offset, Is.EqualTo(4)); Assert.That(slice.Count, Is.EqualTo(7)); @@ -141,42 +165,42 @@ public void Test_Slice_Create_With_Byte_Array() Array.Copy(tmp, 4, buf, 0, 7); Assert.That(slice.GetBytes(), Is.EqualTo(buf)); - Assert.That(Slice.Create(default(byte[])), Is.EqualTo(Slice.Nil)); - Assert.That(Slice.Create(new byte[0]), Is.EqualTo(Slice.Empty)); + Assert.That(default(byte[]).AsSlice(), Is.EqualTo(Slice.Nil)); + Assert.That(new byte[0].AsSlice(), Is.EqualTo(Slice.Empty)); } [Test] public void Test_Slice_Create_Validates_Arguments() { // null array only allowed with offset=0 and count=0 - Assert.That(() => Slice.Create(null, 0, 1), Throws.InstanceOf()); - Assert.That(() => Slice.Create(null, 1, 0), Throws.InstanceOf()); - Assert.That(() => Slice.Create(null, 1, 1), Throws.InstanceOf()); + // ReSharper disable AssignNullToNotNullAttribute + Assert.That(() => default(byte[]).AsSlice(0, 1), Throws.InstanceOf()); + Assert.That(() => default(byte[]).AsSlice(1, 0), Throws.Nothing, "Count 0 ignores offset"); + Assert.That(() => default(byte[]).AsSlice(1, 1), Throws.InstanceOf()); + // ReSharper restore 
AssignNullToNotNullAttribute // empty array only allowed with offset=0 and count=0 - Assert.That(() => Slice.Create(new byte[0], 0, 1), Throws.InstanceOf()); - Assert.That(() => Slice.Create(new byte[0], 1, 0), Throws.InstanceOf()); - Assert.That(() => Slice.Create(new byte[0], 1, 1), Throws.InstanceOf()); + Assert.That(() => new byte[0].AsSlice(0, 1), Throws.InstanceOf()); + Assert.That(() => new byte[0].AsSlice(1, 0), Throws.Nothing, "Count 0 ignores offset"); + Assert.That(() => new byte[0].AsSlice(1, 1), Throws.InstanceOf()); // last item must fit in the buffer - Assert.That(() => Slice.Create(new byte[3], 0, 4), Throws.InstanceOf()); - Assert.That(() => Slice.Create(new byte[3], 1, 3), Throws.InstanceOf()); - Assert.That(() => Slice.Create(new byte[3], 3, 1), Throws.InstanceOf()); + Assert.That(() => new byte[3].AsSlice(0, 4), Throws.InstanceOf()); + Assert.That(() => new byte[3].AsSlice(1, 3), Throws.InstanceOf()); + Assert.That(() => new byte[3].AsSlice(3, 1), Throws.InstanceOf()); // negative arguments - //TODO: should we allow negative indexing where Slice.Create(..., -1, 1) would mean "the last byte" ? 
- Assert.That(() => Slice.Create(new byte[3], -1, 1), Throws.InstanceOf()); - Assert.That(() => Slice.Create(new byte[3], 0, -1), Throws.InstanceOf()); - Assert.That(() => Slice.Create(new byte[3], -1, -1), Throws.InstanceOf()); + Assert.That(() => new byte[3].AsSlice(-1, 1), Throws.InstanceOf()); + Assert.That(() => new byte[3].AsSlice(0, -1), Throws.InstanceOf()); + Assert.That(() => new byte[3].AsSlice(-1, -1), Throws.InstanceOf()); } [Test] public void Test_Slice_Create_With_ArraySegment() { - Slice slice; byte[] tmp = Guid.NewGuid().ToByteArray(); - slice = Slice.Create(new ArraySegment(tmp)); + Slice slice = new ArraySegment(tmp).AsSlice(); Assert.That(slice.Array, Is.SameAs(tmp)); Assert.That(slice.Offset, Is.EqualTo(0)); Assert.That(slice.Count, Is.EqualTo(tmp.Length)); @@ -184,7 +208,7 @@ public void Test_Slice_Create_With_ArraySegment() Assert.That(slice.GetBytes(), Is.EqualTo(tmp)); Assert.That(slice.GetBytes(), Is.Not.SameAs(tmp)); - slice = Slice.Create(new ArraySegment(tmp, 4, 7)); + slice = new ArraySegment(tmp, 4, 7).AsSlice(); Assert.That(slice.Array, Is.SameAs(tmp)); Assert.That(slice.Offset, Is.EqualTo(4)); Assert.That(slice.Count, Is.EqualTo(7)); @@ -192,17 +216,16 @@ public void Test_Slice_Create_With_ArraySegment() Array.Copy(tmp, 4, buf, 0, 7); Assert.That(slice.GetBytes(), Is.EqualTo(buf)); - Assert.That(Slice.Create(default(ArraySegment)), Is.EqualTo(Slice.Nil)); - Assert.That(Slice.Create(new ArraySegment(new byte[0])), Is.EqualTo(Slice.Empty)); + Assert.That(default(ArraySegment).AsSlice(), Is.EqualTo(Slice.Nil)); + Assert.That(new ArraySegment(new byte[0]).AsSlice(), Is.EqualTo(Slice.Empty)); } [Test] public void Test_Slice_Pseudo_Random() { - Slice slice; var rng = new Random(); - slice = Slice.Random(rng, 16); + Slice slice = Slice.Random(rng, 16); Assert.That(slice.Array, Is.Not.Null); Assert.That(slice.Array.Length, Is.GreaterThanOrEqualTo(16)); Assert.That(slice.Offset, Is.EqualTo(0)); @@ -212,18 +235,18 @@ public void 
Test_Slice_Pseudo_Random() Assert.That(Slice.Random(rng, 0), Is.EqualTo(Slice.Empty)); - Assert.That(() => Slice.Random(default(System.Random), 16), Throws.InstanceOf()); - Assert.That(() => Slice.Random(rng, -1), Throws.InstanceOf()); + // ReSharper disable once AssignNullToNotNullAttribute + Assert.That(() => Slice.Random(default(Random), 16), Throws.ArgumentNullException); + Assert.That(() => Slice.Random(rng, -1), Throws.InstanceOf()); } [Test] public void Test_Slice_Cryptographic_Random() { - Slice slice; var rng = System.Security.Cryptography.RandomNumberGenerator.Create(); // normal - slice = Slice.Random(rng, 16); + Slice slice = Slice.Random(rng, 16); Assert.That(slice.Array, Is.Not.Null); Assert.That(slice.Array.Length, Is.GreaterThanOrEqualTo(16)); Assert.That(slice.Offset, Is.EqualTo(0)); @@ -242,32 +265,102 @@ public void Test_Slice_Cryptographic_Random() } Assert.That(Slice.Random(rng, 0), Is.EqualTo(Slice.Empty)); - Assert.That(() => Slice.Random(default(System.Security.Cryptography.RandomNumberGenerator), 16), Throws.InstanceOf()); + // ReSharper disable once AssignNullToNotNullAttribute + Assert.That(() => Slice.Random(default(System.Security.Cryptography.RandomNumberGenerator), 16), Throws.ArgumentNullException); Assert.That(() => Slice.Random(rng, -1), Throws.InstanceOf()); } [Test] - public void Test_Slice_FromAscii() + public void Test_Slice_FromStringAscii() + { + Assert.That(Slice.FromStringAscii(default(string)).GetBytes(), Is.Null); + Assert.That(Slice.FromStringAscii(string.Empty).GetBytes(), Is.EqualTo(new byte[0])); + Assert.That(Slice.FromStringAscii("A").GetBytes(), Is.EqualTo(new byte[] { 0x41 })); + Assert.That(Slice.FromStringAscii("AB").GetBytes(), Is.EqualTo(new byte[] { 0x41, 0x42 })); + Assert.That(Slice.FromStringAscii("ABC").GetBytes(), Is.EqualTo(new byte[] { 0x41, 0x42, 0x43 })); + Assert.That(Slice.FromStringAscii("ABCD").GetBytes(), Is.EqualTo(new byte[] { 0x41, 0x42, 0x43, 0x44 })); + 
Assert.That(Slice.FromStringAscii("\xFF/ABC").GetBytes(), Is.EqualTo(new byte[] { 0xFF, 0x2F, 0x41, 0x42, 0x43 })); + Assert.That(Slice.FromStringAscii("héllô").GetBytes(), Is.EqualTo(new byte[] { (byte)'h', 0xE9, (byte)'l', (byte)'l', 0xF4 })); + Assert.That(Slice.FromStringAscii("This is a test of the emergency encoding system").GetBytes(), Is.EqualTo(Encoding.ASCII.GetBytes("This is a test of the emergency encoding system"))); + + // if the string contains non-ASCII chars, it would be corrupted so FromStringAscii() should throw + // note: the line below should contain two kanjis. If your editor displays '??' or squares, it is probably not able to display unicode chars properly + Assert.That(() => Slice.FromStringAscii("hello 世界"), Throws.Exception, "String that contains code points >= 0x80 should throw"); + } + + [Test] + public void Test_Slice_ToStringAscii() + { + Assert.That(Slice.Nil.ToStringAscii(), Is.Null); + Assert.That(Slice.Empty.ToStringAscii(), Is.EqualTo(String.Empty)); + Assert.That(new byte[] { 0x41 }.AsSlice().ToStringAscii(), Is.EqualTo("A")); + Assert.That(new byte[] { 0x41, 0x42 }.AsSlice().ToStringAscii(), Is.EqualTo("AB")); + Assert.That(new byte[] { 0x41, 0x42, 0x43 }.AsSlice().ToStringAscii(), Is.EqualTo("ABC")); + Assert.That(new byte[] { 0x41, 0x42, 0x43, 0x44 }.AsSlice().ToStringAscii(), Is.EqualTo("ABCD")); + Assert.That(new byte[] { 0x7F, 0x00, 0x1F }.AsSlice().ToStringAscii(), Is.EqualTo("\x7F\x00\x1F")); + Assert.That(new byte[] { 0x41, 0x42, 0x43, 0x44, 0x45, 0x46 }.AsSlice(2, 3).ToStringAscii(), Is.EqualTo("CDE")); + Assert.That(Encoding.ASCII.GetBytes("This is a test of the emergency encoding system").AsSlice().ToStringAscii(), Is.EqualTo("This is a test of the emergency encoding system")); + + // If the slice contains anything other than 7-bit ASCII, it should throw!
+ Assert.That(() => new byte[] { 0xFF, 0x41, 0x42, 0x43 }.AsSlice().ToStringAscii(), Throws.Exception, "\\xFF is not valid in 7-bit ASCII strings!"); + Assert.That(() => Encoding.Default.GetBytes("héllô").AsSlice().ToStringAscii(), Throws.Exception, "String that contains code points >= 0x80 should throw"); + Assert.That(() => Encoding.UTF8.GetBytes("héllo 世界").AsSlice().ToStringAscii(), Throws.Exception, "String that contains code points >= 0x80 should throw"); + } + + [Test] + public void Test_Slice_FromByteString() { - Assert.That(Slice.FromAscii(default(string)).GetBytes(), Is.Null); - Assert.That(Slice.FromAscii(String.Empty).GetBytes(), Is.EqualTo(new byte[0])); - Assert.That(Slice.FromAscii("ABC").GetBytes(), Is.EqualTo(new byte[] { 0x41, 0x42, 0x43 })); + Assert.That(Slice.FromByteString(default(string)).GetBytes(), Is.Null); + Assert.That(Slice.FromByteString(string.Empty).GetBytes(), Is.EqualTo(new byte[0])); + Assert.That(Slice.FromByteString("ABC").GetBytes(), Is.EqualTo(new [] { (byte) 'A', (byte) 'B', (byte) 'C' })); + Assert.That(Slice.FromByteString("\xFF/ABC").GetBytes(), Is.EqualTo(new [] { (byte) 0xFF, (byte) '/', (byte) 'A', (byte) 'B', (byte) 'C' })); + Assert.That(Slice.FromByteString("héllô").GetBytes(), Is.EqualTo(new byte[] { (byte)'h', 0xE9, (byte)'l', (byte)'l', 0xF4 })); + + // if the caller likes to live dangerously and calls it anyway, then the data should be corrupted + var slice = Slice.FromByteString("hello 世界"); // DON'T EVER DO THAT!
+ Assume.That('世' & 0xFF, Is.EqualTo(0x16)); + Assume.That('界' & 0xFF, Is.EqualTo(0x4C)); + Assert.That(slice, Is.EqualTo(Slice.Unescape("hello <16><4C>"))); + Assert.That(slice.ToByteString(), Is.EqualTo("hello \x16L"), "non-ASCII chars should be corrupted after decoding"); + Assert.That(slice.Count, Is.EqualTo(8)); + + } + + [Test] + public void Test_Slice_FromStringAnsi() + { + Assert.That(Slice.FromStringAnsi(default(string)).GetBytes(), Is.Null); + Assert.That(Slice.FromStringAnsi(string.Empty).GetBytes(), Is.EqualTo(new byte[0])); + Assert.That(Slice.FromStringAnsi("ABC").GetBytes(), Is.EqualTo(new byte[] { 0x41, 0x42, 0x43 })); + Assert.That(Slice.FromStringAnsi("\xFF/ABC").GetBytes(), Is.EqualTo(new[] { (byte)0xFF, (byte)'/', (byte)'A', (byte)'B', (byte)'C' })); + Assert.That(Slice.FromStringAnsi("héllô").GetBytes(), Is.EqualTo(Encoding.Default.GetBytes("héllô"))); //note: this depends on your OS locale! // if the string contains non-ASCII chars, it will be corrupted // note: the line below should contain two kanjis. If your editor displays '??' or squares, it is probably not able to display unicode chars properly - var slice = Slice.FromAscii("hello 世界"); // 8 'letters' - Assert.That(slice.GetBytes(), Is.EqualTo(Encoding.Default.GetBytes("hello 世界"))); - Assert.That(slice.ToAscii(), Is.EqualTo("hello ??"), "non-ASCII chars should be converted to '?'"); - Assert.That(slice.Count, Is.EqualTo(8)); + var slice = Slice.FromStringAnsi("hello 世界"); // 8 'letters' + Assert.That(slice.GetBytes(), Is.EqualTo(Encoding.Default.GetBytes("hello 世界"))); //note: this depends on your OS locale! 
+ Assert.That(slice.ToStringAnsi(), Is.EqualTo("hello ??"), "non-ANSI chars should be converted to '?'"); + } + + [Test] + public void Test_Slice_ToStringAnsi() + { + Assert.That(Slice.Nil.ToStringAnsi(), Is.Null); + Assert.That(Slice.Empty.ToStringAnsi(), Is.EqualTo(String.Empty)); + Assert.That(new[] { (byte) 'A', (byte) 'B', (byte) 'C' }.AsSlice().ToStringAnsi(), Is.EqualTo("ABC")); + Assert.That(Encoding.Default.GetBytes("héllô").AsSlice().ToStringAnsi(), Is.EqualTo("héllô")); //note: this depends on your OS locale! + Assert.That(new[] { (byte) 0xFF, (byte) '/', (byte) 'A', (byte) 'B', (byte) 'C' }.AsSlice().ToStringAnsi(), Is.EqualTo("\xFF/ABC")); - //REVIEW: should FromAscii() throw an exception on non-ASCII chars? It will silently corrupt strings if nobody checks the value.... + // if the string contains non-ANSI chars, it will be corrupted + // note: the line below should contain two kanjis. If your editor displays '??' or squares, it is probably not able to display unicode chars properly + Assert.That(Encoding.UTF8.GetBytes("héllô 世界").AsSlice().ToStringAnsi(), Is.EqualTo("h\xC3\xA9ll\xC3\xB4 \xE4\xB8\u2013\xE7\u2022\u0152")); //note: this may change depending on your locale! 
} [Test] public void Test_Slice_FromString_Uses_UTF8() { Assert.That(Slice.FromString(default(string)).GetBytes(), Is.Null); - Assert.That(Slice.FromString(String.Empty).GetBytes(), Is.EqualTo(new byte[0])); + Assert.That(Slice.FromString(string.Empty).GetBytes(), Is.EqualTo(new byte[0])); Assert.That(Slice.FromString("ABC").GetBytes(), Is.EqualTo(new byte[] { 0x41, 0x42, 0x43 })); Assert.That(Slice.FromString("é").GetBytes(), Is.EqualTo(new byte[] { 0xC3, 0xA9 })); @@ -277,364 +370,1179 @@ public void Test_Slice_FromString_Uses_UTF8() Assert.That(slice.GetBytes(), Is.EqualTo(Encoding.UTF8.GetBytes("héllø 世界"))); Assert.That(slice.ToUnicode(), Is.EqualTo("héllø 世界"), "non-ASCII chars should not be corrupted"); Assert.That(slice.Count, Is.EqualTo(14)); + + // UTF8 does not map \xFF or \xFE directly to a single byte (but at least it should round-trip) + Assert.That(Slice.FromString("\xFF").GetBytes(), Is.EqualTo(new byte[] { 0xC3, 0xBF })); + Assert.That(Slice.FromString("\xFE").GetBytes(), Is.EqualTo(new byte[] { 0xC3, 0xBE })); + Assert.That(new byte[] { 0xC3, 0xBF }.AsSlice().ToUnicode(), Is.EqualTo("\xFF")); + Assert.That(new byte[] { 0xC3, 0xBE }.AsSlice().ToUnicode(), Is.EqualTo("\xFE")); + } + + [Test] + public void Test_Slice_FromStringUtf8() + { + Assert.That(Slice.FromStringUtf8(default(string)).GetBytes(), Is.Null); + Assert.That(Slice.FromStringUtf8(string.Empty).GetBytes(), Is.EqualTo(new byte[0])); + Assert.That(Slice.FromStringUtf8("ABC").GetBytes(), Is.EqualTo(new byte[] { 0x41, 0x42, 0x43 })); + Assert.That(Slice.FromStringUtf8("é").GetBytes(), Is.EqualTo(new byte[] { 0xC3, 0xA9 })); + + // if the string contains UTF-8 characters, it should be encoded properly + // note: the line below should contain two kanjis. If your editor displays '??' 
or squares, it is probably not able to display unicode chars properly + var slice = Slice.FromStringUtf8("héllø 世界"); // 8 'letters' + Assert.That(slice.GetBytes(), Is.EqualTo(Encoding.UTF8.GetBytes("héllø 世界"))); + Assert.That(slice.ToStringUtf8(), Is.EqualTo("héllø 世界"), "non-ASCII chars should not be corrupted"); + Assert.That(slice.ToUnicode(), Is.EqualTo("héllø 世界"), "non-ASCII chars should not be corrupted"); + Assert.That(slice.Count, Is.EqualTo(14)); + + // UTF8 does not map \xFF or \xFE directly to a single byte (but at least it should round-trip) + Assert.That(Slice.FromStringUtf8("\xFF").GetBytes(), Is.EqualTo(new byte[] { 0xC3, 0xBF })); + Assert.That(Slice.FromStringUtf8("\xFE").GetBytes(), Is.EqualTo(new byte[] { 0xC3, 0xBE })); + Assert.That(new byte[] { 0xC3, 0xBF }.AsSlice().ToStringUtf8(), Is.EqualTo("\xFF")); + Assert.That(new byte[] { 0xC3, 0xBF }.AsSlice().ToUnicode(), Is.EqualTo("\xFF")); + Assert.That(new byte[] { 0xC3, 0xBE }.AsSlice().ToStringUtf8(), Is.EqualTo("\xFE")); + Assert.That(new byte[] { 0xC3, 0xBE }.AsSlice().ToUnicode(), Is.EqualTo("\xFE")); + } + + [Test] + public void Test_Slice_ToStringUtf8() + { + Assert.That(Slice.Nil.ToStringUtf8(), Is.Null); + Assert.That(Slice.Empty.ToStringUtf8(), Is.EqualTo(String.Empty)); + Assert.That(new[] { (byte) 'A', (byte) 'B', (byte) 'C' }.AsSlice().ToStringUtf8(), Is.EqualTo("ABC")); + Assert.That(Encoding.UTF8.GetBytes("héllô").AsSlice().ToStringUtf8(), Is.EqualTo("héllô")); //note: this depends on your OS locale! + Assert.That(Encoding.UTF8.GetBytes("世界").AsSlice().ToStringUtf8(), Is.EqualTo("世界")); + + // should remove the bom! 
+ Assert.That(new byte[] { 0xEF, 0xBB, 0xBF, (byte) 'A', (byte) 'B', (byte) 'C' }.AsSlice().ToStringUtf8(), Is.EqualTo("ABC"), "BOM should be removed"); + Assert.That(new byte[] { 0xEF, 0xBB, 0xBF }.AsSlice().ToStringUtf8(), Is.EqualTo(String.Empty), "BOM should also be removed for empty string"); + Assert.That(new byte[] { 0xEF, 0xBB, 0xBF, 0xEF, 0xBB, 0xBF, (byte) 'A', (byte) 'B', (byte) 'C' }.AsSlice().ToStringUtf8(), Is.EqualTo("\uFEFFABC"), "Only one BOM should be removed"); + + // custom case for 0xFF and 0xFE + Assert.That(new byte[] { 0xFF, (byte) '/', (byte) 'A', (byte) 'B', (byte) 'C' }.AsSlice().ToStringAnsi(), Is.EqualTo("\xFF/ABC")); + Assert.That(new byte[] { 0xFE, (byte) '/', (byte) 'A', (byte) 'B', (byte) 'C' }.AsSlice().ToStringAnsi(), Is.EqualTo("\xFE/ABC")); + + // corrupted UTF-8 + Assert.That(() => new byte[] { 0xEF, 0xBB }.AsSlice().ToStringUtf8(), Throws.Exception, "Partial BOM should fail to decode"); + Assert.That(() => new byte[] { (byte) 'A', 0xc3, 0x28, (byte) 'B' }.AsSlice().ToStringUtf8(), Throws.Exception, "Invalid 2-byte sequence"); + Assert.That(() => new byte[] { (byte) 'A', 0xe2, 0x28, 0xa1, (byte) 'B' }.AsSlice().ToStringUtf8(), Throws.Exception, "Invalid 3-byte sequence"); + Assert.That(() => new byte[] { (byte) 'A', 0xf0, 0x28, 0x8c, 0x28, (byte) 'B' }.AsSlice().ToStringUtf8(), Throws.Exception, "Invalid 4-byte sequence"); + Assert.That(() => new byte[] { (byte) 'A', 0xf0, 0x28, /*..SNIP..*/ }.AsSlice().ToStringUtf8(), Throws.Exception, "Truncated 4-byte sequence"); + } + + [Test] + public void Test_Slice_FromStringUtf8WithBom() + { + Assert.That(Slice.FromStringUtf8WithBom(default(string)).GetBytes(), Is.Null); + Assert.That(Slice.FromStringUtf8WithBom(string.Empty).GetBytes(), Is.EqualTo(new byte[] { 0xEF, 0xBB, 0xBF })); + Assert.That(Slice.FromStringUtf8WithBom("ABC").GetBytes(), Is.EqualTo(new byte[] { 0xEF, 0xBB, 0xBF, 0x41, 0x42, 0x43 })); + Assert.That(Slice.FromStringUtf8WithBom("é").GetBytes(), Is.EqualTo(new byte[] { 
0xEF, 0xBB, 0xBF, 0xC3, 0xA9 })); + + // if the string contains UTF-8 characters, it should be encoded properly + // note: the line below should contain two kanjis. If your editor displays '??' or squares, it is probably not able to display unicode chars properly + var slice = Slice.FromStringUtf8WithBom("héllø 世界"); // 8 'letters' + Assert.That(slice.GetBytes(), Is.EqualTo(new byte[] { 0xEF, 0xBB, 0xBF }.Concat(Encoding.UTF8.GetBytes("héllø 世界")).ToArray())); + Assert.That(slice.ToStringUtf8(), Is.EqualTo("héllø 世界"), "The BOM should be removed"); + Assert.That(slice.ToUnicode(), Is.EqualTo("\xFEFFhéllø 世界"), "The BOM should be preserved"); + Assert.That(slice.Count, Is.EqualTo(3 + 14)); + + // UTF8 does not map \xFF or \xFE directly to a single byte (but at least it should round-trip) + Assert.That(Slice.FromStringUtf8WithBom("\xFF").GetBytes(), Is.EqualTo(new byte[] { 0xEF, 0xBB, 0xBF, 0xC3, 0xBF })); + Assert.That(Slice.FromStringUtf8WithBom("\xFE").GetBytes(), Is.EqualTo(new byte[] { 0xEF, 0xBB, 0xBF, 0xC3, 0xBE })); + Assert.That(new byte[] { 0xEF, 0xBB, 0xBF, 0xC3, 0xBF }.AsSlice().ToStringUtf8(), Is.EqualTo("\xFF")); + Assert.That(new byte[] { 0xEF, 0xBB, 0xBF, 0xC3, 0xBF }.AsSlice().ToUnicode(), Is.EqualTo("\uFEFF\xFF")); + Assert.That(new byte[] { 0xEF, 0xBB, 0xBF, 0xC3, 0xBE }.AsSlice().ToStringUtf8(), Is.EqualTo("\xFE")); + Assert.That(new byte[] { 0xEF, 0xBB, 0xBF, 0xC3, 0xBE }.AsSlice().ToUnicode(), Is.EqualTo("\uFEFF\xFE")); + } + + [Test] + public void Test_Slice_FromChar_Uses_UTF8() + { + // from 0 to 127 is regular single-byte ASCII + Assert.That(Slice.FromChar('\0').GetBytes(), Is.EqualTo(new byte[] { 0 })); + Assert.That(Slice.FromChar('\x01').GetBytes(), Is.EqualTo(new byte[] { 1 })); + Assert.That(Slice.FromChar('0').GetBytes(), Is.EqualTo(new byte[] { 48 })); + Assert.That(Slice.FromChar('A').GetBytes(), Is.EqualTo(new byte[] { 65 })); + Assert.That(Slice.FromChar('a').GetBytes(), Is.EqualTo(new byte[] { 97 })); + 
Assert.That(Slice.FromChar('~').GetBytes(), Is.EqualTo(new byte[] { 126 })); + Assert.That(Slice.FromChar('\x7F').GetBytes(), Is.EqualTo(new byte[] { 127 })); + + // 128 and above is multi-byte UTF-8 + Assert.That(Slice.FromChar('\x80').GetBytes(), Is.EqualTo(new byte[] { 0xC2, 0x80 })); + Assert.That(Slice.FromChar('é').GetBytes(), Is.EqualTo(new byte[] { 0xC3, 0xA9 })); + Assert.That(Slice.FromChar('\u221E').GetBytes(), Is.EqualTo(new byte[] { 0xE2, 0x88, 0x9E })); + Assert.That(Slice.FromChar('\uFFFE').GetBytes(), Is.EqualTo(new byte[] { 0xEF, 0xBF, 0xBE})); } + #region Signed... + + #region 24-bits + + #region Little-Endian + + [Test] + public void Test_Slice_ToInt24() + { + Assert.That(new byte[] { 0x12 }.AsSlice().ToInt24(), Is.EqualTo(0x12)); + Assert.That(new byte[] { 0x34, 0x12 }.AsSlice().ToInt24(), Is.EqualTo(0x1234)); + Assert.That(new byte[] { 0x34, 0x12, 0x00 }.AsSlice().ToInt24(), Is.EqualTo(0x1234)); + Assert.That(new byte[] { 0x56, 0x34, 0x12 }.AsSlice().ToInt24(), Is.EqualTo(0x123456)); + + Assert.That(new byte[] { }.AsSlice().ToInt24(), Is.EqualTo(0)); + Assert.That(new byte[] { 0 }.AsSlice().ToInt24(), Is.EqualTo(0)); + Assert.That(new byte[] { 127 }.AsSlice().ToInt24(), Is.EqualTo(127)); + Assert.That(new byte[] { 255 }.AsSlice().ToInt24(), Is.EqualTo(255)); + Assert.That(new byte[] { 0, 1 }.AsSlice().ToInt24(), Is.EqualTo(256)); + Assert.That(new byte[] { 255, 127 }.AsSlice().ToInt24(), Is.EqualTo(32767)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToInt24(), Is.EqualTo(65535)); + Assert.That(new byte[] { 0, 0, 1 }.AsSlice().ToInt24(), Is.EqualTo(1 << 16)); + Assert.That(new byte[] { 255, 255, 127 }.AsSlice().ToInt24(), Is.EqualTo((1 << 23) - 1)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToInt24(), Is.EqualTo((1 << 24) - 1)); + + Assert.That(() => Slice.Create(4).ToInt24(), Throws.InstanceOf()); + } + + #endregion + + #region Big Endian + + [Test] + public void Test_Slice_ToInt24BE() + { + Assert.That(new byte[] { 0x12 
}.AsSlice().ToInt24BE(), Is.EqualTo(0x12)); + Assert.That(new byte[] { 0x12, 0x34 }.AsSlice().ToInt24BE(), Is.EqualTo(0x1234)); + Assert.That(new byte[] { 0x12, 0x34, 0x56 }.AsSlice().ToInt24BE(), Is.EqualTo(0x123456)); + + Assert.That(new byte[] { }.AsSlice().ToInt24BE(), Is.EqualTo(0)); + Assert.That(new byte[] { 0 }.AsSlice().ToInt24BE(), Is.EqualTo(0)); + Assert.That(new byte[] { 127 }.AsSlice().ToInt24BE(), Is.EqualTo(127)); + Assert.That(new byte[] { 255 }.AsSlice().ToInt24BE(), Is.EqualTo(255)); + Assert.That(new byte[] { 1, 0 }.AsSlice().ToInt24BE(), Is.EqualTo(256)); + Assert.That(new byte[] { 127, 255 }.AsSlice().ToInt24BE(), Is.EqualTo(32767)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToInt24BE(), Is.EqualTo(65535)); + Assert.That(new byte[] { 1, 0, 0 }.AsSlice().ToInt24BE(), Is.EqualTo(1 << 16)); + Assert.That(new byte[] { 127, 255, 255 }.AsSlice().ToInt24BE(), Is.EqualTo((1 << 23) - 1)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToInt24BE(), Is.EqualTo((1 << 24) - 1)); + + Assert.That(() => Slice.Create(4).ToInt24BE(), Throws.InstanceOf()); + } + + #endregion + + #endregion + + #region 32-bits + + #region Little-Endian + [Test] public void Test_Slice_FromInt32() { // 32-bit integers should be encoded in little endian, and with 1, 2 or 4 bytes - // 0x12 -> { 12 } - // 0x1234 -> { 34 12 } - // 0x123456 -> { 56 34 12 00 } - // 0x12345678 -> { 78 56 34 12 } - Assert.That(Slice.FromInt32(0x12).ToHexaString(), Is.EqualTo("12")); - Assert.That(Slice.FromInt32(0x1234).ToHexaString(), Is.EqualTo("3412")); - Assert.That(Slice.FromInt32(0x123456).ToHexaString(), Is.EqualTo("56341200")); - Assert.That(Slice.FromInt32(0x12345678).ToHexaString(), Is.EqualTo("78563412")); + void Verify(int value, string expected) + { + Assert.That(Slice.FromInt32(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0x12, "12"); + Verify(0x1234, "3412"); + Verify(0x123456, "563412"); + Verify(0x12345678, "78563412"); + + 
Verify(0, "00"); + Verify(1, "01"); + Verify(255, "FF"); + Verify(256, "0001"); + Verify(65535, "FFFF"); + Verify(65536, "000001"); + Verify(16777215, "FFFFFF"); + Verify(16777216, "00000001"); + Verify(int.MaxValue, "FFFFFF7F"); + Verify(int.MinValue, "00000080"); + } + + [Test] + public void Test_Slice_FromFixed32() + { + // FromFixed32 always produce 4 bytes and uses Little Endian + + Assert.That(Slice.FromFixed32(0).GetBytes(), Is.EqualTo(new byte[4])); + Assert.That(Slice.FromFixed32(1).GetBytes(), Is.EqualTo(new byte[] { 1, 0, 0, 0 })); + Assert.That(Slice.FromFixed32(256).GetBytes(), Is.EqualTo(new byte[] { 0, 1, 0, 0 })); + Assert.That(Slice.FromFixed32(65536).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 1, 0 })); + Assert.That(Slice.FromFixed32(16777216).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 1 })); + Assert.That(Slice.FromFixed32(short.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 127, 0, 0 })); + Assert.That(Slice.FromFixed32(int.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 127 })); - Assert.That(Slice.FromInt32(0).ToHexaString(), Is.EqualTo("00")); - Assert.That(Slice.FromInt32(1).ToHexaString(), Is.EqualTo("01")); - Assert.That(Slice.FromInt32(255).ToHexaString(), Is.EqualTo("ff")); - Assert.That(Slice.FromInt32(256).ToHexaString(), Is.EqualTo("0001")); - Assert.That(Slice.FromInt32(65535).ToHexaString(), Is.EqualTo("ffff")); - Assert.That(Slice.FromInt32(65536).ToHexaString(), Is.EqualTo("00000100")); - Assert.That(Slice.FromInt32(int.MaxValue).ToHexaString(), Is.EqualTo("ffffff7f")); - Assert.That(Slice.FromInt32(int.MinValue).ToHexaString(), Is.EqualTo("00000080")); + Assert.That(Slice.FromFixed32(-1).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 255 })); + Assert.That(Slice.FromFixed32(-256).GetBytes(), Is.EqualTo(new byte[] { 0, 255, 255, 255 })); + Assert.That(Slice.FromFixed32(-65536).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 255, 255 })); + Assert.That(Slice.FromFixed32(-16777216).GetBytes(), Is.EqualTo(new byte[] { 
0, 0, 0, 255 })); + Assert.That(Slice.FromFixed32(int.MinValue).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 128 })); + + var rnd = new Random(); + for (int i = 0; i < 1000; i++) + { + int x = rnd.Next() * (rnd.Next(2) == 0 ? +1 : -1); + Slice s = Slice.FromFixed32(x); + Assert.That(s.Count, Is.EqualTo(4)); + Assert.That(s.ToInt32(), Is.EqualTo(x)); + } } [Test] public void Test_Slice_ToInt32() { - Assert.That(Slice.Create(new byte[] { 0x12 }).ToInt32(), Is.EqualTo(0x12)); - Assert.That(Slice.Create(new byte[] { 0x34, 0x12 }).ToInt32(), Is.EqualTo(0x1234)); - Assert.That(Slice.Create(new byte[] { 0x56, 0x34, 0x12 }).ToInt32(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x56, 0x34, 0x12, 0x00 }).ToInt32(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x78, 0x56, 0x34, 0x12 }).ToInt32(), Is.EqualTo(0x12345678)); + Assert.That(new byte[] { 0x12 }.AsSlice().ToInt32(), Is.EqualTo(0x12)); + Assert.That(new byte[] { 0x34, 0x12 }.AsSlice().ToInt32(), Is.EqualTo(0x1234)); + Assert.That(new byte[] { 0x56, 0x34, 0x12 }.AsSlice().ToInt32(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x56, 0x34, 0x12, 0x00 }.AsSlice().ToInt32(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToInt32(), Is.EqualTo(0x12345678)); + + Assert.That(new byte[] { }.AsSlice().ToInt32(), Is.EqualTo(0)); + Assert.That(new byte[] { 0 }.AsSlice().ToInt32(), Is.EqualTo(0)); + Assert.That(new byte[] { 255 }.AsSlice().ToInt32(), Is.EqualTo(255)); + Assert.That(new byte[] { 0, 1 }.AsSlice().ToInt32(), Is.EqualTo(256)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToInt32(), Is.EqualTo(65535)); + Assert.That(new byte[] { 0, 0, 1 }.AsSlice().ToInt32(), Is.EqualTo(1 << 16)); + Assert.That(new byte[] { 0, 0, 1, 0 }.AsSlice().ToInt32(), Is.EqualTo(1 << 16)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToInt32(), Is.EqualTo((1 << 24) - 1)); + Assert.That(new byte[] { 0, 0, 0, 1 }.AsSlice().ToInt32(), Is.EqualTo(1 << 24)); + 
Assert.That(new byte[] { 255, 255, 255, 127 }.AsSlice().ToInt32(), Is.EqualTo(int.MaxValue)); + + Assert.That(() => Slice.Create(5).ToInt32(), Throws.InstanceOf()); + } - Assert.That(Slice.Create(new byte[] { 0 }).ToInt32(), Is.EqualTo(0)); - Assert.That(Slice.Create(new byte[] { 255 }).ToInt32(), Is.EqualTo(255)); - Assert.That(Slice.Create(new byte[] { 0, 1 }).ToInt32(), Is.EqualTo(256)); - Assert.That(Slice.Create(new byte[] { 255, 255 }).ToInt32(), Is.EqualTo(65535)); - Assert.That(Slice.Create(new byte[] { 0, 0, 1 }).ToInt32(), Is.EqualTo(1 << 16)); - Assert.That(Slice.Create(new byte[] { 0, 0, 1, 0 }).ToInt32(), Is.EqualTo(1 << 16)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255 }).ToInt32(), Is.EqualTo((1 << 24) - 1)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 1 }).ToInt32(), Is.EqualTo(1 << 24)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 127 }).ToInt32(), Is.EqualTo(int.MaxValue)); + #endregion + + #region Big Endian + + [Test] + public void Test_Slice_FromInt32BE() + { + // 32-bit integers should be encoded in big endian, and with 1, 2 or 4 bytes + + void Verify(int value, string expected) + { + Assert.That(Slice.FromInt32BE(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0x12, "12"); + Verify(0x1234, "1234"); + Verify(0x123456, "123456"); + Verify(0x12345678, "12345678"); + + Verify(0, "00"); + Verify(1, "01"); + Verify(255, "FF"); + Verify(256, "0100"); + Verify(65535, "FFFF"); + Verify(65536, "010000"); + Verify(16777215, "FFFFFF"); + Verify(16777216, "01000000"); + Verify(int.MaxValue, "7FFFFFFF"); + Verify(int.MinValue, "80000000"); } [Test] - public void Test_Slice_ToInt32BE()
Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x00, 0x12, 0x34, 0x56 }).ToInt32BE(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78 }).ToInt32BE(), Is.EqualTo(0x12345678)); + // FromFixed32BE always produces 4 bytes and uses Big Endian - Assert.That(Slice.Create(new byte[] { 0 }).ToInt32BE(), Is.EqualTo(0)); - Assert.That(Slice.Create(new byte[] { 255 }).ToInt32BE(), Is.EqualTo(255)); - Assert.That(Slice.Create(new byte[] { 1, 0 }).ToInt32BE(), Is.EqualTo(256)); - Assert.That(Slice.Create(new byte[] { 255, 255 }).ToInt32BE(), Is.EqualTo(65535)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0 }).ToInt32BE(), Is.EqualTo(1 << 16)); - Assert.That(Slice.Create(new byte[] { 0, 1, 0, 0 }).ToInt32BE(), Is.EqualTo(1 << 16)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255 }).ToInt32BE(), Is.EqualTo((1 << 24) - 1)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0 }).ToInt32BE(), Is.EqualTo(1 << 24)); - Assert.That(Slice.Create(new byte[] { 127, 255, 255, 255 }).ToInt32BE(), Is.EqualTo(int.MaxValue)); + Assert.That(Slice.FromFixed32BE(0).GetBytes(), Is.EqualTo(new byte[4])); + Assert.That(Slice.FromFixed32BE(1).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 1 })); + Assert.That(Slice.FromFixed32BE(256).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 1, 0 })); + Assert.That(Slice.FromFixed32BE(65536).GetBytes(), Is.EqualTo(new byte[] { 0, 1, 0, 0 })); + Assert.That(Slice.FromFixed32BE(16777216).GetBytes(), Is.EqualTo(new byte[] { 1, 0, 0, 0 })); + Assert.That(Slice.FromFixed32BE(short.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 127, 255 })); + Assert.That(Slice.FromFixed32BE(int.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 127, 255, 255, 255 })); + + Assert.That(Slice.FromFixed32BE(-1).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 255 })); + Assert.That(Slice.FromFixed32BE(-256).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 0 })); + Assert.That(Slice.FromFixed32BE(-65536).GetBytes(), Is.EqualTo(new
byte[] { 255, 255, 0, 0 })); + Assert.That(Slice.FromFixed32BE(-16777216).GetBytes(), Is.EqualTo(new byte[] { 255, 0, 0, 0 })); + Assert.That(Slice.FromFixed32BE(int.MinValue).GetBytes(), Is.EqualTo(new byte[] { 128, 0, 0, 0 })); + + var rnd = new Random(); + for (int i = 0; i < 1000; i++) + { + int x = rnd.Next() * (rnd.Next(2) == 0 ? +1 : -1); + Slice s = Slice.FromFixed32BE(x); + Assert.That(s.Count, Is.EqualTo(4)); + Assert.That(s.ToInt32BE(), Is.EqualTo(x)); + } + } + + [Test] + public void Test_Slice_ToInt32BE() + { + Assert.That(new byte[] { 0x12 }.AsSlice().ToInt32BE(), Is.EqualTo(0x12)); + Assert.That(new byte[] { 0x12, 0x34 }.AsSlice().ToInt32BE(), Is.EqualTo(0x1234)); + Assert.That(new byte[] { 0x12, 0x34, 0x56 }.AsSlice().ToInt32BE(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x00, 0x12, 0x34, 0x56 }.AsSlice().ToInt32BE(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78 }.AsSlice().ToInt32BE(), Is.EqualTo(0x12345678)); + + Assert.That(new byte[] { }.AsSlice().ToInt32BE(), Is.EqualTo(0)); + Assert.That(new byte[] { 0 }.AsSlice().ToInt32BE(), Is.EqualTo(0)); + Assert.That(new byte[] { 255 }.AsSlice().ToInt32BE(), Is.EqualTo(255)); + Assert.That(new byte[] { 1, 0 }.AsSlice().ToInt32BE(), Is.EqualTo(256)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToInt32BE(), Is.EqualTo(65535)); + Assert.That(new byte[] { 1, 0, 0 }.AsSlice().ToInt32BE(), Is.EqualTo(1 << 16)); + Assert.That(new byte[] { 0, 1, 0, 0 }.AsSlice().ToInt32BE(), Is.EqualTo(1 << 16)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToInt32BE(), Is.EqualTo((1 << 24) - 1)); + Assert.That(new byte[] { 1, 0, 0, 0 }.AsSlice().ToInt32BE(), Is.EqualTo(1 << 24)); + Assert.That(new byte[] { 127, 255, 255, 255 }.AsSlice().ToInt32BE(), Is.EqualTo(int.MaxValue)); + + Assert.That(() => Slice.Create(5).ToInt32BE(), Throws.InstanceOf()); } + #endregion + + #endregion + + #region 64-bits + + #region Little-Endian + [Test] public void Test_Slice_FromInt64() { // 64-bit 
integers should be encoded in little endian, and with 1, 2, 4 or 8 bytes - // 0x12 -> { 12 } - // 0x1234 -> { 34 12 } - // 0x123456 -> { 56 34 12 00 } - // 0x12345678 -> { 78 56 34 12 } - // 0x123456789A -> { 9A 78 56 34 12 00 00 00} - // 0x123456789ABC -> { BC 9A 78 56 34 12 00 00} - // 0x123456789ABCDE -> { DE BC 9A 78 56 34 12 00} - // 0x123456789ABCDEF0 -> { F0 DE BC 9A 78 56 34 12 } - - Assert.That(Slice.FromInt64(0x12).ToHexaString(), Is.EqualTo("12")); - Assert.That(Slice.FromInt64(0x1234).ToHexaString(), Is.EqualTo("3412")); - Assert.That(Slice.FromInt64(0x123456).ToHexaString(), Is.EqualTo("56341200")); - Assert.That(Slice.FromInt64(0x12345678).ToHexaString(), Is.EqualTo("78563412")); - Assert.That(Slice.FromInt64(0x123456789A).ToHexaString(), Is.EqualTo("9a78563412000000")); - Assert.That(Slice.FromInt64(0x123456789ABC).ToHexaString(), Is.EqualTo("bc9a785634120000")); - Assert.That(Slice.FromInt64(0x123456789ABCDE).ToHexaString(), Is.EqualTo("debc9a7856341200")); - Assert.That(Slice.FromInt64(0x123456789ABCDEF0).ToHexaString(), Is.EqualTo("f0debc9a78563412")); - - Assert.That(Slice.FromInt64(0).ToHexaString(), Is.EqualTo("00")); - Assert.That(Slice.FromInt64(1).ToHexaString(), Is.EqualTo("01")); - Assert.That(Slice.FromInt64(255).ToHexaString(), Is.EqualTo("ff")); - Assert.That(Slice.FromInt64(256).ToHexaString(), Is.EqualTo("0001")); - Assert.That(Slice.FromInt64(65535).ToHexaString(), Is.EqualTo("ffff")); - Assert.That(Slice.FromInt64(65536).ToHexaString(), Is.EqualTo("00000100")); - Assert.That(Slice.FromInt64(int.MaxValue).ToHexaString(), Is.EqualTo("ffffff7f")); - Assert.That(Slice.FromInt64(int.MinValue).ToHexaString(), Is.EqualTo("00000080ffffffff")); - Assert.That(Slice.FromInt64(1L + int.MaxValue).ToHexaString(), Is.EqualTo("0000008000000000")); - Assert.That(Slice.FromInt64(long.MaxValue).ToHexaString(), Is.EqualTo("ffffffffffffff7f")); - Assert.That(Slice.FromInt64(long.MinValue).ToHexaString(), Is.EqualTo("0000000000000080")); + void 
Verify(long value, string expected) + { + Assert.That(Slice.FromInt64(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0x12, "12"); + Verify(0x1234, "3412"); + Verify(0x123456, "563412"); + Verify(0x12345678, "78563412"); + Verify(0x123456789A, "9A78563412"); + Verify(0x123456789ABC, "BC9A78563412"); + Verify(0x123456789ABCDE, "DEBC9A78563412"); + Verify(0x123456789ABCDEF0, "F0DEBC9A78563412"); + + Verify(0, "00"); + Verify(1, "01"); + Verify(255, "FF"); + Verify(256, "0001"); + Verify(65535, "FFFF"); + Verify(65536, "000001"); + Verify(16777215, "FFFFFF"); + Verify(16777216, "00000001"); + Verify(int.MaxValue, "FFFFFF7F"); + Verify(int.MinValue, "00000080FFFFFFFF"); + Verify(1L + int.MaxValue, "00000080"); + Verify(long.MaxValue, "FFFFFFFFFFFFFF7F"); + Verify(long.MinValue, "0000000000000080"); + + } + + [Test] + public void Test_Slice_FromFixed64() + { + // FromFixed64 always produce 8 bytes and uses Little Endian + + void Verify(long value, byte[] expected) + { + Assert.That(Slice.FromFixed64(value).GetBytes(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0L, new byte[8]); + Verify(1L, new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }); + Verify(1L << 8, new byte[] { 0, 1, 0, 0, 0, 0, 0, 0 }); + Verify(1L << 16, new byte[] { 0, 0, 1, 0, 0, 0, 0, 0 }); + Verify(1L << 24, new byte[] { 0, 0, 0, 1, 0, 0, 0, 0 }); + Verify(1L << 32, new byte[] { 0, 0, 0, 0, 1, 0, 0, 0 }); + Verify(1L << 40, new byte[] { 0, 0, 0, 0, 0, 1, 0, 0 }); + Verify(1L << 48, new byte[] { 0, 0, 0, 0, 0, 0, 1, 0 }); + Verify(1L << 56, new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }); + Verify(short.MaxValue, new byte[] { 255, 127, 0, 0, 0, 0, 0, 0 }); + Verify(int.MaxValue, new byte[] { 255, 255, 255, 127, 0, 0, 0, 0 }); + Verify(long.MaxValue, new byte[] { 255, 255, 255, 255, 255, 255, 255, 127 }); + + Verify(-1L, new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }); + Verify(-256L, new byte[] { 0, 255, 255, 255, 255, 255, 255, 255 }); + 
Verify(-65536L, new byte[] { 0, 0, 255, 255, 255, 255, 255, 255 }); + Verify(-16777216L, new byte[] { 0, 0, 0, 255, 255, 255, 255, 255 }); + Verify(-4294967296L, new byte[] { 0, 0, 0, 0, 255, 255, 255, 255 }); + Verify(long.MinValue, new byte[] { 0, 0, 0, 0, 0, 0, 0, 128 }); + + var rnd = new Random(); + for (int i = 0; i < 1000; i++) + { + long x = (long)rnd.Next() * rnd.Next() * (rnd.Next(2) == 0 ? +1 : -1); + Slice s = Slice.FromFixed64(x); + Assert.That(s.Count, Is.EqualTo(8)); + Assert.That(s.ToInt64(), Is.EqualTo(x)); + } } [Test] public void Test_Slice_ToInt64() { - Assert.That(Slice.Create(new byte[] { 0x12 }).ToInt64(), Is.EqualTo(0x12)); - Assert.That(Slice.Create(new byte[] { 0x34, 0x12 }).ToInt64(), Is.EqualTo(0x1234)); - Assert.That(Slice.Create(new byte[] { 0x56, 0x34, 0x12 }).ToInt64(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x56, 0x34, 0x12, 0x00 }).ToInt64(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x78, 0x56, 0x34, 0x12 }).ToInt64(), Is.EqualTo(0x12345678)); - Assert.That(Slice.Create(new byte[] { 0x9A, 0x78, 0x56, 0x34, 0x12 }).ToInt64(), Is.EqualTo(0x123456789A)); - Assert.That(Slice.Create(new byte[] { 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }).ToInt64(), Is.EqualTo(0x123456789ABC)); - Assert.That(Slice.Create(new byte[] { 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }).ToInt64(), Is.EqualTo(0x123456789ABCDE)); - Assert.That(Slice.Create(new byte[] { 0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }).ToInt64(), Is.EqualTo(0x123456789ABCDEF0)); - - Assert.That(Slice.Create(new byte[] { 0 }).ToInt64(), Is.EqualTo(0L)); - Assert.That(Slice.Create(new byte[] { 255 }).ToInt64(), Is.EqualTo(255L)); - Assert.That(Slice.Create(new byte[] { 0, 1 }).ToInt64(), Is.EqualTo(256L)); - Assert.That(Slice.Create(new byte[] { 255, 255 }).ToInt64(), Is.EqualTo(65535L)); - Assert.That(Slice.Create(new byte[] { 0, 0, 1 }).ToInt64(), Is.EqualTo(1L << 16)); - Assert.That(Slice.Create(new byte[] { 0, 0, 1, 0 }).ToInt64(), 
Is.EqualTo(1L << 16)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255 }).ToInt64(), Is.EqualTo((1L << 24) - 1)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 1 }).ToInt64(), Is.EqualTo(1L << 24)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 0, 1 }).ToInt64(), Is.EqualTo(1L << 32)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 0, 0, 1 }).ToInt64(), Is.EqualTo(1L << 40)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 0, 0, 0, 1 }).ToInt64(), Is.EqualTo(1L << 48)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }).ToInt64(), Is.EqualTo(1L << 56)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 127 }).ToInt64(), Is.EqualTo(int.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255, 255, 255, 255, 127 }).ToInt64(), Is.EqualTo(long.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }).ToInt64(), Is.EqualTo(-1L)); + Assert.That(new byte[] { 0x12 }.AsSlice().ToInt64(), Is.EqualTo(0x12)); + Assert.That(new byte[] { 0x34, 0x12 }.AsSlice().ToInt64(), Is.EqualTo(0x1234)); + Assert.That(new byte[] { 0x56, 0x34, 0x12 }.AsSlice().ToInt64(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x56, 0x34, 0x12, 0x00 }.AsSlice().ToInt64(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToInt64(), Is.EqualTo(0x12345678)); + Assert.That(new byte[] { 0x9A, 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToInt64(), Is.EqualTo(0x123456789A)); + Assert.That(new byte[] { 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToInt64(), Is.EqualTo(0x123456789ABC)); + Assert.That(new byte[] { 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToInt64(), Is.EqualTo(0x123456789ABCDE)); + Assert.That(new byte[] { 0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToInt64(), Is.EqualTo(0x123456789ABCDEF0)); + + Assert.That(new byte[] { }.AsSlice().ToInt64(), Is.EqualTo(0L)); + Assert.That(new byte[] { 0 }.AsSlice().ToInt64(), Is.EqualTo(0L)); + Assert.That(new byte[] { 
255 }.AsSlice().ToInt64(), Is.EqualTo(255L)); + Assert.That(new byte[] { 0, 1 }.AsSlice().ToInt64(), Is.EqualTo(256L)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToInt64(), Is.EqualTo(65535L)); + Assert.That(new byte[] { 0, 0, 1 }.AsSlice().ToInt64(), Is.EqualTo(1L << 16)); + Assert.That(new byte[] { 0, 0, 1, 0 }.AsSlice().ToInt64(), Is.EqualTo(1L << 16)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToInt64(), Is.EqualTo((1L << 24) - 1)); + Assert.That(new byte[] { 0, 0, 0, 1 }.AsSlice().ToInt64(), Is.EqualTo(1L << 24)); + Assert.That(new byte[] { 0, 0, 0, 0, 1 }.AsSlice().ToInt64(), Is.EqualTo(1L << 32)); + Assert.That(new byte[] { 0, 0, 0, 0, 0, 1 }.AsSlice().ToInt64(), Is.EqualTo(1L << 40)); + Assert.That(new byte[] { 0, 0, 0, 0, 0, 0, 1 }.AsSlice().ToInt64(), Is.EqualTo(1L << 48)); + Assert.That(new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }.AsSlice().ToInt64(), Is.EqualTo(1L << 56)); + Assert.That(new byte[] { 255, 255, 255, 127 }.AsSlice().ToInt64(), Is.EqualTo(int.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255, 255, 255, 255, 127 }.AsSlice().ToInt64(), Is.EqualTo(long.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }.AsSlice().ToInt64(), Is.EqualTo(-1L)); // should validate the arguments - var x = Slice.Create(new byte[] { 0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }); + var x = new byte[] { 0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }.AsSlice(); Assert.That(() => MutateOffset(x, -1).ToInt64(), Throws.InstanceOf()); Assert.That(() => MutateCount(x, 9).ToInt64(), Throws.InstanceOf()); Assert.That(() => MutateArray(x, null).ToInt64(), Throws.InstanceOf()); } + #endregion + + #region Big-Endian + + [Test] + public void Test_Slice_FromInt64BE() + { + // 64-bit integers should be encoded in big endian, and using from 1 to 8 bytes + + void Verify(long value, string expected) + { + Assert.That(Slice.FromInt64BE(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + 
Verify(0x12, "12"); + Verify(0x1234, "1234"); + Verify(0x123456, "123456"); + Verify(0x12345678, "12345678"); + Verify(0x123456789A, "123456789A"); + Verify(0x123456789ABC, "123456789ABC"); + Verify(0x123456789ABCDE, "123456789ABCDE"); + Verify(0x123456789ABCDEF0, "123456789ABCDEF0"); + + Verify(0, "00"); + Verify(1, "01"); + Verify(127, "7F"); + Verify(128, "80"); + + Verify(1L << 8, "0100"); + Verify(1L << 16, "010000"); + Verify(1L << 24, "01000000"); + Verify(1L << 32, "0100000000"); + Verify(1L << 40, "010000000000"); + Verify(1L << 48, "01000000000000"); + Verify(1L << 56, "0100000000000000"); + + Verify((1L << 8) - 1, "FF"); + Verify((1L << 16) - 1, "FFFF"); + Verify((1L << 24) - 1, "FFFFFF"); + Verify((1L << 32) - 1, "FFFFFFFF"); + Verify((1L << 40) - 1, "FFFFFFFFFF"); + Verify((1L << 48) - 1, "FFFFFFFFFFFF"); + Verify((1L << 56) - 1, "FFFFFFFFFFFFFF"); + Verify(long.MaxValue, "7FFFFFFFFFFFFFFF"); + + Verify(-1, "FFFFFFFFFFFFFFFF"); + Verify(-2, "FFFFFFFFFFFFFFFE"); + Verify(-256, "FFFFFFFFFFFFFF00"); + Verify(-65536, "FFFFFFFFFFFF0000"); + Verify(-16777216, "FFFFFFFFFF000000"); + Verify(int.MinValue, "FFFFFFFF80000000"); + Verify(long.MinValue, "8000000000000000"); + + } + + [Test] + public void Test_Slice_FromFixed64BE() + { + // FromFixed64BE always produces 8 bytes and uses Big Endian + + void Verify(long value, byte[] expected) + { + Assert.That(Slice.FromFixed64BE(value).GetBytes(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0L, new byte[8]); + Verify(1L, new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }); + Verify(1L << 8, new byte[] { 0, 0, 0, 0, 0, 0, 1, 0 }); + Verify(1L << 16, new byte[] { 0, 0, 0, 0, 0, 1, 0, 0 }); + Verify(1L << 24, new byte[] { 0, 0, 0, 0, 1, 0, 0, 0 }); + Verify(1L << 32, new byte[] { 0, 0, 0, 1, 0, 0, 0, 0 }); + Verify(1L << 40, new byte[] { 0, 0, 1, 0, 0, 0, 0, 0 }); + Verify(1L << 48, new byte[] { 0, 1, 0, 0, 0, 0, 0, 0 }); + Verify(1L << 56, new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }); + 
Verify(short.MaxValue, new byte[] { 0, 0, 0, 0, 0, 0, 127, 255 }); + Verify(int.MaxValue, new byte[] { 0, 0, 0, 0, 127, 255, 255, 255 }); + Verify(long.MaxValue, new byte[] { 127, 255, 255, 255, 255, 255, 255, 255 }); + + Verify(-1L, new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }); + Verify(-256L, new byte[] { 255, 255, 255, 255, 255, 255, 255, 0 }); + Verify(-65536L, new byte[] { 255, 255, 255, 255, 255, 255, 0, 0 }); + Verify(-16777216L, new byte[] { 255, 255, 255, 255, 255, 0, 0, 0 }); + Verify(-4294967296L, new byte[] { 255, 255, 255, 255, 0, 0, 0, 0 }); + Verify(long.MinValue, new byte[] { 128, 0, 0, 0, 0, 0, 0, 0 }); + + var rnd = new Random(); + for (int i = 0; i < 1000; i++) + { + long x = (long)rnd.Next() * rnd.Next() * (rnd.Next(2) == 0 ? +1 : -1); + Slice s = Slice.FromFixed64BE(x); + Assert.That(s.Count, Is.EqualTo(8)); + Assert.That(s.ToInt64BE(), Is.EqualTo(x)); + } + } + [Test] public void Test_Slice_ToInt64BE() { - Assert.That(Slice.Create(new byte[] { 0x12 }).ToInt64BE(), Is.EqualTo(0x12)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34 }).ToInt64BE(), Is.EqualTo(0x1234)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56 }).ToInt64BE(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x00, 0x12, 0x34, 0x56 }).ToInt64BE(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78 }).ToInt64BE(), Is.EqualTo(0x12345678)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A }).ToInt64BE(), Is.EqualTo(0x123456789A)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC }).ToInt64BE(), Is.EqualTo(0x123456789ABC)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE }).ToInt64BE(), Is.EqualTo(0x123456789ABCDE)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }).ToInt64BE(), Is.EqualTo(0x123456789ABCDEF0)); - - Assert.That(Slice.Create(new byte[] { 0 }).ToInt64BE(), Is.EqualTo(0L)); - 
Assert.That(Slice.Create(new byte[] { 255 }).ToInt64BE(), Is.EqualTo(255L)); - Assert.That(Slice.Create(new byte[] { 1, 0 }).ToInt64BE(), Is.EqualTo(256L)); - Assert.That(Slice.Create(new byte[] { 255, 255 }).ToInt64BE(), Is.EqualTo(65535L)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0 }).ToInt64BE(), Is.EqualTo(1L << 16)); - Assert.That(Slice.Create(new byte[] { 0, 1, 0, 0 }).ToInt64BE(), Is.EqualTo(1L << 16)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255 }).ToInt64BE(), Is.EqualTo((1L << 24) - 1)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0 }).ToInt64BE(), Is.EqualTo(1L << 24)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0, 0 }).ToInt64BE(), Is.EqualTo(1L << 32)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0, 0, 0 }).ToInt64BE(), Is.EqualTo(1L << 40)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0, 0, 0, 0 }).ToInt64BE(), Is.EqualTo(1L << 48)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }).ToInt64BE(), Is.EqualTo(1L << 56)); - Assert.That(Slice.Create(new byte[] { 127, 255, 255, 255 }).ToInt64BE(), Is.EqualTo(int.MaxValue)); - Assert.That(Slice.Create(new byte[] { 127, 255, 255, 255, 255, 255, 255, 255 }).ToInt64BE(), Is.EqualTo(long.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }).ToInt64BE(), Is.EqualTo(-1L)); + Assert.That(new byte[] { 0x12 }.AsSlice().ToInt64BE(), Is.EqualTo(0x12)); + Assert.That(new byte[] { 0x12, 0x34 }.AsSlice().ToInt64BE(), Is.EqualTo(0x1234)); + Assert.That(new byte[] { 0x12, 0x34, 0x56 }.AsSlice().ToInt64BE(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x00, 0x12, 0x34, 0x56 }.AsSlice().ToInt64BE(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78 }.AsSlice().ToInt64BE(), Is.EqualTo(0x12345678)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A }.AsSlice().ToInt64BE(), Is.EqualTo(0x123456789A)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC }.AsSlice().ToInt64BE(), 
Is.EqualTo(0x123456789ABC)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE }.AsSlice().ToInt64BE(), Is.EqualTo(0x123456789ABCDE)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }.AsSlice().ToInt64BE(), Is.EqualTo(0x123456789ABCDEF0)); + + Assert.That(new byte[] { }.AsSlice().ToInt64BE(), Is.EqualTo(0L)); + Assert.That(new byte[] { 0 }.AsSlice().ToInt64BE(), Is.EqualTo(0L)); + Assert.That(new byte[] { 255 }.AsSlice().ToInt64BE(), Is.EqualTo(255L)); + Assert.That(new byte[] { 1, 0 }.AsSlice().ToInt64BE(), Is.EqualTo(256L)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToInt64BE(), Is.EqualTo(65535L)); + Assert.That(new byte[] { 1, 0, 0 }.AsSlice().ToInt64BE(), Is.EqualTo(1L << 16)); + Assert.That(new byte[] { 0, 1, 0, 0 }.AsSlice().ToInt64BE(), Is.EqualTo(1L << 16)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToInt64BE(), Is.EqualTo((1L << 24) - 1)); + Assert.That(new byte[] { 1, 0, 0, 0 }.AsSlice().ToInt64BE(), Is.EqualTo(1L << 24)); + Assert.That(new byte[] { 1, 0, 0, 0, 0 }.AsSlice().ToInt64BE(), Is.EqualTo(1L << 32)); + Assert.That(new byte[] { 1, 0, 0, 0, 0, 0 }.AsSlice().ToInt64BE(), Is.EqualTo(1L << 40)); + Assert.That(new byte[] { 1, 0, 0, 0, 0, 0, 0 }.AsSlice().ToInt64BE(), Is.EqualTo(1L << 48)); + Assert.That(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }.AsSlice().ToInt64BE(), Is.EqualTo(1L << 56)); + Assert.That(new byte[] { 127, 255, 255, 255 }.AsSlice().ToInt64BE(), Is.EqualTo(int.MaxValue)); + Assert.That(new byte[] { 127, 255, 255, 255, 255, 255, 255, 255 }.AsSlice().ToInt64BE(), Is.EqualTo(long.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }.AsSlice().ToInt64BE(), Is.EqualTo(-1L)); // should validate the arguments - var x = Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }); + var x = new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }.AsSlice(); Assert.That(() => MutateOffset(x, -1).ToInt64BE(), Throws.InstanceOf()); 
Assert.That(() => MutateCount(x, 9).ToInt64BE(), Throws.InstanceOf()); Assert.That(() => MutateArray(x, null).ToInt64BE(), Throws.InstanceOf()); } + #endregion + + #endregion + + #endregion + + #region Unsigned... + + #region 32-bits + + #region Little-Endian + [Test] public void Test_Slice_FromUInt32() { // 32-bit integers should be encoded in little endian, and with 1, 2 or 4 bytes - // 0x12 -> { 12 } - // 0x1234 -> { 34 12 } - // 0x123456 -> { 56 34 12 00 } - // 0x12345678 -> { 78 56 34 12 } - Assert.That(Slice.FromUInt32(0x12).ToHexaString(), Is.EqualTo("12")); - Assert.That(Slice.FromUInt32(0x1234).ToHexaString(), Is.EqualTo("3412")); - Assert.That(Slice.FromUInt32(0x123456).ToHexaString(), Is.EqualTo("56341200")); - Assert.That(Slice.FromUInt32(0x12345678).ToHexaString(), Is.EqualTo("78563412")); + void Verify(uint value, string expected) + { + Assert.That(Slice.FromUInt32(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } - Assert.That(Slice.FromUInt32(0).ToHexaString(), Is.EqualTo("00")); - Assert.That(Slice.FromUInt32(1).ToHexaString(), Is.EqualTo("01")); - Assert.That(Slice.FromUInt32(255).ToHexaString(), Is.EqualTo("ff")); - Assert.That(Slice.FromUInt32(256).ToHexaString(), Is.EqualTo("0001")); - Assert.That(Slice.FromUInt32(65535).ToHexaString(), Is.EqualTo("ffff")); - Assert.That(Slice.FromUInt32(65536).ToHexaString(), Is.EqualTo("00000100")); - Assert.That(Slice.FromUInt32(int.MaxValue).ToHexaString(), Is.EqualTo("ffffff7f")); - Assert.That(Slice.FromUInt32(uint.MaxValue).ToHexaString(), Is.EqualTo("ffffffff")); + Verify(0x12, "12"); + Verify(0x1234, "3412"); + Verify(0x123456, "563412"); + Verify(0x12345678, "78563412"); + + Verify(0, "00"); + Verify(1, "01"); + Verify(255, "FF"); + Verify(256, "0001"); + Verify(65535, "FFFF"); + Verify(65536, "000001"); + Verify(int.MaxValue, "FFFFFF7F"); + Verify(uint.MaxValue, "FFFFFFFF"); + } + + [Test] + public void Test_Slice_FromFixedU32() + { + // FromFixed32 always produce 4 
bytes and uses Little Endian + + void Verify(uint value, byte[] expected) + { + Assert.That(Slice.FromFixedU32(value).GetBytes(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0, new byte[4]); + Verify(1, new byte[] { 1, 0, 0, 0 }); + Verify(256, new byte[] { 0, 1, 0, 0 }); + Verify(ushort.MaxValue, new byte[] { 255, 255, 0, 0 }); + Verify(65536, new byte[] { 0, 0, 1, 0 }); + Verify(16777216, new byte[] { 0, 0, 0, 1 }); + Verify(int.MaxValue, new byte[] { 255, 255, 255, 127 }); + Verify(uint.MaxValue, new byte[] { 255, 255, 255, 255 }); + + var rnd = new Random(); + for (int i = 0; i < 1000; i++) + { + uint x = (uint)rnd.Next() + (uint)rnd.Next(); + Slice s = Slice.FromFixedU32(x); + Assert.That(s.Count, Is.EqualTo(4)); + Assert.That(s.ToUInt32(), Is.EqualTo(x)); + } } [Test] public void Test_Slice_ToUInt32() { - Assert.That(Slice.Create(new byte[] { 0x12 }).ToUInt32(), Is.EqualTo(0x12U)); - Assert.That(Slice.Create(new byte[] { 0x34, 0x12 }).ToUInt32(), Is.EqualTo(0x1234U)); - Assert.That(Slice.Create(new byte[] { 0x56, 0x34, 0x12 }).ToUInt32(), Is.EqualTo(0x123456U)); - Assert.That(Slice.Create(new byte[] { 0x56, 0x34, 0x12, 0x00 }).ToUInt32(), Is.EqualTo(0x123456U)); - Assert.That(Slice.Create(new byte[] { 0x78, 0x56, 0x34, 0x12 }).ToUInt32(), Is.EqualTo(0x12345678U)); + Assert.That(new byte[] { 0x12 }.AsSlice().ToUInt32(), Is.EqualTo(0x12U)); + Assert.That(new byte[] { 0x34, 0x12 }.AsSlice().ToUInt32(), Is.EqualTo(0x1234U)); + Assert.That(new byte[] { 0x56, 0x34, 0x12 }.AsSlice().ToUInt32(), Is.EqualTo(0x123456U)); + Assert.That(new byte[] { 0x56, 0x34, 0x12, 0x00 }.AsSlice().ToUInt32(), Is.EqualTo(0x123456U)); + Assert.That(new byte[] { 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToUInt32(), Is.EqualTo(0x12345678U)); + + Assert.That(new byte[] { }.AsSlice().ToUInt32(), Is.EqualTo(0U)); + Assert.That(new byte[] { 0 }.AsSlice().ToUInt32(), Is.EqualTo(0U)); + Assert.That(new byte[] { 255 }.AsSlice().ToUInt32(), Is.EqualTo(255U)); + 
Assert.That(new byte[] { 0, 1 }.AsSlice().ToUInt32(), Is.EqualTo(256U)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToUInt32(), Is.EqualTo(65535U)); + Assert.That(new byte[] { 0, 0, 1 }.AsSlice().ToUInt32(), Is.EqualTo(1U << 16)); + Assert.That(new byte[] { 0, 0, 1, 0 }.AsSlice().ToUInt32(), Is.EqualTo(1U << 16)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToUInt32(), Is.EqualTo((1U << 24) - 1U)); + Assert.That(new byte[] { 0, 0, 0, 1 }.AsSlice().ToUInt32(), Is.EqualTo(1U << 24)); + Assert.That(new byte[] { 255, 255, 255, 127 }.AsSlice().ToUInt32(), Is.EqualTo((uint)int.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255 }.AsSlice().ToUInt32(), Is.EqualTo(uint.MaxValue)); + + Assert.That(() => Slice.Create(5).ToUInt32(), Throws.InstanceOf()); + } + + #endregion + + #region Big-Endian - Assert.That(Slice.Create(new byte[] { 0 }).ToUInt32(), Is.EqualTo(0U)); - Assert.That(Slice.Create(new byte[] { 255 }).ToUInt32(), Is.EqualTo(255U)); - Assert.That(Slice.Create(new byte[] { 0, 1 }).ToUInt32(), Is.EqualTo(256U)); - Assert.That(Slice.Create(new byte[] { 255, 255 }).ToUInt32(), Is.EqualTo(65535U)); - Assert.That(Slice.Create(new byte[] { 0, 0, 1 }).ToUInt32(), Is.EqualTo(1U << 16)); - Assert.That(Slice.Create(new byte[] { 0, 0, 1, 0 }).ToUInt32(), Is.EqualTo(1U << 16)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255 }).ToUInt32(), Is.EqualTo((1U << 24) - 1U)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 1 }).ToUInt32(), Is.EqualTo(1U << 24)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 127 }).ToUInt32(), Is.EqualTo((uint)int.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255 }).ToUInt32(), Is.EqualTo(uint.MaxValue)); + [Test] + public void Test_Slice_FromUInt32BE() + { + // 32-bit integers should be encoded in big endian, and with 1, 2 or 4 bytes + + void Verify(uint value, string expected) + { + Assert.That(Slice.FromUInt32BE(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0}", value); 
+ } + + Verify(0x12, "12"); + Verify(0x1234, "1234"); + Verify(0x123456, "123456"); + Verify(0x12345678, "12345678"); + + Verify(0, "00"); + Verify(1, "01"); + Verify(255, "FF"); + Verify(256, "0100"); + Verify(65535, "FFFF"); + Verify(65536, "010000"); + Verify(int.MaxValue, "7FFFFFFF"); + Verify(uint.MaxValue, "FFFFFFFF"); } [Test] - public void Test_Slice_ToUInt32BE() + public void Test_Slice_FromFixedU32BE() { - Assert.That(Slice.Create(new byte[] { 0x12 }).ToUInt32BE(), Is.EqualTo(0x12U)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34 }).ToUInt32BE(), Is.EqualTo(0x1234U)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56 }).ToUInt32BE(), Is.EqualTo(0x123456U)); - Assert.That(Slice.Create(new byte[] { 0x00, 0x12, 0x34, 0x56 }).ToUInt32BE(), Is.EqualTo(0x123456U)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78 }).ToUInt32BE(), Is.EqualTo(0x12345678U)); + // FromFixedU32BE always produce 4 bytes and uses Big Endian - Assert.That(Slice.Create(new byte[] { 0 }).ToUInt32BE(), Is.EqualTo(0U)); - Assert.That(Slice.Create(new byte[] { 255 }).ToUInt32BE(), Is.EqualTo(255U)); - Assert.That(Slice.Create(new byte[] { 1, 0 }).ToUInt32BE(), Is.EqualTo(256U)); - Assert.That(Slice.Create(new byte[] { 255, 255 }).ToUInt32BE(), Is.EqualTo(65535U)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0 }).ToUInt32BE(), Is.EqualTo(1U << 16)); - Assert.That(Slice.Create(new byte[] { 0, 1, 0, 0 }).ToUInt32BE(), Is.EqualTo(1U << 16)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255 }).ToUInt32BE(), Is.EqualTo((1U << 24) - 1U)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0 }).ToUInt32BE(), Is.EqualTo(1U << 24)); - Assert.That(Slice.Create(new byte[] { 127, 255, 255, 255 }).ToUInt32BE(), Is.EqualTo((uint)int.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255 }).ToUInt32BE(), Is.EqualTo(uint.MaxValue)); + void Verify(uint value, byte[] expected) + { + Assert.That(Slice.FromFixedU32BE(value).GetBytes(), Is.EqualTo(expected), 
"Invalid encoding for {0}", value); + } + + Verify(0, new byte[4]); + Verify(1, new byte[] { 0, 0, 0, 1 }); + Verify(256, new byte[] { 0, 0, 1, 0 }); + Verify(ushort.MaxValue, new byte[] { 0, 0, 255, 255 }); + Verify(65536, new byte[] { 0, 1, 0, 0 }); + Verify(16777216, new byte[] { 1, 0, 0, 0 }); + Verify(int.MaxValue, new byte[] { 127, 255, 255, 255 }); + Verify(uint.MaxValue, new byte[] { 255, 255, 255, 255 }); + + var rnd = new Random(); + for (int i = 0; i < 1000; i++) + { + uint x = (uint)rnd.Next() + (uint)rnd.Next(); + Slice s = Slice.FromFixedU32BE(x); + Assert.That(s.Count, Is.EqualTo(4)); + Assert.That(s.ToUInt32BE(), Is.EqualTo(x)); + } } + [Test] + public void Test_Slice_ToUInt32BE() + { + Assert.That(new byte[] { 0x12 }.AsSlice().ToUInt32BE(), Is.EqualTo(0x12U)); + Assert.That(new byte[] { 0x12, 0x34 }.AsSlice().ToUInt32BE(), Is.EqualTo(0x1234U)); + Assert.That(new byte[] { 0x12, 0x34, 0x56 }.AsSlice().ToUInt32BE(), Is.EqualTo(0x123456U)); + Assert.That(new byte[] { 0x00, 0x12, 0x34, 0x56 }.AsSlice().ToUInt32BE(), Is.EqualTo(0x123456U)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78 }.AsSlice().ToUInt32BE(), Is.EqualTo(0x12345678U)); + + Assert.That(new byte[] { }.AsSlice().ToUInt32BE(), Is.EqualTo(0U)); + Assert.That(new byte[] { 0 }.AsSlice().ToUInt32BE(), Is.EqualTo(0U)); + Assert.That(new byte[] { 255 }.AsSlice().ToUInt32BE(), Is.EqualTo(255U)); + Assert.That(new byte[] { 1, 0 }.AsSlice().ToUInt32BE(), Is.EqualTo(256U)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToUInt32BE(), Is.EqualTo(65535U)); + Assert.That(new byte[] { 1, 0, 0 }.AsSlice().ToUInt32BE(), Is.EqualTo(1U << 16)); + Assert.That(new byte[] { 0, 1, 0, 0 }.AsSlice().ToUInt32BE(), Is.EqualTo(1U << 16)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToUInt32BE(), Is.EqualTo((1U << 24) - 1U)); + Assert.That(new byte[] { 1, 0, 0, 0 }.AsSlice().ToUInt32BE(), Is.EqualTo(1U << 24)); + Assert.That(new byte[] { 127, 255, 255, 255 }.AsSlice().ToUInt32BE(), 
Is.EqualTo((uint)int.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255 }.AsSlice().ToUInt32BE(), Is.EqualTo(uint.MaxValue)); + + Assert.That(() => Slice.Create(5).ToUInt32BE(), Throws.InstanceOf()); + } + + #endregion + + #endregion + + #region 64-bits + [Test] public void Test_Slice_FromUInt64() { // 64-bit integers should be encoded in little endian, and with 1, 2, 4 or 8 bytes - // 0x12 -> { 12 } - // 0x1234 -> { 34 12 } - // 0x123456 -> { 56 34 12 00 } - // 0x12345678 -> { 78 56 34 12 } - // 0x123456789A -> { 9A 78 56 34 12 00 00 00} - // 0x123456789ABC -> { BC 9A 78 56 34 12 00 00} - // 0x123456789ABCDE -> { DE BC 9A 78 56 34 12 00} - // 0x123456789ABCDEF0 -> { F0 DE BC 9A 78 56 34 12 } - - Assert.That(Slice.FromUInt64(0x12UL).ToHexaString(), Is.EqualTo("12")); - Assert.That(Slice.FromUInt64(0x1234UL).ToHexaString(), Is.EqualTo("3412")); - Assert.That(Slice.FromUInt64(0x123456UL).ToHexaString(), Is.EqualTo("56341200")); - Assert.That(Slice.FromUInt64(0x12345678UL).ToHexaString(), Is.EqualTo("78563412")); - Assert.That(Slice.FromUInt64(0x123456789AUL).ToHexaString(), Is.EqualTo("9a78563412000000")); - Assert.That(Slice.FromUInt64(0x123456789ABCUL).ToHexaString(), Is.EqualTo("bc9a785634120000")); - Assert.That(Slice.FromUInt64(0x123456789ABCDEUL).ToHexaString(), Is.EqualTo("debc9a7856341200")); - Assert.That(Slice.FromUInt64(0x123456789ABCDEF0UL).ToHexaString(), Is.EqualTo("f0debc9a78563412")); - - Assert.That(Slice.FromUInt64(0UL).ToHexaString(), Is.EqualTo("00")); - Assert.That(Slice.FromUInt64(1UL).ToHexaString(), Is.EqualTo("01")); - Assert.That(Slice.FromUInt64(255UL).ToHexaString(), Is.EqualTo("ff")); - Assert.That(Slice.FromUInt64(256UL).ToHexaString(), Is.EqualTo("0001")); - Assert.That(Slice.FromUInt64(ushort.MaxValue).ToHexaString(), Is.EqualTo("ffff")); - Assert.That(Slice.FromUInt64(65536UL).ToHexaString(), Is.EqualTo("00000100")); - Assert.That(Slice.FromUInt64(int.MaxValue).ToHexaString(), Is.EqualTo("ffffff7f")); - 
Assert.That(Slice.FromUInt64(uint.MaxValue).ToHexaString(), Is.EqualTo("ffffffff")); - Assert.That(Slice.FromUInt64(long.MaxValue).ToHexaString(), Is.EqualTo("ffffffffffffff7f")); - Assert.That(Slice.FromUInt64(ulong.MaxValue).ToHexaString(), Is.EqualTo("ffffffffffffffff")); + void Verify(ulong value, string expected) + { + Assert.That(Slice.FromUInt64(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0x12UL, "12"); + Verify(0x1234UL, "3412"); + Verify(0x123456UL, "563412"); + Verify(0x12345678UL, "78563412"); + Verify(0x123456789AUL, "9A78563412"); + Verify(0x123456789ABCUL, "BC9A78563412"); + Verify(0x123456789ABCDEUL, "DEBC9A78563412"); + Verify(0x123456789ABCDEF0UL, "F0DEBC9A78563412"); + + Verify(0UL, "00"); + Verify(1UL, "01"); + Verify(255UL, "FF"); + Verify(256UL, "0001"); + Verify(ushort.MaxValue, "FFFF"); + Verify(65536UL, "000001"); + Verify(16777215UL, "FFFFFF"); + Verify(16777216UL, "00000001"); + Verify(int.MaxValue, "FFFFFF7F"); + Verify(16777216UL, "00000001"); + Verify(uint.MaxValue + 1UL, "0000000001"); + Verify(long.MaxValue, "FFFFFFFFFFFFFF7F"); + Verify(ulong.MaxValue, "FFFFFFFFFFFFFFFF"); + + } + + [Test] + public void Test_Slice_FromFixedU64() + { + // FromFixed64 always produce 8 bytes and uses Little Endian + + void Verify(ulong value, byte[] expected) + { + Assert.That(Slice.FromFixedU64(value).GetBytes(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0UL, new byte[8]); + Verify(1UL, new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }); + Verify(1UL << 8, new byte[] { 0, 1, 0, 0, 0, 0, 0, 0 }); + Verify(1UL << 16, new byte[] { 0, 0, 1, 0, 0, 0, 0, 0 }); + Verify(1UL << 24, new byte[] { 0, 0, 0, 1, 0, 0, 0, 0 }); + Verify(1UL << 32, new byte[] { 0, 0, 0, 0, 1, 0, 0, 0 }); + Verify(1UL << 40, new byte[] { 0, 0, 0, 0, 0, 1, 0, 0 }); + Verify(1UL << 48, new byte[] { 0, 0, 0, 0, 0, 0, 1, 0 }); + Verify(1UL << 56, new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }); + Verify(ushort.MaxValue, new 
byte[] { 255, 255, 0, 0, 0, 0, 0, 0 }); + Verify(int.MaxValue, new byte[] { 255, 255, 255, 127, 0, 0, 0, 0 }); + Verify(uint.MaxValue, new byte[] { 255, 255, 255, 255, 0, 0, 0, 0 }); + Verify(long.MaxValue, new byte[] { 255, 255, 255, 255, 255, 255, 255, 127 }); + Verify(ulong.MaxValue, new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }); + + var rnd = new Random(); + for (int i = 0; i < 1000; i++) + { + ulong x = (ulong)rnd.Next() * (ulong)rnd.Next(); + Slice s = Slice.FromFixedU64(x); + Assert.That(s.Count, Is.EqualTo(8)); + Assert.That(s.ToUInt64(), Is.EqualTo(x)); + } } [Test] public void Test_Slice_ToUInt64() { - Assert.That(Slice.Create(new byte[] { 0x12 }).ToUInt64(), Is.EqualTo(0x12)); - Assert.That(Slice.Create(new byte[] { 0x34, 0x12 }).ToUInt64(), Is.EqualTo(0x1234)); - Assert.That(Slice.Create(new byte[] { 0x56, 0x34, 0x12 }).ToUInt64(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x56, 0x34, 0x12, 00 }).ToUInt64(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x78, 0x56, 0x34, 0x12 }).ToUInt64(), Is.EqualTo(0x12345678)); - Assert.That(Slice.Create(new byte[] { 0x9A, 0x78, 0x56, 0x34, 0x12 }).ToUInt64(), Is.EqualTo(0x123456789A)); - Assert.That(Slice.Create(new byte[] { 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }).ToUInt64(), Is.EqualTo(0x123456789ABC)); - Assert.That(Slice.Create(new byte[] { 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }).ToUInt64(), Is.EqualTo(0x123456789ABCDE)); - Assert.That(Slice.Create(new byte[] { 0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }).ToUInt64(), Is.EqualTo(0x123456789ABCDEF0)); - - Assert.That(Slice.Create(new byte[] { 0 }).ToUInt64(), Is.EqualTo(0UL)); - Assert.That(Slice.Create(new byte[] { 255 }).ToUInt64(), Is.EqualTo(255UL)); - Assert.That(Slice.Create(new byte[] { 0, 1 }).ToUInt64(), Is.EqualTo(256UL)); - Assert.That(Slice.Create(new byte[] { 255, 255 }).ToUInt64(), Is.EqualTo(65535UL)); - Assert.That(Slice.Create(new byte[] { 0, 0, 1 }).ToUInt64(), Is.EqualTo(1UL << 16)); - 
Assert.That(Slice.Create(new byte[] { 0, 0, 1, 0 }).ToUInt64(), Is.EqualTo(1UL << 16)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255 }).ToUInt64(), Is.EqualTo((1UL << 24) - 1)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 1 }).ToUInt64(), Is.EqualTo(1UL << 24)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 0, 1 }).ToUInt64(), Is.EqualTo(1UL << 32)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 0, 0, 1 }).ToUInt64(), Is.EqualTo(1UL << 40)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 0, 0, 0, 1 }).ToUInt64(), Is.EqualTo(1UL << 48)); - Assert.That(Slice.Create(new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }).ToUInt64(), Is.EqualTo(1UL << 56)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 127 }).ToUInt64(), Is.EqualTo(int.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255 }).ToUInt64(), Is.EqualTo(uint.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255, 255, 255, 255, 127 }).ToUInt64(), Is.EqualTo(long.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }).ToUInt64(), Is.EqualTo(ulong.MaxValue)); + Assert.That(new byte[] { 0x12 }.AsSlice().ToUInt64(), Is.EqualTo(0x12)); + Assert.That(new byte[] { 0x34, 0x12 }.AsSlice().ToUInt64(), Is.EqualTo(0x1234)); + Assert.That(new byte[] { 0x56, 0x34, 0x12 }.AsSlice().ToUInt64(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x56, 0x34, 0x12, 00 }.AsSlice().ToUInt64(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToUInt64(), Is.EqualTo(0x12345678)); + Assert.That(new byte[] { 0x9A, 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToUInt64(), Is.EqualTo(0x123456789A)); + Assert.That(new byte[] { 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToUInt64(), Is.EqualTo(0x123456789ABC)); + Assert.That(new byte[] { 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 }.AsSlice().ToUInt64(), Is.EqualTo(0x123456789ABCDE)); + Assert.That(new byte[] { 0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12 
}.AsSlice().ToUInt64(), Is.EqualTo(0x123456789ABCDEF0)); + + Assert.That(new byte[] { }.AsSlice().ToUInt64(), Is.EqualTo(0UL)); + Assert.That(new byte[] { 0 }.AsSlice().ToUInt64(), Is.EqualTo(0UL)); + Assert.That(new byte[] { 255 }.AsSlice().ToUInt64(), Is.EqualTo(255UL)); + Assert.That(new byte[] { 0, 1 }.AsSlice().ToUInt64(), Is.EqualTo(256UL)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToUInt64(), Is.EqualTo(65535UL)); + Assert.That(new byte[] { 0, 0, 1 }.AsSlice().ToUInt64(), Is.EqualTo(1UL << 16)); + Assert.That(new byte[] { 0, 0, 1, 0 }.AsSlice().ToUInt64(), Is.EqualTo(1UL << 16)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToUInt64(), Is.EqualTo((1UL << 24) - 1)); + Assert.That(new byte[] { 0, 0, 0, 1 }.AsSlice().ToUInt64(), Is.EqualTo(1UL << 24)); + Assert.That(new byte[] { 0, 0, 0, 0, 1 }.AsSlice().ToUInt64(), Is.EqualTo(1UL << 32)); + Assert.That(new byte[] { 0, 0, 0, 0, 0, 1 }.AsSlice().ToUInt64(), Is.EqualTo(1UL << 40)); + Assert.That(new byte[] { 0, 0, 0, 0, 0, 0, 1 }.AsSlice().ToUInt64(), Is.EqualTo(1UL << 48)); + Assert.That(new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }.AsSlice().ToUInt64(), Is.EqualTo(1UL << 56)); + Assert.That(new byte[] { 255, 255, 255, 127 }.AsSlice().ToUInt64(), Is.EqualTo(int.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255 }.AsSlice().ToUInt64(), Is.EqualTo(uint.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255, 255, 255, 255, 127 }.AsSlice().ToUInt64(), Is.EqualTo(long.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }.AsSlice().ToUInt64(), Is.EqualTo(ulong.MaxValue)); // should validate the arguments - var x = Slice.Create(new byte[] { 0x78, 0x56, 0x34, 0x12 }); + var x = new byte[] { 0x78, 0x56, 0x34, 0x12 }.AsSlice(); Assert.That(() => MutateOffset(x, -1).ToUInt64(), Throws.InstanceOf()); Assert.That(() => MutateCount(x, 5).ToUInt64(), Throws.InstanceOf()); - Assert.That(() => MutateArray(x, null).ToUInt64(), Throws.InstanceOf()); + Assert.That(() => MutateArray(x, 
null).ToUInt64(), Throws.InstanceOf()); + } + + [Test] + public void Test_Slice_FromUInt64BE() + { + // 64-bit integers should be encoded in big endian, and using from 1 to 8 bytes + + void Verify(ulong value, string expected) + { + Assert.That(Slice.FromUInt64BE(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0x12UL, "12"); + Verify(0x1234UL, "1234"); + Verify(0x123456UL, "123456"); + Verify(0x12345678UL, "12345678"); + Verify(0x123456789AUL, "123456789A"); + Verify(0x123456789ABCUL, "123456789ABC"); + Verify(0x123456789ABCDEUL, "123456789ABCDE"); + Verify(0x123456789ABCDEF0UL, "123456789ABCDEF0"); + + Verify(0UL, "00"); + Verify(1UL, "01"); + Verify(255UL, "FF"); + Verify(256UL, "0100"); + Verify(ushort.MaxValue, "FFFF"); + Verify(65536UL, "010000"); + Verify(16777215UL, "FFFFFF"); + Verify(16777216UL, "01000000"); + Verify(int.MaxValue, "7FFFFFFF"); + Verify(16777216UL, "01000000"); + Verify(uint.MaxValue + 1UL, "0100000000"); + Verify(long.MaxValue, "7FFFFFFFFFFFFFFF"); + Verify(ulong.MaxValue, "FFFFFFFFFFFFFFFF"); + + } + + [Test] + public void Test_Slice_FromFixedU64BE() + { + // FromFixed64 always produce 8 bytes and uses Big Endian + + void Verify(ulong value, byte[] expected) + { + Assert.That(Slice.FromFixedU64BE(value).GetBytes(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0UL, new byte[8]); + Verify(1L, new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }); + Verify(1L << 8, new byte[] { 0, 0, 0, 0, 0, 0, 1, 0 }); + Verify(1L << 16, new byte[] { 0, 0, 0, 0, 0, 1, 0, 0 }); + Verify(1L << 24, new byte[] { 0, 0, 0, 0, 1, 0, 0, 0 }); + Verify(1L << 32, new byte[] { 0, 0, 0, 1, 0, 0, 0, 0 }); + Verify(1L << 40, new byte[] { 0, 0, 1, 0, 0, 0, 0, 0 }); + Verify(1L << 48, new byte[] { 0, 1, 0, 0, 0, 0, 0, 0 }); + Verify(1L << 56, new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }); + Verify(ushort.MaxValue, new byte[] { 0, 0, 0, 0, 0, 0, 255, 255 }); + Verify(int.MaxValue, new byte[] { 0, 0, 0, 0, 127, 255, 255, 
255 }); + Verify(uint.MaxValue, new byte[] { 0, 0, 0, 0, 255, 255, 255, 255 }); + Verify(long.MaxValue, new byte[] { 127, 255, 255, 255, 255, 255, 255, 255 }); + Verify(ulong.MaxValue, new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }); + + var rnd = new Random(); + for (int i = 0; i < 1000; i++) + { + ulong x = (ulong)rnd.Next() * (ulong)rnd.Next(); + Slice s = Slice.FromFixedU64BE(x); + Assert.That(s.Count, Is.EqualTo(8)); + Assert.That(s.ToUInt64BE(), Is.EqualTo(x)); + } } [Test] public void Test_Slice_ToUInt64BE() { - Assert.That(Slice.Create(new byte[] { 0x12 }).ToUInt64BE(), Is.EqualTo(0x12)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34 }).ToUInt64BE(), Is.EqualTo(0x1234)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56 }).ToUInt64BE(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x00, 0x12, 0x34, 0x56 }).ToUInt64BE(), Is.EqualTo(0x123456)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78 }).ToUInt64BE(), Is.EqualTo(0x12345678)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A }).ToUInt64BE(), Is.EqualTo(0x123456789A)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC }).ToUInt64BE(), Is.EqualTo(0x123456789ABC)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE }).ToUInt64BE(), Is.EqualTo(0x123456789ABCDE)); - Assert.That(Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }).ToUInt64BE(), Is.EqualTo(0x123456789ABCDEF0)); - - Assert.That(Slice.Create(new byte[] { 0 }).ToUInt64BE(), Is.EqualTo(0L)); - Assert.That(Slice.Create(new byte[] { 255 }).ToUInt64BE(), Is.EqualTo(255L)); - Assert.That(Slice.Create(new byte[] { 1, 0 }).ToUInt64BE(), Is.EqualTo(256L)); - Assert.That(Slice.Create(new byte[] { 255, 255 }).ToUInt64BE(), Is.EqualTo(65535L)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0 }).ToUInt64BE(), Is.EqualTo(1L << 16)); - Assert.That(Slice.Create(new byte[] { 0, 1, 0, 0 }).ToUInt64BE(), 
Is.EqualTo(1L << 16)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255 }).ToUInt64BE(), Is.EqualTo((1L << 24) - 1)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0 }).ToUInt64BE(), Is.EqualTo(1L << 24)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0, 0 }).ToUInt64BE(), Is.EqualTo(1L << 32)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0, 0, 0 }).ToUInt64BE(), Is.EqualTo(1L << 40)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0, 0, 0, 0 }).ToUInt64BE(), Is.EqualTo(1L << 48)); - Assert.That(Slice.Create(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }).ToUInt64BE(), Is.EqualTo(1L << 56)); - Assert.That(Slice.Create(new byte[] { 127, 255, 255, 255 }).ToUInt64BE(), Is.EqualTo(int.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255 }).ToUInt64BE(), Is.EqualTo(uint.MaxValue)); - Assert.That(Slice.Create(new byte[] { 127, 255, 255, 255, 255, 255, 255, 255 }).ToUInt64BE(), Is.EqualTo(long.MaxValue)); - Assert.That(Slice.Create(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }).ToUInt64BE(), Is.EqualTo(ulong.MaxValue)); + Assert.That(new byte[] { 0x12 }.AsSlice().ToUInt64BE(), Is.EqualTo(0x12)); + Assert.That(new byte[] { 0x12, 0x34 }.AsSlice().ToUInt64BE(), Is.EqualTo(0x1234)); + Assert.That(new byte[] { 0x12, 0x34, 0x56 }.AsSlice().ToUInt64BE(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x00, 0x12, 0x34, 0x56 }.AsSlice().ToUInt64BE(), Is.EqualTo(0x123456)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78 }.AsSlice().ToUInt64BE(), Is.EqualTo(0x12345678)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A }.AsSlice().ToUInt64BE(), Is.EqualTo(0x123456789A)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC }.AsSlice().ToUInt64BE(), Is.EqualTo(0x123456789ABC)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE }.AsSlice().ToUInt64BE(), Is.EqualTo(0x123456789ABCDE)); + Assert.That(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }.AsSlice().ToUInt64BE(), 
Is.EqualTo(0x123456789ABCDEF0)); + + Assert.That(new byte[] { }.AsSlice().ToUInt64BE(), Is.EqualTo(0L)); + Assert.That(new byte[] { 0 }.AsSlice().ToUInt64BE(), Is.EqualTo(0L)); + Assert.That(new byte[] { 255 }.AsSlice().ToUInt64BE(), Is.EqualTo(255L)); + Assert.That(new byte[] { 1, 0 }.AsSlice().ToUInt64BE(), Is.EqualTo(256L)); + Assert.That(new byte[] { 255, 255 }.AsSlice().ToUInt64BE(), Is.EqualTo(65535L)); + Assert.That(new byte[] { 1, 0, 0 }.AsSlice().ToUInt64BE(), Is.EqualTo(1L << 16)); + Assert.That(new byte[] { 0, 1, 0, 0 }.AsSlice().ToUInt64BE(), Is.EqualTo(1L << 16)); + Assert.That(new byte[] { 255, 255, 255 }.AsSlice().ToUInt64BE(), Is.EqualTo((1L << 24) - 1)); + Assert.That(new byte[] { 1, 0, 0, 0 }.AsSlice().ToUInt64BE(), Is.EqualTo(1L << 24)); + Assert.That(new byte[] { 1, 0, 0, 0, 0 }.AsSlice().ToUInt64BE(), Is.EqualTo(1L << 32)); + Assert.That(new byte[] { 1, 0, 0, 0, 0, 0 }.AsSlice().ToUInt64BE(), Is.EqualTo(1L << 40)); + Assert.That(new byte[] { 1, 0, 0, 0, 0, 0, 0 }.AsSlice().ToUInt64BE(), Is.EqualTo(1L << 48)); + Assert.That(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }.AsSlice().ToUInt64BE(), Is.EqualTo(1L << 56)); + Assert.That(new byte[] { 127, 255, 255, 255 }.AsSlice().ToUInt64BE(), Is.EqualTo(int.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255 }.AsSlice().ToUInt64BE(), Is.EqualTo(uint.MaxValue)); + Assert.That(new byte[] { 127, 255, 255, 255, 255, 255, 255, 255 }.AsSlice().ToUInt64BE(), Is.EqualTo(long.MaxValue)); + Assert.That(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 }.AsSlice().ToUInt64BE(), Is.EqualTo(ulong.MaxValue)); // should validate the arguments - var x = Slice.Create(new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }); + var x = new byte[] { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }.AsSlice(); Assert.That(() => MutateOffset(x, -1).ToUInt64BE(), Throws.InstanceOf()); Assert.That(() => MutateCount(x, 9).ToUInt64BE(), Throws.InstanceOf()); Assert.That(() => MutateArray(x, null).ToUInt64BE(), 
Throws.InstanceOf()); } + #endregion + + #endregion + + #region Floating Point... + + private static string SwapHexa(string hexa) + { + char[] res = new char[hexa.Length]; + int p = 0; + for (int i = hexa.Length - 2; i >= 0; i -= 2, p += 2) + { + res[i + 0] = hexa[p + 0]; + res[i + 1] = hexa[p + 1]; + } + return new string(res); + } + + [Test] + public void Test_Slice_FromSingle() + { + void Verify(float value, string expected) + { + Assert.That(Slice.FromSingle(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0} (Little Endian)", value); + Assert.That(Slice.FromSingleBE(value).ToHexaString(), Is.EqualTo(SwapHexa(expected)), "Invalid encoding for {0} (Big Endian)", value); + } + + Verify(0f, "00000000"); + Verify(1f, "0000803F"); + Verify(-1f, "000080BF"); + Verify(10f, "00002041"); + Verify(0.1f, "CDCCCC3D"); + Verify(0.5f, "0000003F"); + + Verify(1f / 3f, "ABAAAA3E"); + Verify((float) Math.PI, "DB0F4940"); + Verify((float) Math.E, "54F82D40"); + + Verify(float.NaN, "0000C0FF"); + Verify(float.Epsilon, "01000000"); + Verify(float.MaxValue, "FFFF7F7F"); + Verify(float.MinValue, "FFFF7FFF"); + Verify(float.PositiveInfinity, "0000807F"); + Verify(float.NegativeInfinity, "000080FF"); + } + + [Test] + public void Test_Slice_ToSingle() + { + void Verify(string value, float expected) + { + Assert.That(Slice.FromHexa(value).ToSingle(), Is.EqualTo(expected), "Invalid decoding for '{0}' (Little Endian)", value); + Assert.That(Slice.FromHexa(SwapHexa(value)).ToSingleBE(), Is.EqualTo(expected), "Invalid decoding for '{0}' (Big Endian)", value); + } + + Assert.That(Slice.Empty.ToSingle(), Is.EqualTo(0d)); + Verify("00000000", 0f); + Verify("0000803F", 1f); + Verify("000080BF", -1f); + Verify("00002041", 10f); + Verify("CDCCCC3D", 0.1f); + Verify("0000003F", 0.5f); + + Verify("ABAAAA3E", 1f / 3f); + Verify("DB0F4940", (float) Math.PI); + Verify("54F82D40", (float) Math.E); + + Verify("0000C0FF", float.NaN); + Verify("01000000", float.Epsilon); + 
Verify("FFFF7F7F", float.MaxValue); + Verify("FFFF7FFF", float.MinValue); + Verify("0000807F", float.PositiveInfinity); + Verify("000080FF", float.NegativeInfinity); + + Assert.That(() => Slice.Create(5).ToSingle(), Throws.InstanceOf()); + Assert.That(() => Slice.Create(3).ToSingle(), Throws.InstanceOf()); + } + + [Test] + public void Test_Slice_FromDouble() + { + void Verify(double value, string expected) + { + Assert.That(Slice.FromDouble(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0} (Little Endian)", value); + Assert.That(Slice.FromDoubleBE(value).ToHexaString(), Is.EqualTo(SwapHexa(expected)), "Invalid encoding for {0} (Big Endian)", value); + } + + Verify(0d, "0000000000000000"); + Verify(1d, "000000000000F03F"); + Verify(-1d, "000000000000F0BF"); + Verify(10d, "0000000000002440"); + Verify(0.1d, "9A9999999999B93F"); + Verify(0.5d, "000000000000E03F"); + + Verify(1d / 3d, "555555555555D53F"); + Verify(Math.PI, "182D4454FB210940"); + Verify(Math.E, "6957148B0ABF0540"); + + Verify(double.NaN, "000000000000F8FF"); + Verify(double.Epsilon, "0100000000000000"); + Verify(double.MaxValue, "FFFFFFFFFFFFEF7F"); + Verify(double.MinValue, "FFFFFFFFFFFFEFFF"); + Verify(double.PositiveInfinity, "000000000000F07F"); + Verify(double.NegativeInfinity, "000000000000F0FF"); + + } + + [Test] + public void Test_Slice_ToDouble() + { + void Verify(string value, double expected) + { + Assert.That(Slice.FromHexa(value).ToDouble(), Is.EqualTo(expected), "Invalid decoding for '{0}' (Little Endian)", value); + Assert.That(Slice.FromHexa(SwapHexa(value)).ToDoubleBE(), Is.EqualTo(expected), "Invalid decoding for '{0}' (Big Endian)", value); + } + + Verify("", 0d); + Verify("0000000000000000", 0d); + Verify("000000000000F03F", 1d); + Verify("000000000000F0BF", -1d); + Verify("0000000000002440", 10d); + Verify("9A9999999999B93F", 0.1d); + Verify("000000000000E03F", 0.5d); + + Verify("555555555555D53F", 1d / 3d); + Verify("182D4454FB210940", Math.PI); + 
Verify("6957148B0ABF0540", Math.E); + + Verify("000000000000F8FF", double.NaN); + Verify("0100000000000000", double.Epsilon); + Verify("FFFFFFFFFFFFEF7F", double.MaxValue); + Verify("FFFFFFFFFFFFEFFF", double.MinValue); + Verify("000000000000F07F", double.PositiveInfinity); + Verify("000000000000F0FF", double.NegativeInfinity); + + Assert.That(() => Slice.Create(9).ToDouble(), Throws.InstanceOf()); + Assert.That(() => Slice.Create(7).ToDouble(), Throws.InstanceOf()); + } + + [Test] + public void Test_Slice_FromDecimal() + { + void Verify(decimal value, string expected) + { + Assert.That(Slice.FromDecimal(value).ToHexaString(), Is.EqualTo(expected), "Invalid encoding for {0}", value); + } + + Verify(0m, "00000000000000000000000000000000"); + Verify(1m, "00000000000000000100000000000000"); + Verify(-1m, "00000080000000000100000000000000"); + Verify(10m, "00000000000000000A00000000000000"); + Verify(0.1m, "00000100000000000100000000000000"); + Verify(0.5m, "00000100000000000500000000000000"); + + Verify(1m / 3m, "00001C00CA44C50A55555505CB00B714"); + Verify((decimal) Math.PI, "00000E000000000083246AE7B91D0100"); + Verify((decimal)Math.E, "00000E0000000000D04947EE39F70000"); + + Verify(decimal.MaxValue, "00000000FFFFFFFFFFFFFFFFFFFFFFFF"); + Verify(decimal.MinValue, "00000080FFFFFFFFFFFFFFFFFFFFFFFF"); + + } + + [Test] + public void Test_Slice_ToDecimal() + { + void Verify(string value, decimal expected) + { + Assert.That(Slice.FromHexa(value).ToDecimal(), Is.EqualTo(expected), "Invalid decoding for '{0}'", value); + } + + Verify("", 0m); + Verify("00000000000000000000000000000000", 0m); + Verify("00000000000000000100000000000000", 1m); + Verify("00000080000000000100000000000000", -1m); + Verify("00000000000000000A00000000000000", 10m); + Verify("00000100000000000100000000000000", 0.1m); + Verify("00000100000000000500000000000000", 0.5m); + + Verify("00001C00CA44C50A55555505CB00B714", 1m / 3m); + Verify("00000E000000000083246AE7B91D0100", (decimal) Math.PI); + 
Verify("00000E0000000000D04947EE39F70000", (decimal) Math.E); + + Verify("00000000FFFFFFFFFFFFFFFFFFFFFFFF", decimal.MaxValue); + Verify("00000080FFFFFFFFFFFFFFFFFFFFFFFF", decimal.MinValue); + + Assert.That(() => Slice.Create(15).ToDecimal(), Throws.InstanceOf()); + Assert.That(() => Slice.Create(17).ToDecimal(), Throws.InstanceOf()); + } + + #endregion + + #region UUIDs... + [Test] public void Test_Slice_FromGuid() { // Verify that System.GUID are stored as UUIDs using RFC 4122, and not their natural in-memory format - Slice slice; - // empty guid should be all zeroes - slice = Slice.FromGuid(Guid.Empty); + Slice slice = Slice.FromGuid(Guid.Empty); Assert.That(slice.ToHexaString(), Is.EqualTo("00000000000000000000000000000000")); // GUIDs should be stored using RFC 4122 (big endian) @@ -642,35 +1550,36 @@ public void Test_Slice_FromGuid() // byte order should follow the string! slice = Slice.FromGuid(guid); - Assert.That(slice.ToHexaString(), Is.EqualTo("00112233445566778899aabbccddeeff"), "Slice.FromGuid() should use the RFC 4122 encoding"); + Assert.That(slice.ToHexaString(), Is.EqualTo("00112233445566778899AABBCCDDEEFF"), "Slice.FromGuid() should use the RFC 4122 encoding"); // but guid in memory should follow MS format - slice = Slice.Create(guid.ToByteArray()); // <-- this is BAD, don't try this at home ! - Assert.That(slice.ToHexaString(), Is.EqualTo("33221100554477668899aabbccddeeff")); + slice = guid.ToByteArray().AsSlice(); // <-- this is BAD, don't try this at home ! 
+ Assert.That(slice.ToHexaString(), Is.EqualTo("33221100554477668899AABBCCDDEEFF")); } [Test] public void Test_Slice_ToGuid() { - Slice slice; - Guid guid; + // nil or empty should return Guid.Empty + Assert.That(Slice.Nil.ToGuid(), Is.EqualTo(Guid.Empty)); + Assert.That(Slice.Empty.ToGuid(), Is.EqualTo(Guid.Empty)); - // all zeroes should return Guid.Empty - slice = Slice.Create(16); + // all zeroes should also return Guid.Empty + Slice slice = Slice.Create(16); Assert.That(slice.ToGuid(), Is.EqualTo(Guid.Empty)); // RFC 4122 encoded UUIDs should be properly reversed when converted to System.GUID slice = Slice.FromHexa("00112233445566778899aabbccddeeff"); - guid = slice.ToGuid(); + Guid guid = slice.ToGuid(); Assert.That(guid.ToString(), Is.EqualTo("00112233-4455-6677-8899-aabbccddeeff"), "slice.ToGuid() should convert RFC 4122 encoded UUIDs into native System.Guid"); // round-trip guid = Guid.NewGuid(); Assert.That(Slice.FromGuid(guid).ToGuid(), Is.EqualTo(guid)); - Assert.That(Slice.FromAscii(guid.ToString()).ToGuid(), Is.EqualTo(guid), "String literals should also be converted if they match the expected format"); + Assert.That(Slice.FromStringAscii(guid.ToString()).ToGuid(), Is.EqualTo(guid), "String literals should also be converted if they match the expected format"); - Assert.That(() => Slice.FromAscii("random text").ToGuid(), Throws.InstanceOf()); + Assert.That(() => Slice.FromStringAscii("random text").ToGuid(), Throws.InstanceOf()); // should validate the arguments var x = Slice.FromGuid(guid); @@ -685,10 +1594,8 @@ public void Test_Slice_FromUuid128() { // Verify that FoundationDb.Client.Uuid are stored as 128-bit UUIDs using RFC 4122 - Slice slice; - // empty guid should be all zeroes - slice = Slice.FromUuid128(Uuid128.Empty); + Slice slice = Slice.FromUuid128(Uuid128.Empty); Assert.That(slice.ToHexaString(), Is.EqualTo("00000000000000000000000000000000")); // UUIDs should be stored using RFC 4122 (big endian) @@ -696,21 +1603,24 @@ public void 
Test_Slice_FromUuid128() // byte order should follow the string! slice = Slice.FromUuid128(uuid); - Assert.That(slice.ToHexaString(), Is.EqualTo("00112233445566778899aabbccddeeff"), "Slice.FromUuid() should preserve RFC 4122 ordering"); + Assert.That(slice.ToHexaString(), Is.EqualTo("00112233445566778899AABBCCDDEEFF"), "Slice.FromUuid() should preserve RFC 4122 ordering"); // ToByteArray() should also be safe - slice = Slice.Create(uuid.ToByteArray()); - Assert.That(slice.ToHexaString(), Is.EqualTo("00112233445566778899aabbccddeeff")); + slice = uuid.ToByteArray().AsSlice(); + Assert.That(slice.ToHexaString(), Is.EqualTo("00112233445566778899AABBCCDDEEFF")); } [Test] public void Test_Slice_ToUuid128() { - Slice slice; - Uuid128 uuid; - - // all zeroes should return Uuid.Empty - slice = Slice.Create(16); + // nil or empty should return Uuid128.Empty + Uuid128 uuid = Slice.Nil.ToUuid128(); + Assert.That(uuid, Is.EqualTo(Uuid128.Empty)); + uuid = Slice.Empty.ToUuid128(); + Assert.That(uuid, Is.EqualTo(Uuid128.Empty)); + + // all zeroes should also return Uuid128.Empty + Slice slice = Slice.Create(16); Assert.That(slice.ToUuid128(), Is.EqualTo(Uuid128.Empty)); // RFC 4122 encoded UUIDs should not keep the byte ordering @@ -722,9 +1632,9 @@ public void Test_Slice_ToUuid128() uuid = Uuid128.NewUuid(); Assert.That(Slice.FromUuid128(uuid).ToUuid128(), Is.EqualTo(uuid)); - Assert.That(Slice.FromAscii(uuid.ToString()).ToUuid128(), Is.EqualTo(uuid), "String literals should also be converted if they match the expected format"); + Assert.That(Slice.FromStringAscii(uuid.ToString()).ToUuid128(), Is.EqualTo(uuid), "String literals should also be converted if they match the expected format"); - Assert.That(() => Slice.FromAscii("random text").ToUuid128(), Throws.InstanceOf()); + Assert.That(() => Slice.FromStringAscii("random text").ToUuid128(), Throws.InstanceOf()); // should validate the arguments var x = Slice.FromUuid128(uuid); @@ -738,30 +1648,32 @@ public void 
Test_Slice_FromUuid64() { // Verify that FoundationDb.Client.Uuid64 are stored as 64-bit UUIDs in big-endian - Slice slice; - // empty guid should be all zeroes - slice = Slice.FromUuid64(Uuid64.Empty); + Slice slice = Slice.FromUuid64(Uuid64.Empty); Assert.That(slice.ToHexaString(), Is.EqualTo("0000000000000000")); // UUIDs should be stored in lexicographical order - var uuid = new Uuid64("01234567-89abcdef"); + var uuid = Uuid64.Parse("01234567-89abcdef"); // byte order should follow the string! slice = Slice.FromUuid64(uuid); - Assert.That(slice.ToHexaString(), Is.EqualTo("0123456789abcdef"), "Slice.FromUuid64() should preserve ordering"); + Assert.That(slice.ToHexaString(), Is.EqualTo("0123456789ABCDEF"), "Slice.FromUuid64() should preserve ordering"); // ToByteArray() should also be safe - slice = Slice.Create(uuid.ToByteArray()); - Assert.That(slice.ToHexaString(), Is.EqualTo("0123456789abcdef")); + slice = uuid.ToByteArray().AsSlice(); + Assert.That(slice.ToHexaString(), Is.EqualTo("0123456789ABCDEF")); } [Test] public void Test_Slice_ToUuid64() { - Uuid64 uuid; + // nil or empty should return Uuid64.Empty + Uuid64 uuid = Slice.Nil.ToUuid64(); + Assert.That(uuid, Is.EqualTo(Uuid64.Empty)); + uuid = Slice.Empty.ToUuid64(); + Assert.That(uuid, Is.EqualTo(Uuid64.Empty)); - // all zeroes should return Uuid.Empty + // all zeroes should also return Uuid64.Empty uuid = Slice.Create(8).ToUuid64(); Assert.That(uuid, Is.EqualTo(Uuid64.Empty)); @@ -773,9 +1685,9 @@ public void Test_Slice_ToUuid64() uuid = Uuid64.NewUuid(); Assert.That(Slice.FromUuid64(uuid).ToUuid64(), Is.EqualTo(uuid)); - Assert.That(Slice.FromAscii(uuid.ToString()).ToUuid64(), Is.EqualTo(uuid), "String literals should also be converted if they match the expected format"); + Assert.That(Slice.FromStringAscii(uuid.ToString()).ToUuid64(), Is.EqualTo(uuid), "String literals should also be converted if they match the expected format"); - Assert.That(() => Slice.FromAscii("random text").ToUuid64(), 
Throws.InstanceOf()); + Assert.That(() => Slice.FromStringAscii("random text").ToUuid64(), Throws.InstanceOf()); // should validate the arguments var x = Slice.FromUuid64(uuid); @@ -784,111 +1696,44 @@ public void Test_Slice_ToUuid64() Assert.That(() => MutateArray(x, null).ToUuid64(), Throws.InstanceOf()); } - [Test] - public void Test_Slice_FromFixed32() - { - // FromFixed32 always produce 4 bytes and uses Little Endian - - Assert.That(Slice.FromFixed32(0).GetBytes(), Is.EqualTo(new byte[4])); - Assert.That(Slice.FromFixed32(1).GetBytes(), Is.EqualTo(new byte[] { 1, 0, 0, 0 })); - Assert.That(Slice.FromFixed32(256).GetBytes(), Is.EqualTo(new byte[] { 0, 1, 0, 0 })); - Assert.That(Slice.FromFixed32(65536).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 1, 0 })); - Assert.That(Slice.FromFixed32(16777216).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 1 })); - Assert.That(Slice.FromFixed32(short.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 127, 0, 0 })); - Assert.That(Slice.FromFixed32(int.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 127 })); - - Assert.That(Slice.FromFixed32(-1).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 255 })); - Assert.That(Slice.FromFixed32(-256).GetBytes(), Is.EqualTo(new byte[] { 0, 255, 255, 255 })); - Assert.That(Slice.FromFixed32(-65536).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 255, 255 })); - Assert.That(Slice.FromFixed32(-16777216).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 255 })); - Assert.That(Slice.FromFixed32(int.MinValue).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 128 })); - - var rnd = new Random(); - for (int i = 0; i < 1000; i++) - { - int x = rnd.Next() * (rnd.Next(2) == 0 ? 
+1 : -1); - Slice s = Slice.FromFixed32(x); - Assert.That(s.Count, Is.EqualTo(4)); - Assert.That(s.ToInt32(), Is.EqualTo(x)); - } - } + #endregion [Test] - public void Test_Slice_FromFixed64() + public void Test_Slice_FromBase64() { - // FromFixed64 always produce 8 bytes and uses Little Endian + // numl string is Nil slice + Slice slice = Slice.FromBase64(default(string)); + Assert.That(slice, Is.EqualTo(Slice.Nil)); - Assert.That(Slice.FromFixed64(0L).GetBytes(), Is.EqualTo(new byte[8])); - Assert.That(Slice.FromFixed64(1L).GetBytes(), Is.EqualTo(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixed64(1L << 8).GetBytes(), Is.EqualTo(new byte[] { 0, 1, 0, 0, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixed64(1L << 16).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 1, 0, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixed64(1L << 24).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 1, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixed64(1L << 32).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 1, 0, 0, 0 })); - Assert.That(Slice.FromFixed64(1L << 40).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 0, 1, 0, 0 })); - Assert.That(Slice.FromFixed64(1L << 48).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 0, 0, 1, 0 })); - Assert.That(Slice.FromFixed64(1L << 56).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 })); - Assert.That(Slice.FromFixed64(short.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 127, 0, 0, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixed64(int.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 127, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixed64(long.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 255, 255, 255, 255, 127 })); - - Assert.That(Slice.FromFixed64(-1L).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 })); - Assert.That(Slice.FromFixed64(-256L).GetBytes(), Is.EqualTo(new byte[] { 0, 255, 255, 255, 255, 255, 255, 255 })); - Assert.That(Slice.FromFixed64(-65536L).GetBytes(), 
Is.EqualTo(new byte[] { 0, 0, 255, 255, 255, 255, 255, 255 })); - Assert.That(Slice.FromFixed64(-16777216L).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 255, 255, 255, 255, 255 })); - Assert.That(Slice.FromFixed64(-4294967296L).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 255, 255, 255, 255 })); - Assert.That(Slice.FromFixed64(long.MinValue).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 0, 0, 0, 128 })); + // empty string is empty slice + slice = Slice.FromBase64(""); + Assert.That(slice, Is.EqualTo(Slice.Empty)); - var rnd = new Random(); - for (int i = 0; i < 1000; i++) - { - long x = (long)rnd.Next() * rnd.Next() * (rnd.Next(2) == 0 ? +1 : -1); - Slice s = Slice.FromFixed64(x); - Assert.That(s.Count, Is.EqualTo(8)); - Assert.That(s.ToInt64(), Is.EqualTo(x)); - } - } - - [Test] - public void Test_Slice_FromFixedU64() - { - // FromFixed64 always produce 8 bytes and uses Little Endian - - Assert.That(Slice.FromFixedU64(0UL).GetBytes(), Is.EqualTo(new byte[8])); - Assert.That(Slice.FromFixedU64(1UL).GetBytes(), Is.EqualTo(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixedU64(1UL << 8).GetBytes(), Is.EqualTo(new byte[] { 0, 1, 0, 0, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixedU64(1UL << 16).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 1, 0, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixedU64(1UL << 24).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 1, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixedU64(1UL << 32).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 1, 0, 0, 0 })); - Assert.That(Slice.FromFixedU64(1UL << 40).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 0, 1, 0, 0 })); - Assert.That(Slice.FromFixedU64(1UL << 48).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 0, 0, 1, 0 })); - Assert.That(Slice.FromFixedU64(1UL << 56).GetBytes(), Is.EqualTo(new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 })); - Assert.That(Slice.FromFixedU64(ushort.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 0, 0, 0, 0, 0, 0 })); - 
Assert.That(Slice.FromFixedU64(int.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 127, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixedU64(uint.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 255, 0, 0, 0, 0 })); - Assert.That(Slice.FromFixedU64(long.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 255, 255, 255, 255, 127 })); - Assert.That(Slice.FromFixedU64(ulong.MaxValue).GetBytes(), Is.EqualTo(new byte[] { 255, 255, 255, 255, 255, 255, 255, 255 })); + // UUIDs should be stored in lexicographical order + slice = Slice.FromBase64(Convert.ToBase64String(Encoding.UTF8.GetBytes("Hello, World!"))); + Assert.That(slice.ToUnicode(), Is.EqualTo("Hello, World!")); - var rnd = new Random(); - for (int i = 0; i < 1000; i++) - { - ulong x = (ulong)rnd.Next() * (ulong)rnd.Next(); - Slice s = Slice.FromFixedU64(x); - Assert.That(s.Count, Is.EqualTo(8)); - Assert.That(s.ToUInt64(), Is.EqualTo(x)); - } + // malformed + Assert.That(() => Slice.FromBase64(Convert.ToBase64String(Encoding.UTF8.GetBytes("Hello, World!")).Substring(1)), Throws.InstanceOf()); + Assert.That(() => Slice.FromBase64("This is not a base64 string!"), Throws.InstanceOf()); } + #region Equality / Comparison / HashCodes... 
+ [Test] + [SuppressMessage("ReSharper", "EqualExpressionComparison")] public void Test_Slice_Equality() { - - var a = Slice.Create(new byte[] { 1, 2, 3 }); - var b = Slice.Create(new byte[] { 1, 2, 3 }); - var c = Slice.Create(new byte[] { 0, 1, 2, 3, 4 }, 1, 3); - var x = Slice.Create(new byte[] { 4, 5, 6 }); - var y = Slice.Create(new byte[] { 1, 2, 3 }, 0, 2); - var z = Slice.Create(new byte[] { 1, 2, 3, 4 }); - +#pragma warning disable 1718 + // a == b == c && x != y && a != x + var a = new byte[] { 1, 2, 3 }.AsSlice(); + var b = new byte[] { 1, 2, 3 }.AsSlice(); + var c = new byte[] { 0, 1, 2, 3, 4 }.AsSlice(1, 3); + var x = new byte[] { 4, 5, 6 }.AsSlice(); + var y = new byte[] { 1, 2, 3 }.AsSlice(0, 2); + var z = new byte[] { 1, 2, 3, 4 }.AsSlice(); + + // IEquatable // equals Assert.That(a, Is.EqualTo(a)); Assert.That(a, Is.EqualTo(b)); @@ -899,23 +1744,68 @@ public void Test_Slice_Equality() Assert.That(c, Is.EqualTo(a)); Assert.That(c, Is.EqualTo(b)); Assert.That(c, Is.EqualTo(c)); - // not equals Assert.That(a, Is.Not.EqualTo(x)); Assert.That(a, Is.Not.EqualTo(y)); Assert.That(a, Is.Not.EqualTo(z)); + + // Default Comparer + // equals + Assert.That(Slice.Comparer.Default.Equals(a, a), Is.True); + Assert.That(Slice.Comparer.Default.Equals(a, b), Is.True); + Assert.That(Slice.Comparer.Default.Equals(a, c), Is.True); + Assert.That(Slice.Comparer.Default.Equals(b, a), Is.True); + Assert.That(Slice.Comparer.Default.Equals(b, b), Is.True); + Assert.That(Slice.Comparer.Default.Equals(b, c), Is.True); + Assert.That(Slice.Comparer.Default.Equals(c, a), Is.True); + Assert.That(Slice.Comparer.Default.Equals(c, b), Is.True); + Assert.That(Slice.Comparer.Default.Equals(c, c), Is.True); + // not equals + Assert.That(Slice.Comparer.Default.Equals(a, x), Is.False); + Assert.That(Slice.Comparer.Default.Equals(a, y), Is.False); + Assert.That(Slice.Comparer.Default.Equals(a, z), Is.False); + + // Operators + // == + Assert.That(a == a, Is.True); + Assert.That(a == b, 
Is.True); + Assert.That(a == c, Is.True); + Assert.That(b == a, Is.True); + Assert.That(b == b, Is.True); + Assert.That(b == c, Is.True); + Assert.That(c == a, Is.True); + Assert.That(c == b, Is.True); + Assert.That(c == c, Is.True); + Assert.That(a == x, Is.False); + Assert.That(a == y, Is.False); + Assert.That(a == z, Is.False); + // != + Assert.That(a != a, Is.False); + Assert.That(a != b, Is.False); + Assert.That(a != c, Is.False); + Assert.That(b != a, Is.False); + Assert.That(b != b, Is.False); + Assert.That(b != c, Is.False); + Assert.That(c != a, Is.False); + Assert.That(c != b, Is.False); + Assert.That(c != c, Is.False); + Assert.That(a != x, Is.True); + Assert.That(a != y, Is.True); + Assert.That(a != z, Is.True); +#pragma warning restore 1718 + } [Test] public void Test_Slice_Equals_Slice() { - var a = Slice.Create(new byte[] { 1, 2, 3 }); - var b = Slice.Create(new byte[] { 1, 2, 3 }); - var c = Slice.Create(new byte[] { 0, 1, 2, 3, 4 }, 1, 3); - var x = Slice.Create(new byte[] { 4, 5, 6 }); - var y = Slice.Create(new byte[] { 1, 2, 3 }, 0, 2); - var z = Slice.Create(new byte[] { 1, 2, 3, 4 }); + var a = new byte[] { 1, 2, 3 }.AsSlice(); + var b = new byte[] { 1, 2, 3 }.AsSlice(); + var c = new byte[] { 0, 1, 2, 3, 4 }.AsSlice(1, 3); + var x = new byte[] { 4, 5, 6 }.AsSlice(); + var y = new byte[] { 1, 2, 3 }.AsSlice(0, 2); + var z = new byte[] { 1, 2, 3, 4 }.AsSlice(); // equals Assert.That(a.Equals(a), Is.True); @@ -943,37 +1833,37 @@ public void Test_Slice_Equals_Slice() [Test] public void Test_Slice_Equality_Corner_Cases() { - Assert.That(Slice.Create(null), Is.EqualTo(Slice.Nil)); - Assert.That(Slice.Create(new byte[0]), Is.EqualTo(Slice.Empty)); - - Assert.That(Slice.Create(null) == Slice.Nil, Is.True, "null == Nil"); - Assert.That(Slice.Create(null) == Slice.Empty, Is.False, "null != Empty"); - Assert.That(Slice.Create(new byte[0]) == Slice.Empty, Is.True, "[0] == Empty"); - Assert.That(Slice.Create(new byte[0]) == Slice.Nil, Is.False, "[0] != 
Nill"); + Assert.That(default(byte[]).AsSlice(), Is.EqualTo(Slice.Nil)); + Assert.That(new byte[0].AsSlice(), Is.EqualTo(Slice.Empty)); + + Assert.That(default(byte[]).AsSlice() == Slice.Nil, Is.True, "null == Nil"); + Assert.That(default(byte[]).AsSlice() == Slice.Empty, Is.False, "null != Empty"); + Assert.That(new byte[0].AsSlice() == Slice.Empty, Is.True, "[0] == Empty"); + Assert.That(new byte[0].AsSlice() == Slice.Nil, Is.False, "[0] != Nill"); // "slice == null" should be the equivalent to "slice.IsNull" so only true for Slice.Nil Assert.That(Slice.Nil == null, Is.True, "'Slice.Nil == null' is true"); Assert.That(Slice.Empty == null, Is.False, "'Slice.Empty == null' is false"); - Assert.That(Slice.FromByte(1) == null, Is.False, "'{1} == null' is false"); + Assert.That(Slice.FromByte(1) == null, Is.False, "'[1] == null' is false"); Assert.That(null == Slice.Nil, Is.True, "'Slice.Nil == null' is true"); Assert.That(null == Slice.Empty, Is.False, "'Slice.Empty == null' is false"); - Assert.That(null == Slice.FromByte(1), Is.False, "'{1} == null' is false"); + Assert.That(null == Slice.FromByte(1), Is.False, "'[1] == null' is false"); // "slice != null" should be the equivalent to "slice.HasValue" so only false for Slice.Nil Assert.That(Slice.Nil != null, Is.False, "'Slice.Nil != null' is false"); Assert.That(Slice.Empty != null, Is.True, "'Slice.Empty != null' is true"); - Assert.That(Slice.FromByte(1) != null, Is.True, "'{1} != null' is true"); + Assert.That(Slice.FromByte(1) != null, Is.True, "'[1] != null' is true"); Assert.That(null != Slice.Nil, Is.False, "'Slice.Nil != null' is false"); Assert.That(null != Slice.Empty, Is.True, "'Slice.Empty != null' is true"); - Assert.That(null != Slice.FromByte(1), Is.True, "'{1} != null' is true"); + Assert.That(null != Slice.FromByte(1), Is.True, "'[1] != null' is true"); } [Test] public void Test_Slice_Equality_TwoByteArrayWithSameContentShouldReturnTrue() { - var s1 = Slice.FromAscii("abcd"); - var s2 = 
Slice.FromAscii("abcd"); - Assert.IsTrue(s1.Equals(s2), "'abcd' should equals 'abcd'"); + var s1 = Slice.FromStringAscii("abcd"); + var s2 = Slice.FromStringAscii("abcd"); + Assert.That(s1.Equals(s2), Is.True, "'abcd' should equals 'abcd'"); } [Test] @@ -981,17 +1871,17 @@ public void Test_Slice_Equality_TwoByteArrayWithSameContentFromSameOriginalBuffe { var origin = System.Text.Encoding.ASCII.GetBytes("abcdabcd"); var a1 = new ArraySegment(origin, 0, 4); //"abcd", refer first part of origin buffer - var s1 = Slice.Create(a1); // + var s1 = a1.AsSlice(); // var a2 = new ArraySegment(origin, 4, 4);//"abcd", refer second part of origin buffer - var s2 = Slice.Create(a2); - Assert.IsTrue(s1.Equals(s2), "'abcd' should equals 'abcd'"); + var s2 = a2.AsSlice(); + Assert.That(s1.Equals(s2), Is.True, "'abcd' should equals 'abcd'"); } [Test] public void Test_Slice_Equality_Malformed() { - var good = Slice.FromAscii("good"); - var evil = Slice.FromAscii("evil"); + var good = Slice.FromStringAscii("good"); + var evil = Slice.FromStringAscii("evil"); // argument should be validated Assert.That(() => good.Equals(MutateOffset(evil, -1)), Throws.InstanceOf()); @@ -1019,6 +1909,12 @@ public void Test_Slice_Hash_Code() Assert.That(Slice.FromString("zabcz").Substring(1, 3).GetHashCode(), Is.EqualTo(Slice.FromString("abc").GetHashCode()), "Hashcode should not depend on the offset in the array"); Assert.That(Slice.FromString("abc").GetHashCode(), Is.Not.EqualTo(Slice.FromString("abcd").GetHashCode()), "Hashcode should include all the bytes"); + Assert.That(Slice.Comparer.Default.GetHashCode(Slice.Nil), Is.EqualTo(0), "Nil hashcode should always be 0"); + Assert.That(Slice.Comparer.Default.GetHashCode(Slice.Empty), Is.Not.EqualTo(0), "Empty hashcode should not be equal to 0"); + Assert.That(Slice.Comparer.Default.GetHashCode(Slice.FromString("abc")), Is.EqualTo(Slice.FromString("abc").GetHashCode()), "Hashcode should not depend on the backing array"); + 
Assert.That(Slice.Comparer.Default.GetHashCode(Slice.FromString("zabcz").Substring(1, 3)), Is.EqualTo(Slice.FromString("abc").GetHashCode()), "Hashcode should not depend on the offset in the array"); + Assert.That(Slice.Comparer.Default.GetHashCode(Slice.FromString("abc")), Is.Not.EqualTo(Slice.FromString("abcd").GetHashCode()), "Hashcode should include all the bytes"); + // should validate the arguments var x = Slice.FromString("evil"); Assert.That(() => MutateOffset(x, -1).GetHashCode(), Throws.InstanceOf()); @@ -1027,30 +1923,84 @@ public void Test_Slice_Hash_Code() } [Test] + [SuppressMessage("ReSharper", "EqualExpressionComparison")] public void Test_Slice_Comparison() { - var a = Slice.FromAscii("a"); - var ab = Slice.FromAscii("ab"); - var abc = Slice.FromAscii("abc"); - var abc2 = Slice.FromAscii("abc"); // same bytes but different buffer - var b = Slice.FromAscii("b"); - +#pragma warning disable 1718 + var a = Slice.FromStringAscii("a"); + var ab = Slice.FromStringAscii("ab"); + var abc = Slice.FromStringAscii("abc"); + var abc2 = Slice.FromStringAscii("abc"); // same bytes but different buffer + var b = Slice.FromStringAscii("b"); + + // CompateTo // a = b Assert.That(a.CompareTo(a), Is.EqualTo(0)); Assert.That(ab.CompareTo(ab), Is.EqualTo(0)); Assert.That(abc.CompareTo(abc), Is.EqualTo(0)); Assert.That(abc.CompareTo(abc2), Is.EqualTo(0)); - // a < b Assert.That(a.CompareTo(b), Is.LessThan(0)); Assert.That(a.CompareTo(ab), Is.LessThan(0)); Assert.That(a.CompareTo(abc), Is.LessThan(0)); - // a > b Assert.That(b.CompareTo(a), Is.GreaterThan(0)); Assert.That(b.CompareTo(ab), Is.GreaterThan(0)); Assert.That(b.CompareTo(abc), Is.GreaterThan(0)); - + + // Default Comparer + // a = b + Assert.That(Slice.Comparer.Default.Compare(a, a), Is.EqualTo(0)); + Assert.That(Slice.Comparer.Default.Compare(ab, ab), Is.EqualTo(0)); + Assert.That(Slice.Comparer.Default.Compare(abc, abc), Is.EqualTo(0)); + Assert.That(Slice.Comparer.Default.Compare(abc, abc2), Is.EqualTo(0)); 
+ // a < b + Assert.That(Slice.Comparer.Default.Compare(a, b), Is.LessThan(0)); + Assert.That(Slice.Comparer.Default.Compare(a, ab), Is.LessThan(0)); + Assert.That(Slice.Comparer.Default.Compare(a, abc), Is.LessThan(0)); + // a > b + Assert.That(Slice.Comparer.Default.Compare(b, a), Is.GreaterThan(0)); + Assert.That(Slice.Comparer.Default.Compare(b, ab), Is.GreaterThan(0)); + Assert.That(Slice.Comparer.Default.Compare(b, abc), Is.GreaterThan(0)); + + // Operators + // < + Assert.That(a < a, Is.False); + Assert.That(a < ab, Is.True); + Assert.That(ab < b, Is.True); + Assert.That(a < b, Is.True); + Assert.That(ab < a, Is.False); + Assert.That(b < ab, Is.False); + Assert.That(b < a, Is.False); + Assert.That(abc < abc2, Is.False); + // <= + Assert.That(a <= a, Is.True); + Assert.That(a <= ab, Is.True); + Assert.That(ab <= b, Is.True); + Assert.That(a <= b, Is.True); + Assert.That(ab <= a, Is.False); + Assert.That(b <= ab, Is.False); + Assert.That(b <= a, Is.False); + Assert.That(abc <= abc2, Is.True); + // > + Assert.That(a > a, Is.False); + Assert.That(ab > a, Is.True); + Assert.That(b > ab, Is.True); + Assert.That(b > a, Is.True); + Assert.That(a > ab, Is.False); + Assert.That(ab > b, Is.False); + Assert.That(a > b, Is.False); + Assert.That(abc > abc2, Is.False); + // >= + Assert.That(a >= a, Is.True); + Assert.That(ab >= a, Is.True); + Assert.That(b >= ab, Is.True); + Assert.That(b >= a, Is.True); + Assert.That(a >= ab, Is.False); + Assert.That(ab >= b, Is.False); + Assert.That(a >= b, Is.False); + Assert.That(abc >= abc2, Is.True); +#pragma warning restore 1718 } [Test] @@ -1063,7 +2013,7 @@ public void Test_Slice_Comparison_Corner_Cases() Assert.That(Slice.Empty.CompareTo(Slice.Nil), Is.EqualTo(0)); // X > NULL, NULL < X - var abc = Slice.FromAscii("abc"); + var abc = Slice.FromStringAscii("abc"); Assert.That(abc.CompareTo(Slice.Nil), Is.GreaterThan(0)); Assert.That(abc.CompareTo(Slice.Empty), Is.GreaterThan(0)); Assert.That(Slice.Nil.CompareTo(abc), 
Is.LessThan(0)); @@ -1073,8 +2023,8 @@ public void Test_Slice_Comparison_Corner_Cases() [Test] public void Test_Slice_Comparison_Malformed() { - var good = Slice.FromAscii("good"); - var evil = Slice.FromAscii("evil"); + var good = Slice.FromStringAscii("good"); + var evil = Slice.FromStringAscii("evil"); // argument should be validated Assert.That(() => good.CompareTo(MutateOffset(evil, -1)), Throws.InstanceOf()); @@ -1089,6 +2039,8 @@ public void Test_Slice_Comparison_Malformed() Assert.That(() => MutateOffset(MutateCount(evil, 5), -1).CompareTo(good), Throws.InstanceOf()); } + #endregion + private static readonly string UNICODE_TEXT = "Thïs Ïs à strîng thât contaÎns somé ùnicodè charactêrs and should be encoded in UTF-8: よろしくお願いします"; private static readonly byte[] UNICODE_BYTES = Encoding.UTF8.GetBytes(UNICODE_TEXT); @@ -1105,7 +2057,8 @@ public void Test_Slice_FromStream() Assert.That(slice.GetBytes(), Is.EqualTo(UNICODE_BYTES)); Assert.That(slice.ToUnicode(), Is.EqualTo(UNICODE_TEXT)); - Assert.That(() => Slice.FromStream(null), Throws.InstanceOf(), "Should throw if null"); + // ReSharper disable once AssignNullToNotNullAttribute + Assert.That(() => Slice.FromStream(null), Throws.ArgumentNullException, "Should throw if null"); Assert.That(Slice.FromStream(Stream.Null), Is.EqualTo(Slice.Nil), "Stream.Null should return Slice.Nil"); using(var ms = new MemoryStream()) @@ -1116,52 +2069,65 @@ public void Test_Slice_FromStream() } [Test] - public async Task Test_Slice_FromStreamAsync() + public void Test_Slice_Substring() { - Slice slice; - - // Reading from a MemoryStream should use the non-async path - using (var ms = new MemoryStream(UNICODE_BYTES)) - { - slice = await Slice.FromStreamAsync(ms, this.Cancellation); - } - Assert.That(slice.Count, Is.EqualTo(UNICODE_BYTES.Length)); - Assert.That(slice.GetBytes(), Is.EqualTo(UNICODE_BYTES)); - Assert.That(slice.ToUnicode(), Is.EqualTo(UNICODE_TEXT)); - - // Reading from a FileStream should use the async path - var 
tmp = Path.GetTempFileName(); - try - { - File.WriteAllBytes(tmp, UNICODE_BYTES); - using(var fs = File.OpenRead(tmp)) - { - slice = await Slice.FromStreamAsync(fs, this.Cancellation); - } - } - finally - { - File.Delete(tmp); - } - - Assert.That(slice.Count, Is.EqualTo(UNICODE_BYTES.Length)); - Assert.That(slice.GetBytes(), Is.EqualTo(UNICODE_BYTES)); - Assert.That(slice.ToUnicode(), Is.EqualTo(UNICODE_TEXT)); + Assert.That(Slice.Empty.Substring(0), Is.EqualTo(Slice.Empty)); + Assert.That(Slice.Empty.Substring(0, 0), Is.EqualTo(Slice.Empty)); + Assert.That(() => Slice.Empty.Substring(0, 1), Throws.InstanceOf()); + Assert.That(() => Slice.Empty.Substring(1), Throws.InstanceOf()); + Assert.That(() => Slice.Empty.Substring(1, 0), Throws.Nothing, "We allow out of bound substring if count == 0"); + + // Substring(offset) + Assert.That(Value("Hello, World!").Substring(0), Is.EqualTo(Value("Hello, World!"))); + Assert.That(Value("Hello, World!").Substring(7), Is.EqualTo(Value("World!"))); + Assert.That(Value("Hello, World!").Substring(12), Is.EqualTo(Value("!"))); + Assert.That(Value("Hello, World!").Substring(13), Is.EqualTo(Slice.Empty)); + Assert.That(() => Value("Hello, World!").Substring(14), Throws.InstanceOf()); + + // Substring(offset, count) + Assert.That(Value("Hello, World!").Substring(0, 5), Is.EqualTo(Value("Hello"))); + Assert.That(Value("Hello, World!").Substring(7, 5), Is.EqualTo(Value("World"))); + Assert.That(Value("Hello, World!").Substring(7, 6), Is.EqualTo(Value("World!"))); + Assert.That(Value("Hello, World!").Substring(12, 1), Is.EqualTo(Value("!"))); + Assert.That(Value("Hello, World!").Substring(13, 0), Is.EqualTo(Slice.Empty)); + Assert.That(() => Value("Hello, World!").Substring(7, 7), Throws.InstanceOf()); + Assert.That(() => Value("Hello, World!").Substring(13, 1), Throws.InstanceOf()); + Assert.That(() => Value("Hello, World!").Substring(7, -1), Throws.InstanceOf()); + + // Substring(offset) negative indexing + Assert.That(Value("Hello, 
World!").Substring(-1), Is.EqualTo(Value("!"))); + Assert.That(Value("Hello, World!").Substring(-2), Is.EqualTo(Value("d!"))); + Assert.That(Value("Hello, World!").Substring(-6), Is.EqualTo(Value("World!"))); + Assert.That(Value("Hello, World!").Substring(-13), Is.EqualTo(Value("Hello, World!"))); + Assert.That(() => Value("Hello, World!").Substring(-14), Throws.InstanceOf()); + + // Slice - int + Assert.That(Value("Hello, World!") - 0, Is.EqualTo(Value("Hello, World!"))); + Assert.That(Value("Hello, World!") - 1, Is.EqualTo(Value("Hello, World"))); + Assert.That(Value("Hello, World!") - 8, Is.EqualTo(Value("Hello"))); + Assert.That(Value("Hello, World!") - 12, Is.EqualTo(Value("H"))); + Assert.That(Value("Hello, World!") - 13, Is.EqualTo(Slice.Empty)); + Assert.That(() => Value("Hello, World!") - 14, Throws.InstanceOf()); + Assert.That(() => Value("Hello, World!") - (-1), Throws.InstanceOf()); } [Test] public void Test_Slice_Concat() { - var a = Slice.FromString("a"); - var b = Slice.FromString("b"); - var c = Slice.FromString("c"); - var ab = Slice.FromString("ab"); - var bc = Slice.FromString("bc"); - var abc = Slice.FromString("abc"); + var a = Value("a"); + var b = Value("b"); + var c = Value("c"); + var ab = Value("ab"); + var bc = Value("bc"); + var abc = Value("abc"); + // Concat2 + + Assert.That(Slice.Concat(a, a).ToUnicode(), Is.EqualTo("aa")); Assert.That(Slice.Concat(a, b).ToUnicode(), Is.EqualTo("ab")); Assert.That(Slice.Concat(b, c).ToUnicode(), Is.EqualTo("bc")); + Assert.That(Slice.Concat(ab, ab).ToUnicode(), Is.EqualTo("abab")); Assert.That(Slice.Concat(ab, c).ToUnicode(), Is.EqualTo("abc")); Assert.That(Slice.Concat(a, bc).ToUnicode(), Is.EqualTo("abc")); Assert.That(Slice.Concat(a, b, c).ToUnicode(), Is.EqualTo("abc")); @@ -1180,6 +2146,8 @@ public void Test_Slice_Concat() Assert.That(Slice.Concat(Slice.Empty, abc), Is.EqualTo(abc)); Assert.That(Slice.Concat(Slice.Nil, abc), Is.EqualTo(abc)); + // Concat3 + Assert.That(Slice.Concat(Slice.Empty, 
b, c), Is.EqualTo(bc)); Assert.That(Slice.Concat(ab, Slice.Empty, c), Is.EqualTo(abc)); Assert.That(Slice.Concat(a, b, Slice.Empty), Is.EqualTo(ab)); @@ -1189,21 +2157,35 @@ public void Test_Slice_Concat() Assert.That(Slice.Concat(Slice.Nil, Slice.Nil, Slice.Nil), Is.EqualTo(Slice.Empty)); Assert.That(Slice.Concat(Slice.Empty, Slice.Empty, Slice.Empty), Is.EqualTo(Slice.Empty)); + + // Slice + Slice + Assert.That(a + a, Is.EqualTo(Value("aa"))); + Assert.That(a + b, Is.EqualTo(Value("ab"))); + Assert.That(b + c, Is.EqualTo(Value("bc"))); + Assert.That(ab + ab, Is.EqualTo(Value("abab"))); + Assert.That(ab + c, Is.EqualTo(Value("abc"))); + Assert.That(a + bc, Is.EqualTo(Value("abc"))); + + // Slice + byte + Assert.That(a + 0, Is.EqualTo(Key("a\x00"))); + Assert.That(a + 1, Is.EqualTo(Key("a\x01"))); + Assert.That(b + (byte)'A', Is.EqualTo(Key("bA"))); + Assert.That(abc + 255, Is.EqualTo(Key("abc\xff"))); } [Test] public void Test_Slice_Join_Array() { - var a = Slice.FromString("A"); - var b = Slice.FromString("BB"); - var c = Slice.FromString("CCC"); + var a = Value("A"); + var b = Value("BB"); + var c = Value("CCC"); // empty separator should just join all slices together Assert.That(Slice.Join(Slice.Empty, new Slice[0]), Is.EqualTo(Slice.Empty)); Assert.That(Slice.Join(Slice.Empty, new[] { Slice.Empty }), Is.EqualTo(Slice.Empty)); - Assert.That(Slice.Join(Slice.Empty, new[] { a }), Is.EqualTo(Slice.FromString("A"))); - Assert.That(Slice.Join(Slice.Empty, new[] { a, b }), Is.EqualTo(Slice.FromString("ABB"))); - Assert.That(Slice.Join(Slice.Empty, new[] { a, b, c }), Is.EqualTo(Slice.FromString("ABBCCC"))); + Assert.That(Slice.Join(Slice.Empty, new[] { a }), Is.EqualTo(Value("A"))); + Assert.That(Slice.Join(Slice.Empty, new[] { a, b }), Is.EqualTo(Value("ABB"))); + Assert.That(Slice.Join(Slice.Empty, new[] { a, b, c }), Is.EqualTo(Value("ABBCCC"))); Assert.That(Slice.Join(Slice.Empty, new[] { a, b, c }).Offset, Is.EqualTo(0)); Assert.That(Slice.Join(Slice.Empty, 
new[] { a, b, c }).Count, Is.EqualTo(6)); @@ -1211,63 +2193,64 @@ public void Test_Slice_Join_Array() var sep = Slice.FromChar(','); Assert.That(Slice.Join(sep, new Slice[0]), Is.EqualTo(Slice.Empty)); Assert.That(Slice.Join(sep, new[] { Slice.Empty }), Is.EqualTo(Slice.Empty)); - Assert.That(Slice.Join(sep, new[] { a }), Is.EqualTo(Slice.FromString("A"))); - Assert.That(Slice.Join(sep, new[] { a, b }), Is.EqualTo(Slice.FromString("A,BB"))); - Assert.That(Slice.Join(sep, new[] { a, b, c }), Is.EqualTo(Slice.FromString("A,BB,CCC"))); + Assert.That(Slice.Join(sep, new[] { a }), Is.EqualTo(Value("A"))); + Assert.That(Slice.Join(sep, new[] { a, b }), Is.EqualTo(Value("A,BB"))); + Assert.That(Slice.Join(sep, new[] { a, b, c }), Is.EqualTo(Value("A,BB,CCC"))); Assert.That(Slice.Join(sep, new[] { a, b, c }).Offset, Is.EqualTo(0)); Assert.That(Slice.Join(sep, new[] { a, b, c }).Count, Is.EqualTo(8)); - Assert.That(Slice.Join(sep, new[] { a, Slice.Empty, c }), Is.EqualTo(Slice.FromString("A,,CCC"))); - Assert.That(Slice.Join(sep, new[] { Slice.Empty, b, c }), Is.EqualTo(Slice.FromString(",BB,CCC"))); - Assert.That(Slice.Join(sep, new[] { Slice.Empty, Slice.Empty, Slice.Empty }), Is.EqualTo(Slice.FromString(",,"))); + Assert.That(Slice.Join(sep, new[] { a, Slice.Empty, c }), Is.EqualTo(Value("A,,CCC"))); + Assert.That(Slice.Join(sep, new[] { Slice.Empty, b, c }), Is.EqualTo(Value(",BB,CCC"))); + Assert.That(Slice.Join(sep, new[] { Slice.Empty, Slice.Empty, Slice.Empty }), Is.EqualTo(Value(",,"))); // multi byte separator, with a non-0 offset - sep = Slice.FromString("!<@>!").Substring(1, 3); + sep = Value("!<@>!").Substring(1, 3); Assert.That(sep.Offset, Is.EqualTo(1)); Assert.That(Slice.Join(sep, new Slice[0]), Is.EqualTo(Slice.Empty)); Assert.That(Slice.Join(sep, new[] { Slice.Empty }), Is.EqualTo(Slice.Empty)); - Assert.That(Slice.Join(sep, new[] { a }), Is.EqualTo(Slice.FromString("A"))); - Assert.That(Slice.Join(sep, new[] { a, b }), 
Is.EqualTo(Slice.FromString("A<@>BB"))); - Assert.That(Slice.Join(sep, new[] { a, b, c }), Is.EqualTo(Slice.FromString("A<@>BB<@>CCC"))); + Assert.That(Slice.Join(sep, new[] { a }), Is.EqualTo(Value("A"))); + Assert.That(Slice.Join(sep, new[] { a, b }), Is.EqualTo(Value("A<@>BB"))); + Assert.That(Slice.Join(sep, new[] { a, b, c }), Is.EqualTo(Value("A<@>BB<@>CCC"))); Assert.That(Slice.Join(sep, new[] { a, b, c }).Offset, Is.EqualTo(0)); Assert.That(Slice.Join(sep, new[] { a, b, c }).Count, Is.EqualTo(12)); // join slices that use the same underlying buffer - string s = "hello world!!!"; - byte[] tmp = Encoding.UTF8.GetBytes(s); + const string HELLO_WORLD = "hello world!!!"; + byte[] tmp = Encoding.UTF8.GetBytes(HELLO_WORLD); var slices = new Slice[tmp.Length]; - for (int i = 0; i < tmp.Length; i++) slices[i] = Slice.Create(tmp, i, 1); - Assert.That(Slice.Join(Slice.Empty, slices), Is.EqualTo(Slice.FromString(s))); - Assert.That(Slice.Join(Slice.FromChar(':'), slices), Is.EqualTo(Slice.FromString("h:e:l:l:o: :w:o:r:l:d:!:!:!"))); + for (int i = 0; i < tmp.Length; i++) slices[i] = tmp.AsSlice(i, 1); + Assert.That(Slice.Join(Slice.Empty, slices), Is.EqualTo(Value(HELLO_WORLD))); + Assert.That(Slice.Join(Slice.FromChar(':'), slices), Is.EqualTo(Value("h:e:l:l:o: :w:o:r:l:d:!:!:!"))); } [Test] public void Test_Slice_Join_Enumerable() { - var query = Enumerable.Range(1, 3).Select(c => Slice.FromString(new string((char)(64 + c), c))); - Assert.That(Slice.Join(Slice.Empty, Enumerable.Empty()), Is.EqualTo(Slice.Empty)); - Assert.That(Slice.Join(Slice.Empty, query), Is.EqualTo(Slice.FromString("ABBCCC"))); + + // ReSharper disable PossibleMultipleEnumeration + var query = Enumerable.Range(1, 3).Select(c => Value(new string((char)(64 + c), c))); + Assert.That(Slice.Join(Slice.Empty, query), Is.EqualTo(Value("ABBCCC"))); Assert.That(Slice.Join(Slice.Empty, query).Offset, Is.EqualTo(0)); Assert.That(Slice.Join(Slice.Empty, query).Count, Is.EqualTo(6)); var sep = 
Slice.FromChar(','); Assert.That(Slice.Join(sep, Enumerable.Empty()), Is.EqualTo(Slice.Empty)); - Assert.That(Slice.Join(sep, query), Is.EqualTo(Slice.FromString("A,BB,CCC"))); + Assert.That(Slice.Join(sep, query), Is.EqualTo(Value("A,BB,CCC"))); Assert.That(Slice.Join(sep, query).Offset, Is.EqualTo(0)); Assert.That(Slice.Join(sep, query).Count, Is.EqualTo(8)); var arr = query.ToArray(); - Assert.That(Slice.Join(Slice.Empty, (IEnumerable)arr), Is.EqualTo(Slice.FromString("ABBCCC"))); + Assert.That(Slice.Join(Slice.Empty, (IEnumerable)arr), Is.EqualTo(Value("ABBCCC"))); Assert.That(Slice.Join(Slice.Empty, (IEnumerable)arr).Offset, Is.EqualTo(0)); Assert.That(Slice.Join(Slice.Empty, (IEnumerable)arr).Count, Is.EqualTo(6)); - + // ReSharper restore PossibleMultipleEnumeration } [Test] public void Test_Slice_JoinBytes() { var sep = Slice.FromChar(' '); - var tokens = new[] { Slice.FromString("hello"), Slice.FromString("world"), Slice.FromString("!") }; + var tokens = new[] { Value("hello"), Value("world"), Value("!") }; var joined = Slice.JoinBytes(sep, tokens); Assert.That(joined, Is.Not.Null); @@ -1301,8 +2284,10 @@ public void Test_Slice_JoinBytes() Assert.That(joined, Is.Not.Null); Assert.That(joined.Length, Is.EqualTo(0)); - Assert.That(() => Slice.JoinBytes(sep, default(Slice[]), 0, 0), Throws.InstanceOf()); - Assert.That(() => Slice.JoinBytes(sep, default(IEnumerable)), Throws.InstanceOf()); + // ReSharper disable AssignNullToNotNullAttribute + Assert.That(() => Slice.JoinBytes(sep, default(Slice[]), 0, 0), Throws.ArgumentNullException); + Assert.That(() => Slice.JoinBytes(sep, default(IEnumerable)), Throws.ArgumentNullException); + // ReSharper restore AssignNullToNotNullAttribute Assert.That(() => Slice.JoinBytes(sep, tokens, 0, 4), Throws.InstanceOf()); Assert.That(() => Slice.JoinBytes(sep, tokens, -1, 1), Throws.InstanceOf()); @@ -1313,31 +2298,31 @@ public void Test_Slice_JoinBytes() [Test] public void Test_Slice_Split() { - var a = Slice.FromString("A"); 
- var b = Slice.FromString("BB"); - var c = Slice.FromString("CCC"); + var a = Value("A"); + var b = Value("BB"); + var c = Value("CCC"); var comma = Slice.FromChar(','); - Assert.That(Slice.FromString("A").Split(comma), Is.EqualTo(new[] { a })); - Assert.That(Slice.FromString("A,BB").Split(comma), Is.EqualTo(new[] { a, b })); - Assert.That(Slice.FromString("A,BB,CCC").Split(comma), Is.EqualTo(new[] { a, b, c })); + Assert.That(Value("A").Split(comma), Is.EqualTo(new[] { a })); + Assert.That(Value("A,BB").Split(comma), Is.EqualTo(new[] { a, b })); + Assert.That(Value("A,BB,CCC").Split(comma), Is.EqualTo(new[] { a, b, c })); // empty values should be kept or discarded, depending on the option settings - Assert.That(Slice.FromString("A,,CCC").Split(comma, StringSplitOptions.None), Is.EqualTo(new[] { a, Slice.Empty, c })); - Assert.That(Slice.FromString("A,,CCC").Split(comma, StringSplitOptions.RemoveEmptyEntries), Is.EqualTo(new[] { a, c })); + Assert.That(Value("A,,CCC").Split(comma, StringSplitOptions.None), Is.EqualTo(new[] { a, Slice.Empty, c })); + Assert.That(Value("A,,CCC").Split(comma, StringSplitOptions.RemoveEmptyEntries), Is.EqualTo(new[] { a, c })); // edge cases // > should behave the same as String.Split() Assert.That(Slice.Empty.Split(comma, StringSplitOptions.None), Is.EqualTo(new [] { Slice.Empty })); Assert.That(Slice.Empty.Split(comma, StringSplitOptions.RemoveEmptyEntries), Is.EqualTo(new Slice[0])); - Assert.That(Slice.FromString("A,").Split(comma, StringSplitOptions.None), Is.EqualTo(new[] { a, Slice.Empty })); - Assert.That(Slice.FromString("A,").Split(comma, StringSplitOptions.RemoveEmptyEntries), Is.EqualTo(new [] { a })); - Assert.That(Slice.FromString(",").Split(comma, StringSplitOptions.RemoveEmptyEntries), Is.EqualTo(new Slice[0])); - Assert.That(Slice.FromString(",,,").Split(comma, StringSplitOptions.RemoveEmptyEntries), Is.EqualTo(new Slice[0])); + Assert.That(Value("A,").Split(comma, StringSplitOptions.None), Is.EqualTo(new[] { a, 
Slice.Empty })); + Assert.That(Value("A,").Split(comma, StringSplitOptions.RemoveEmptyEntries), Is.EqualTo(new [] { a })); + Assert.That(Value(",").Split(comma, StringSplitOptions.RemoveEmptyEntries), Is.EqualTo(new Slice[0])); + Assert.That(Value(",,,").Split(comma, StringSplitOptions.RemoveEmptyEntries), Is.EqualTo(new Slice[0])); // multi-bytes separator with an offset - var sep = Slice.FromString("!<@>!").Substring(1, 3); - Assert.That(Slice.FromString("A<@>BB<@>CCC").Split(sep), Is.EqualTo(new[] { a, b, c })); + var sep = Value("!<@>!").Substring(1, 3); + Assert.That(Value("A<@>BB<@>CCC").Split(sep), Is.EqualTo(new[] { a, b, c })); } #region Black Magic Incantations... @@ -1371,5 +2356,21 @@ private static Slice MutateArray(Slice value, byte[] array) #endregion + #region Helpers... + + /// Create a key from a byte string + private static Slice Key(string byteString) + { + return Slice.FromByteString(byteString); + } + + /// Create a value from a unicode string + private static Slice Value(string text) + { + return Slice.FromString(text); + } + + #endregion } + } diff --git a/FoundationDB.Tests/Utils/SliceHelperFacts.cs b/FoundationDB.Tests/Utils/SliceHelperFacts.cs deleted file mode 100644 index 22e1ec768..000000000 --- a/FoundationDB.Tests/Utils/SliceHelperFacts.cs +++ /dev/null @@ -1,112 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. 
- * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client.Tests -{ - using FoundationDB.Client; - using NUnit.Framework; - using System; - using System.Text; - - [TestFixture] - public class SliceHelperFacts : FdbTest - { - - #region SliceHelpers... 
- - [Test] - public void Test_SliceHelpers_Align() - { - // Even though 0 is a multiple of 16, it is always rounded up to 16 to simplify buffer handling logic - Assert.That(SliceHelpers.Align(0), Is.EqualTo(16)); - // 1..16 => 16 - for (int i = 1; i <= 16; i++) { Assert.That(SliceHelpers.Align(i), Is.EqualTo(16), "Align({0}) => 16", i); } - // 17..32 => 32 - for (int i = 17; i <= 32; i++) { Assert.That(SliceHelpers.Align(i), Is.EqualTo(32), "Align({0}) => 32", i); } - // 33..48 => 48 - for (int i = 33; i <= 48; i++) { Assert.That(SliceHelpers.Align(i), Is.EqualTo(48), "Align({0}) => 48", i); } - - // 2^N-1 - for (int i = 6; i < 30; i++) - { - Assert.That(SliceHelpers.Align((1 << i) - 1), Is.EqualTo(1 << i)); - } - // largest non overflowing - Assert.That(() => SliceHelpers.Align(int.MaxValue - 15), Is.EqualTo((int.MaxValue - 15))); - - // overflow - Assert.That(() => SliceHelpers.Align(int.MaxValue), Throws.InstanceOf()); - Assert.That(() => SliceHelpers.Align(int.MaxValue - 14), Throws.InstanceOf()); - - // negative values - Assert.That(() => SliceHelpers.Align(-1), Throws.InstanceOf()); - Assert.That(() => SliceHelpers.Align(int.MinValue), Throws.InstanceOf()); - } - - [Test] - public void Test_SliceHelpers_NextPowerOfTwo() - { - // 0 is a special case, to simplify bugger handling logic - Assert.That(SliceHelpers.NextPowerOfTwo(0), Is.EqualTo(1), "Special case for 0"); - Assert.That(SliceHelpers.NextPowerOfTwo(1), Is.EqualTo(1)); - Assert.That(SliceHelpers.NextPowerOfTwo(2), Is.EqualTo(2)); - - for (int i = 2; i < 31; i++) - { - Assert.That(SliceHelpers.NextPowerOfTwo((1 << i) - 1), Is.EqualTo(1 << i)); - Assert.That(SliceHelpers.NextPowerOfTwo(1 << i), Is.EqualTo(1 << i)); - } - - Assert.That(() => SliceHelpers.NextPowerOfTwo(-1), Throws.InstanceOf()); - Assert.That(() => SliceHelpers.NextPowerOfTwo(-42), Throws.InstanceOf()); - } - - [Test] - public void Test_SliceHelpers_ComputeHashCode() - { - //note: if everything fails, check that the hashcode algorithm 
hasn't changed also ! - - Assert.That(SliceHelpers.ComputeHashCode(new byte[0], 0, 0), Is.EqualTo(-2128831035)); - Assert.That(SliceHelpers.ComputeHashCode(new byte[1], 0, 1), Is.EqualTo(84696351)); - Assert.That(SliceHelpers.ComputeHashCode(new byte[2], 0, 1), Is.EqualTo(84696351)); - Assert.That(SliceHelpers.ComputeHashCode(new byte[2], 1, 1), Is.EqualTo(84696351)); - Assert.That(SliceHelpers.ComputeHashCode(new byte[2], 0, 2), Is.EqualTo(292984781)); - Assert.That(SliceHelpers.ComputeHashCode(Encoding.Default.GetBytes("hello"), 0, 5), Is.EqualTo(1335831723)); - - Assert.That(SliceHelpers.ComputeHashCodeUnsafe(new byte[0], 0, 0), Is.EqualTo(-2128831035)); - Assert.That(SliceHelpers.ComputeHashCodeUnsafe(new byte[1], 0, 1), Is.EqualTo(84696351)); - Assert.That(SliceHelpers.ComputeHashCodeUnsafe(new byte[2], 0, 1), Is.EqualTo(84696351)); - Assert.That(SliceHelpers.ComputeHashCodeUnsafe(new byte[2], 1, 1), Is.EqualTo(84696351)); - Assert.That(SliceHelpers.ComputeHashCodeUnsafe(new byte[2], 0, 2), Is.EqualTo(292984781)); - Assert.That(SliceHelpers.ComputeHashCodeUnsafe(Encoding.Default.GetBytes("hello"), 0, 5), Is.EqualTo(1335831723)); - } - - #endregion - - } -} diff --git a/FoundationDB.Tests/Utils/SliceStreamFacts.cs b/FoundationDB.Tests/Utils/SliceStreamFacts.cs index cad5b974f..e2a4a65d1 100644 --- a/FoundationDB.Tests/Utils/SliceStreamFacts.cs +++ b/FoundationDB.Tests/Utils/SliceStreamFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,16 +26,16 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion - -namespace FoundationDB.Client.Tests +namespace Doxense.Memory.Tests { - using FoundationDB.Client; - using NUnit.Framework; using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; + using Doxense.Memory; + using FoundationDB.Client.Tests; + using NUnit.Framework; [TestFixture] public class SliceStreamFacts : FdbTest @@ -137,13 +137,12 @@ public void Test_SliceStream_CopyTo() } } - + [Test] public void Test_SliceListStream_Basics() { const int N = 65536; var rnd = new Random(); - Slice slice; // create a random buffer var bytes = new byte[N]; @@ -156,7 +155,7 @@ public void Test_SliceListStream_Basics() while(r > 0) { int sz = Math.Min(1 + rnd.Next(1024), r); - slice = Slice.Create(bytes, p, sz); + Slice slice = bytes.AsSlice(p, sz); if (rnd.Next(2) == 1) slice = slice.Memoize(); slices.Add(slice); diff --git a/FoundationDB.Tests/Utils/SliceWriterFacts.cs b/FoundationDB.Tests/Utils/SliceWriterFacts.cs index ec1b87b1c..21f52c9f8 100644 --- a/FoundationDB.Tests/Utils/SliceWriterFacts.cs +++ b/FoundationDB.Tests/Utils/SliceWriterFacts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013-2014, Doxense SAS +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -26,15 +26,16 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY */ #endregion -namespace FoundationDB.Client.Utils.Tests +namespace Doxense.Memory.Tests { - using FoundationDB.Client; - using NUnit.Framework; using System; using System.Text; + using Doxense.Memory; + using FoundationDB.Client.Tests; + using NUnit.Framework; [TestFixture] - public class SliceWriterFacts + public class SliceWriterFacts : FdbTest { private static string Clean(string value) @@ -51,16 +52,16 @@ private static string Clean(string value) private static void PerformWriterTest(TestHandler action, T value, string expectedResult, string message = null) { - var writer = SliceWriter.Empty; + var writer = default(SliceWriter); action(ref writer, value); - Assert.That(writer.ToSlice().ToHexaString(' '), Is.EqualTo(expectedResult), "Value {0} ({1}) was not properly packed", value == null ? "" : value is string ? Clean(value as string) : value.ToString(), (value == null ? "null" : value.GetType().Name)); + Assert.That(writer.ToSlice().ToHexaString(' '), Is.EqualTo(expectedResult), "Value {0} ({1}) was not properly packed. {2}", value == null ? "" : value is string ? Clean(value as string) : value.ToString(), (value == null ? 
"null" : value.GetType().Name), message); } [Test] public void Test_Empty_Writer() { - var writer = SliceWriter.Empty; + var writer = default(SliceWriter); Assert.That(writer.Position, Is.EqualTo(0)); Assert.That(writer.HasData, Is.False); Assert.That(writer.Buffer, Is.Null); @@ -70,64 +71,267 @@ public void Test_Empty_Writer() [Test] public void Test_WriteBytes() { - TestHandler test = (ref SliceWriter writer, byte[] value) => writer.WriteBytes(value); + { + TestHandler test = (ref SliceWriter writer, byte[] value) => writer.WriteBytes(value); - PerformWriterTest(test, null, ""); - PerformWriterTest(test, new byte[0], ""); - PerformWriterTest(test, new byte[] { 66 }, "42"); - PerformWriterTest(test, new byte[] { 65, 66, 67 }, "41 42 43"); + PerformWriterTest(test, null, ""); + PerformWriterTest(test, new byte[0], ""); + PerformWriterTest(test, new byte[] {66}, "42"); + PerformWriterTest(test, new byte[] {65, 66, 67}, "41 42 43"); + } + { + TestHandler test = (ref SliceWriter writer, Slice value) => writer.WriteBytes(value); + + PerformWriterTest(test, Slice.Nil, ""); + PerformWriterTest(test, Slice.Empty, ""); + PerformWriterTest(test, Slice.FromByte(66), "42"); + PerformWriterTest(test, new byte[] { 65, 66, 67 }.AsSlice(), "41 42 43"); + PerformWriterTest(test, new byte[] { 65, 66, 67, 68, 69 }.AsSlice(1, 3), "42 43 44"); + } } [Test] - public void Test_WriteByte() + public void Test_WriteByte_Unsigned() { TestHandler test = (ref SliceWriter writer, byte value) => writer.WriteByte(value); - PerformWriterTest(test, default(byte), "00"); - PerformWriterTest(test, (byte)1, "01"); - PerformWriterTest(test, (byte)42, "2A"); - PerformWriterTest(test, (byte)255, "FF"); + PerformWriterTest(test, 0, "00"); + PerformWriterTest(test, 1, "01"); + PerformWriterTest(test, 42, "2A"); + PerformWriterTest(test, 255, "FF"); + } + + [Test] + public void Test_WriteByte_Signed() + { + TestHandler test = (ref SliceWriter writer, sbyte value) => writer.WriteByte(value); + + 
PerformWriterTest(test, 0, "00"); + PerformWriterTest(test, 1, "01"); + PerformWriterTest(test, 42, "2A"); + PerformWriterTest(test, sbyte.MaxValue, "7F"); + PerformWriterTest(test, -1, "FF"); + PerformWriterTest(test, sbyte.MinValue, "80"); + } + + [Test] + public void Test_WriteFixed16_Unsigned() + { + TestHandler test = (ref SliceWriter writer, ushort value) => writer.WriteFixed16(value); + + PerformWriterTest(test, 0, "00 00"); + PerformWriterTest(test, 1, "01 00"); + PerformWriterTest(test, 0x12, "12 00"); + PerformWriterTest(test, 0x1234, "34 12"); + PerformWriterTest(test, ushort.MaxValue, "FF FF"); + } + + [Test] + public void Test_WriteFixed16_Signed() + { + TestHandler test = (ref SliceWriter writer, short value) => writer.WriteFixed16(value); + + PerformWriterTest(test, 0, "00 00"); + PerformWriterTest(test, 1, "01 00"); + PerformWriterTest(test, 0x12, "12 00"); + PerformWriterTest(test, 0x1234, "34 12"); + PerformWriterTest(test, short.MaxValue, "FF 7F"); + PerformWriterTest(test, -1, "FF FF"); + PerformWriterTest(test, short.MinValue, "00 80"); + } + + [Test] + public void Test_WriteFixed16BE_Unsigned() + { + TestHandler test = (ref SliceWriter writer, ushort value) => writer.WriteFixed16BE(value); + + PerformWriterTest(test, 0, "00 00"); + PerformWriterTest(test, 1, "00 01"); + PerformWriterTest(test, 0x12, "00 12"); + PerformWriterTest(test, 0x1234, "12 34"); + PerformWriterTest(test, ushort.MaxValue, "FF FF"); + } + + [Test] + public void Test_WriteFixed16BE_Signed() + { + TestHandler test = (ref SliceWriter writer, short value) => writer.WriteFixed16BE(value); + + PerformWriterTest(test, 0, "00 00"); + PerformWriterTest(test, 1, "00 01"); + PerformWriterTest(test, 0x12, "00 12"); + PerformWriterTest(test, 0x1234, "12 34"); + PerformWriterTest(test, short.MaxValue, "7F FF"); + PerformWriterTest(test, -1, "FF FF"); + PerformWriterTest(test, short.MinValue, "80 00"); } [Test] - public void Test_WriteFixed32() + public void Test_WriteFixed32_Unsigned() 
{ TestHandler test = (ref SliceWriter writer, uint value) => writer.WriteFixed32(value); - PerformWriterTest(test, 0U, "00 00 00 00"); - PerformWriterTest(test, 1U, "01 00 00 00"); - PerformWriterTest(test, 0x12U, "12 00 00 00"); - PerformWriterTest(test, 0x1234U, "34 12 00 00"); - PerformWriterTest(test, ushort.MaxValue, "FF FF 00 00"); - PerformWriterTest(test, 0x123456U, "56 34 12 00"); - PerformWriterTest(test, 0xDEADBEEF, "EF BE AD DE"); - PerformWriterTest(test, uint.MaxValue, "FF FF FF FF"); + PerformWriterTest(test, 0U, "00 00 00 00"); + PerformWriterTest(test, 1U, "01 00 00 00"); + PerformWriterTest(test, 0x12U, "12 00 00 00"); + PerformWriterTest(test, 0x1234U, "34 12 00 00"); + PerformWriterTest(test, ushort.MaxValue, "FF FF 00 00"); + PerformWriterTest(test, 0x123456U, "56 34 12 00"); + PerformWriterTest(test, 0xDEADBEEF, "EF BE AD DE"); + PerformWriterTest(test, uint.MaxValue, "FF FF FF FF"); + } + + [Test] + public void Test_WriteFixed32_Signed() + { + TestHandler test = (ref SliceWriter writer, int value) => writer.WriteFixed32(value); + + PerformWriterTest(test, 0, "00 00 00 00"); + PerformWriterTest(test, 1, "01 00 00 00"); + PerformWriterTest(test, 0x12, "12 00 00 00"); + PerformWriterTest(test, 0x1234, "34 12 00 00"); + PerformWriterTest(test, short.MaxValue, "FF 7F 00 00"); + PerformWriterTest(test, ushort.MaxValue, "FF FF 00 00"); + PerformWriterTest(test, 0x123456, "56 34 12 00"); + PerformWriterTest(test, unchecked((int)0xDEADBEEF), "EF BE AD DE"); + PerformWriterTest(test, int.MaxValue, "FF FF FF 7F"); + PerformWriterTest(test, -1, "FF FF FF FF"); + PerformWriterTest(test, short.MinValue, "00 80 FF FF"); + PerformWriterTest(test, int.MinValue, "00 00 00 80"); + + } + + [Test] + public void Test_WriteFixed32BE_Unsigned() + { + TestHandler test = (ref SliceWriter writer, uint value) => writer.WriteFixed32BE(value); + + PerformWriterTest(test, 0U, "00 00 00 00"); + PerformWriterTest(test, 1U, "00 00 00 01"); + PerformWriterTest(test, 0x12U, "00 
00 00 12"); + PerformWriterTest(test, 0x1234U, "00 00 12 34"); + PerformWriterTest(test, ushort.MaxValue, "00 00 FF FF"); + PerformWriterTest(test, 0x123456U, "00 12 34 56"); + PerformWriterTest(test, 0xDEADBEEF, "DE AD BE EF"); + PerformWriterTest(test, uint.MaxValue, "FF FF FF FF"); + } + + [Test] + public void Test_WriteFixed32BE_Signed() + { + TestHandler test = (ref SliceWriter writer, int value) => writer.WriteFixed32BE(value); + + PerformWriterTest(test, 0, "00 00 00 00"); + PerformWriterTest(test, 1, "00 00 00 01"); + PerformWriterTest(test, 0x12, "00 00 00 12"); + PerformWriterTest(test, 0x1234, "00 00 12 34"); + PerformWriterTest(test, short.MaxValue, "00 00 7F FF"); + PerformWriterTest(test, ushort.MaxValue, "00 00 FF FF"); + PerformWriterTest(test, 0x123456, "00 12 34 56"); + PerformWriterTest(test, unchecked((int)0xDEADBEEF), "DE AD BE EF"); + PerformWriterTest(test, int.MaxValue, "7F FF FF FF"); + PerformWriterTest(test, -1, "FF FF FF FF"); + PerformWriterTest(test, short.MinValue, "FF FF 80 00"); + PerformWriterTest(test, int.MinValue, "80 00 00 00"); + } [Test] - public void Test_WriteFixed64() + public void Test_WriteFixed64_Unsigned() { TestHandler test = (ref SliceWriter writer, ulong value) => writer.WriteFixed64(value); - PerformWriterTest(test, 0UL, "00 00 00 00 00 00 00 00"); - PerformWriterTest(test, 1UL, "01 00 00 00 00 00 00 00"); - PerformWriterTest(test, 0x12UL, "12 00 00 00 00 00 00 00"); - PerformWriterTest(test, 0x1234UL, "34 12 00 00 00 00 00 00"); - PerformWriterTest(test, ushort.MaxValue, "FF FF 00 00 00 00 00 00"); - PerformWriterTest(test, 0x123456UL, "56 34 12 00 00 00 00 00"); - PerformWriterTest(test, 0x12345678UL, "78 56 34 12 00 00 00 00"); - PerformWriterTest(test, uint.MaxValue, "FF FF FF FF 00 00 00 00"); - PerformWriterTest(test, 0x123456789AUL, "9A 78 56 34 12 00 00 00"); - PerformWriterTest(test, 0x123456789ABCUL, "BC 9A 78 56 34 12 00 00"); - PerformWriterTest(test, 0x123456789ABCDEUL, "DE BC 9A 78 56 34 12 00"); - 
PerformWriterTest(test, 0xBADC0FFEE0DDF00DUL, "0D F0 DD E0 FE 0F DC BA"); - PerformWriterTest(test, ulong.MaxValue, "FF FF FF FF FF FF FF FF"); + PerformWriterTest(test, 0UL, "00 00 00 00 00 00 00 00"); + PerformWriterTest(test, 1UL, "01 00 00 00 00 00 00 00"); + PerformWriterTest(test, 0x12UL, "12 00 00 00 00 00 00 00"); + PerformWriterTest(test, 0x1234UL, "34 12 00 00 00 00 00 00"); + PerformWriterTest(test, ushort.MaxValue, "FF FF 00 00 00 00 00 00"); + PerformWriterTest(test, 0x123456UL, "56 34 12 00 00 00 00 00"); + PerformWriterTest(test, 0x12345678UL, "78 56 34 12 00 00 00 00"); + PerformWriterTest(test, uint.MaxValue, "FF FF FF FF 00 00 00 00"); + PerformWriterTest(test, 0x123456789AUL, "9A 78 56 34 12 00 00 00"); + PerformWriterTest(test, 0x123456789ABCUL, "BC 9A 78 56 34 12 00 00"); + PerformWriterTest(test, 0x123456789ABCDEUL, "DE BC 9A 78 56 34 12 00"); + PerformWriterTest(test, 0xBADC0FFEE0DDF00DUL, "0D F0 DD E0 FE 0F DC BA"); + PerformWriterTest(test, ulong.MaxValue, "FF FF FF FF FF FF FF FF"); + } + + [Test] + public void Test_WriteFixed64_Signed() + { + TestHandler test = (ref SliceWriter writer, long value) => writer.WriteFixed64(value); + + PerformWriterTest(test, 0L, "00 00 00 00 00 00 00 00"); + PerformWriterTest(test, 1L, "01 00 00 00 00 00 00 00"); + PerformWriterTest(test, 0x12L, "12 00 00 00 00 00 00 00"); + PerformWriterTest(test, 0x1234L, "34 12 00 00 00 00 00 00"); + PerformWriterTest(test, short.MaxValue, "FF 7F 00 00 00 00 00 00"); + PerformWriterTest(test, ushort.MaxValue, "FF FF 00 00 00 00 00 00"); + PerformWriterTest(test, 0x123456L, "56 34 12 00 00 00 00 00"); + PerformWriterTest(test, 0x12345678L, "78 56 34 12 00 00 00 00"); + PerformWriterTest(test, int.MaxValue, "FF FF FF 7F 00 00 00 00"); + PerformWriterTest(test, uint.MaxValue, "FF FF FF FF 00 00 00 00"); + PerformWriterTest(test, 0x123456789AL, "9A 78 56 34 12 00 00 00"); + PerformWriterTest(test, 0x123456789ABCL, "BC 9A 78 56 34 12 00 00"); + PerformWriterTest(test, 
0x123456789ABCDEL, "DE BC 9A 78 56 34 12 00"); + PerformWriterTest(test, unchecked((long) 0xBADC0FFEE0DDF00D), "0D F0 DD E0 FE 0F DC BA"); + PerformWriterTest(test, long.MaxValue, "FF FF FF FF FF FF FF 7F"); + PerformWriterTest(test, -1L, "FF FF FF FF FF FF FF FF"); + PerformWriterTest(test, short.MinValue, "00 80 FF FF FF FF FF FF"); + PerformWriterTest(test, int.MinValue, "00 00 00 80 FF FF FF FF"); + PerformWriterTest(test, long.MinValue, "00 00 00 00 00 00 00 80"); + } + + [Test] + public void Test_WriteFixed64BE_Unsigned() + { + TestHandler test = (ref SliceWriter writer, ulong value) => writer.WriteFixed64BE(value); + + PerformWriterTest(test, 0UL, "00 00 00 00 00 00 00 00"); + PerformWriterTest(test, 1UL, "00 00 00 00 00 00 00 01"); + PerformWriterTest(test, 0x12UL, "00 00 00 00 00 00 00 12"); + PerformWriterTest(test, 0x1234UL, "00 00 00 00 00 00 12 34"); + PerformWriterTest(test, ushort.MaxValue, "00 00 00 00 00 00 FF FF"); + PerformWriterTest(test, 0x123456UL, "00 00 00 00 00 12 34 56"); + PerformWriterTest(test, 0x12345678UL, "00 00 00 00 12 34 56 78"); + PerformWriterTest(test, uint.MaxValue, "00 00 00 00 FF FF FF FF"); + PerformWriterTest(test, 0x123456789AUL, "00 00 00 12 34 56 78 9A"); + PerformWriterTest(test, 0x123456789ABCUL, "00 00 12 34 56 78 9A BC"); + PerformWriterTest(test, 0x123456789ABCDEUL, "00 12 34 56 78 9A BC DE"); + PerformWriterTest(test, 0xBADC0FFEE0DDF00DUL, "BA DC 0F FE E0 DD F0 0D"); + PerformWriterTest(test, ulong.MaxValue, "FF FF FF FF FF FF FF FF"); + } + + [Test] + public void Test_WriteFixed64BE_Signed() + { + TestHandler test = (ref SliceWriter writer, long value) => writer.WriteFixed64BE(value); + + PerformWriterTest(test, 0L, "00 00 00 00 00 00 00 00"); + PerformWriterTest(test, 1L, "00 00 00 00 00 00 00 01"); + PerformWriterTest(test, 0x12L, "00 00 00 00 00 00 00 12"); + PerformWriterTest(test, 0x1234L, "00 00 00 00 00 00 12 34"); + PerformWriterTest(test, short.MaxValue, "00 00 00 00 00 00 7F FF"); + 
PerformWriterTest(test, ushort.MaxValue, "00 00 00 00 00 00 FF FF"); + PerformWriterTest(test, 0x123456L, "00 00 00 00 00 12 34 56"); + PerformWriterTest(test, 0x12345678L, "00 00 00 00 12 34 56 78"); + PerformWriterTest(test, int.MaxValue, "00 00 00 00 7F FF FF FF"); + PerformWriterTest(test, uint.MaxValue, "00 00 00 00 FF FF FF FF"); + PerformWriterTest(test, 0x123456789AL, "00 00 00 12 34 56 78 9A"); + PerformWriterTest(test, 0x123456789ABCL, "00 00 12 34 56 78 9A BC"); + PerformWriterTest(test, 0x123456789ABCDEL, "00 12 34 56 78 9A BC DE"); + PerformWriterTest(test, unchecked((long)0xBADC0FFEE0DDF00D), "BA DC 0F FE E0 DD F0 0D"); + PerformWriterTest(test, long.MaxValue, "7F FF FF FF FF FF FF FF"); + PerformWriterTest(test, -1L, "FF FF FF FF FF FF FF FF"); + PerformWriterTest(test, short.MinValue, "FF FF FF FF FF FF 80 00"); + PerformWriterTest(test, int.MinValue, "FF FF FF FF 80 00 00 00"); + PerformWriterTest(test, long.MinValue, "80 00 00 00 00 00 00 00"); } [Test] public void Test_WriteVarint32() { - TestHandler test = (ref SliceWriter writer, uint value) => writer.WriteVarint32(value); + TestHandler test = (ref SliceWriter writer, uint value) => writer.WriteVarInt32(value); PerformWriterTest(test, 0U, "00"); PerformWriterTest(test, 1U, "01"); @@ -147,7 +351,7 @@ public void Test_WriteVarint32() [Test] public void Test_WriteVarint64() { - TestHandler test = (ref SliceWriter writer, ulong value) => writer.WriteVarint64(value); + TestHandler test = (ref SliceWriter writer, ulong value) => writer.WriteVarInt64(value); PerformWriterTest(test, 0UL, "00"); PerformWriterTest(test, 1UL, "01"); @@ -177,7 +381,7 @@ public void Test_WriteVarint64() [Test] public void Test_WriteVarBytes() { - TestHandler test = (ref SliceWriter writer, Slice value) => writer.WriteVarbytes(value); + TestHandler test = (ref SliceWriter writer, Slice value) => writer.WriteVarBytes(value); PerformWriterTest(test, Slice.Nil, "00"); PerformWriterTest(test, Slice.Empty, "00"); @@ -187,6 
+391,108 @@ public void Test_WriteVarBytes() PerformWriterTest(test, Slice.FromFixedU32(0xDEADBEEF), "04 EF BE AD DE"); } + [Test] + public void Test_WriteBase10_Signed() + { + TestHandler test = (ref SliceWriter writer, int value) => writer.WriteBase10(value); + + // positive numbers + PerformWriterTest(test, 0, "30"); + PerformWriterTest(test, 1, "31"); + PerformWriterTest(test, 9, "39"); + PerformWriterTest(test, 10, "31 30"); + PerformWriterTest(test, 42, "34 32"); + PerformWriterTest(test, 99, "39 39"); + PerformWriterTest(test, 100, "31 30 30"); + PerformWriterTest(test, 123, "31 32 33"); + PerformWriterTest(test, 999, "39 39 39"); + PerformWriterTest(test, 1000, "31 30 30 30"); + PerformWriterTest(test, 1234, "31 32 33 34"); + PerformWriterTest(test, 9999, "39 39 39 39"); + PerformWriterTest(test, 10000, "31 30 30 30 30"); + PerformWriterTest(test, 12345, "31 32 33 34 35"); + PerformWriterTest(test, 99999, "39 39 39 39 39"); + PerformWriterTest(test, 100000, "31 30 30 30 30 30"); + PerformWriterTest(test, 123456, "31 32 33 34 35 36"); + PerformWriterTest(test, 999999, "39 39 39 39 39 39"); + PerformWriterTest(test, 1000000, "31 30 30 30 30 30 30"); + PerformWriterTest(test, 1234567, "31 32 33 34 35 36 37"); + PerformWriterTest(test, 9999999, "39 39 39 39 39 39 39"); + PerformWriterTest(test, 10000000, "31 30 30 30 30 30 30 30"); + PerformWriterTest(test, 12345678, "31 32 33 34 35 36 37 38"); + PerformWriterTest(test, 99999999, "39 39 39 39 39 39 39 39"); + PerformWriterTest(test, 100000000, "31 30 30 30 30 30 30 30 30"); + PerformWriterTest(test, 123456789, "31 32 33 34 35 36 37 38 39"); + PerformWriterTest(test, 999999999, "39 39 39 39 39 39 39 39 39"); + PerformWriterTest(test, int.MaxValue, "32 31 34 37 34 38 33 36 34 37"); + + // negative numbers + PerformWriterTest(test, -1, "2D 31"); + PerformWriterTest(test, -9, "2D 39"); + PerformWriterTest(test, -10, "2D 31 30"); + PerformWriterTest(test, -42, "2D 34 32"); + PerformWriterTest(test, -99, "2D 39 39"); 
+ PerformWriterTest(test, -100, "2D 31 30 30"); + PerformWriterTest(test, -123, "2D 31 32 33"); + PerformWriterTest(test, -999, "2D 39 39 39"); + PerformWriterTest(test, -1000, "2D 31 30 30 30"); + PerformWriterTest(test, -1234, "2D 31 32 33 34"); + PerformWriterTest(test, -9999, "2D 39 39 39 39"); + PerformWriterTest(test, -10000, "2D 31 30 30 30 30"); + PerformWriterTest(test, -12345, "2D 31 32 33 34 35"); + PerformWriterTest(test, -99999, "2D 39 39 39 39 39"); + PerformWriterTest(test, -100000, "2D 31 30 30 30 30 30"); + PerformWriterTest(test, -123456, "2D 31 32 33 34 35 36"); + PerformWriterTest(test, -999999, "2D 39 39 39 39 39 39"); + PerformWriterTest(test, -1000000, "2D 31 30 30 30 30 30 30"); + PerformWriterTest(test, -1234567, "2D 31 32 33 34 35 36 37"); + PerformWriterTest(test, -9999999, "2D 39 39 39 39 39 39 39"); + PerformWriterTest(test, -10000000, "2D 31 30 30 30 30 30 30 30"); + PerformWriterTest(test, -12345678, "2D 31 32 33 34 35 36 37 38"); + PerformWriterTest(test, -99999999, "2D 39 39 39 39 39 39 39 39"); + PerformWriterTest(test, -100000000, "2D 31 30 30 30 30 30 30 30 30"); + PerformWriterTest(test, -123456789, "2D 31 32 33 34 35 36 37 38 39"); + PerformWriterTest(test, -999999999, "2D 39 39 39 39 39 39 39 39 39"); + PerformWriterTest(test, int.MinValue, "2D 32 31 34 37 34 38 33 36 34 38"); + } + + [Test] + public void Test_WriteBase10_Unsigned() + { + TestHandler test = (ref SliceWriter writer, uint value) => writer.WriteBase10(value); + + // positive numbers + PerformWriterTest(test, 0, "30"); + PerformWriterTest(test, 1, "31"); + PerformWriterTest(test, 9, "39"); + PerformWriterTest(test, 10, "31 30"); + PerformWriterTest(test, 42, "34 32"); + PerformWriterTest(test, 99, "39 39"); + PerformWriterTest(test, 100, "31 30 30"); + PerformWriterTest(test, 123, "31 32 33"); + PerformWriterTest(test, 999, "39 39 39"); + PerformWriterTest(test, 1000, "31 30 30 30"); + PerformWriterTest(test, 1234, "31 32 33 34"); + PerformWriterTest(test, 9999, 
"39 39 39 39"); + PerformWriterTest(test, 10000, "31 30 30 30 30"); + PerformWriterTest(test, 12345, "31 32 33 34 35"); + PerformWriterTest(test, 99999, "39 39 39 39 39"); + PerformWriterTest(test, 100000, "31 30 30 30 30 30"); + PerformWriterTest(test, 123456, "31 32 33 34 35 36"); + PerformWriterTest(test, 999999, "39 39 39 39 39 39"); + PerformWriterTest(test, 1000000, "31 30 30 30 30 30 30"); + PerformWriterTest(test, 1234567, "31 32 33 34 35 36 37"); + PerformWriterTest(test, 9999999, "39 39 39 39 39 39 39"); + PerformWriterTest(test, 10000000, "31 30 30 30 30 30 30 30"); + PerformWriterTest(test, 12345678, "31 32 33 34 35 36 37 38"); + PerformWriterTest(test, 99999999, "39 39 39 39 39 39 39 39"); + PerformWriterTest(test, 100000000, "31 30 30 30 30 30 30 30 30"); + PerformWriterTest(test, 123456789, "31 32 33 34 35 36 37 38 39"); + PerformWriterTest(test, 999999999, "39 39 39 39 39 39 39 39 39"); + PerformWriterTest(test, int.MaxValue, "32 31 34 37 34 38 33 36 34 37"); + PerformWriterTest(test, uint.MaxValue, "34 32 39 34 39 36 37 32 39 35"); + } + [Test] public void Test_Indexer() { @@ -263,5 +569,245 @@ public void Test_Skip() Assert.That(writer.ToSlice().ToString(), Is.EqualTo("helloworld*****")); } + [Test] + public void Test_ToSlice() + { + var writer = new SliceWriter(64); + var slice = writer.ToSlice(); + //note: slice.Array is not guaranteed to be equal to writer.Buffer + Assert.That(slice.Count, Is.EqualTo(0)); + Assert.That(slice.Offset, Is.EqualTo(0)); + + writer.WriteBytes(Slice.FromString("hello world!")); + slice = writer.ToSlice(); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, Is.EqualTo(12)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("hello world!")); + + writer.WriteBytes(Slice.FromString("foo")); + slice = writer.ToSlice(); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, 
Is.EqualTo(15)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("hello world!foo")); + } + + [Test] + public void Test_Head() + { + var writer = new SliceWriter(64); + var slice = writer.Head(0); + Assert.That(slice.Count, Is.EqualTo(0)); + Assert.That(slice.Offset, Is.EqualTo(0)); + //note: slice.Array is not guaranteed to be equal to writer.Buffer + Assert.That(() => writer.Head(1), Throws.InstanceOf()); + + writer.WriteBytes(Slice.FromString("hello world!")); + slice = writer.Head(5); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, Is.EqualTo(5)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("hello")); + + slice = writer.Head(12); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, Is.EqualTo(12)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("hello world!")); + + Assert.That(() => writer.Head(13), Throws.InstanceOf()); + Assert.That(() => writer.Head(-1), Throws.InstanceOf()); + + writer.WriteBytes(Slice.FromString("foo")); + slice = writer.Head(3); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, Is.EqualTo(3)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("hel")); + + slice = writer.Head(15); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, Is.EqualTo(15)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("hello world!foo")); + + Assert.That(() => writer.Head(16), Throws.InstanceOf()); + + } + + [Test] + public void Test_Tail() + { + var writer = new SliceWriter(64); + var slice = writer.Tail(0); + Assert.That(slice.Count, Is.EqualTo(0)); + Assert.That(slice.Offset, Is.EqualTo(0)); + //note: slice.Array is not guaranteed to be equal to writer.Buffer + Assert.That(() => writer.Head(1), Throws.InstanceOf()); + + writer.WriteBytes(Slice.FromString("hello 
world!")); + slice = writer.Tail(6); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(6)); + Assert.That(slice.Count, Is.EqualTo(6)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("world!")); + + slice = writer.Tail(12); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, Is.EqualTo(12)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("hello world!")); + + Assert.That(() => writer.Tail(13), Throws.InstanceOf()); + Assert.That(() => writer.Tail(-1), Throws.InstanceOf()); + + writer.WriteBytes(Slice.FromString("foo")); + slice = writer.Tail(3); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(12)); + Assert.That(slice.Count, Is.EqualTo(3)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("foo")); + + slice = writer.Tail(15); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, Is.EqualTo(15)); + Assert.That(slice.ToStringAscii(), Is.EqualTo("hello world!foo")); + + Assert.That(() => writer.Tail(16), Throws.InstanceOf()); + + } + + [Test] + public void Test_AppendBytes() + { + var writer = new SliceWriter(64); + var slice = writer.AppendBytes(Slice.Empty); + //note: slice.Array is not guaranteed to be equal to writer.Buffer + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, Is.EqualTo(0)); + + slice = writer.AppendBytes(Slice.FromString("hello world!")); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(0)); + Assert.That(slice.Count, Is.EqualTo(12)); + Assert.That(slice.ToStringUtf8(), Is.EqualTo("hello world!")); + Assert.That(writer.ToSlice().ToStringUtf8(), Is.EqualTo("hello world!")); + + var foo = Slice.FromString("foo"); + slice = writer.AppendBytes(foo); + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(12)); + 
Assert.That(slice.Count, Is.EqualTo(3)); + Assert.That(slice.ToStringUtf8(), Is.EqualTo("foo")); + Assert.That(writer.ToSlice().ToStringUtf8(), Is.EqualTo("hello world!foo")); + + var bar = Slice.FromString("bar"); + unsafe + { + fixed (byte* ptr = &bar.DangerousGetPinnableReference()) + { + slice = writer.AppendBytes(ptr, 3); + } + } + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(15)); + Assert.That(slice.Count, Is.EqualTo(3)); + Assert.That(slice.ToStringUtf8(), Is.EqualTo("bar")); + Assert.That(writer.ToSlice().ToStringUtf8(), Is.EqualTo("hello world!foobar")); + + var baz = Slice.FromString("baz"); + unsafe + { + fixed (byte* ptr = &baz.DangerousGetPinnableReference()) + { + //TODO: this test was using ReadOnlySpan, update it once we enable support for these! + slice = writer.AppendBytes(ptr, 3); + } + } + Assert.That(slice.Array, Is.SameAs(writer.Buffer)); + Assert.That(slice.Offset, Is.EqualTo(18)); + Assert.That(slice.Count, Is.EqualTo(3)); + Assert.That(slice.ToStringUtf8(), Is.EqualTo("baz")); + Assert.That(writer.ToSlice().ToStringUtf8(), Is.EqualTo("hello world!foobarbaz")); + + unsafe + { + slice = writer.AppendBytes(null, 0); + } + //note: slice.Array is not guaranteed to be equal to writer.Buffer + Assert.That(slice.Offset, Is.EqualTo(0)); //REVIEW: should we return (Buffer, Position, 0) instead of (EmptyArray, 0, 0) ? 
+ Assert.That(slice.Count, Is.EqualTo(0)); + } + + [Test] + public void Test_WriteBytes_Resize_Buffer() + { + + // check buffer resize occurs as intended + var original = new byte[32]; + var writer = new SliceWriter(original); + Assert.That(writer.Buffer, Is.SameAs(original)); + + // first write should not resize the buffer + writer.WriteBytes(Slice.Repeat((byte)'a', 24)); + Assert.That(writer.Buffer, Is.SameAs(original)); + Assert.That(writer.ToSlice().ToStringAscii(), Is.EqualTo("aaaaaaaaaaaaaaaaaaaaaaaa")); + + // second write should resize the buffer + writer.WriteBytes(Slice.Repeat((byte)'b', 24)); + // buffer should have been replaced with larger one + Assert.That(writer.Buffer, Is.Not.SameAs(original)); + Assert.That(writer.Buffer.Length, Is.GreaterThanOrEqualTo(48)); + + //but the content should be unchanged + Assert.That(writer.ToSlice().ToStringAscii(), Is.EqualTo("aaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbb")); + + // adding exactly what is missing should not resize the buffer + writer = new SliceWriter(original); + writer.WriteBytes(Slice.Repeat((byte)'c', original.Length)); + Assert.That(writer.Buffer, Is.SameAs(original)); + Assert.That(writer.ToSlice().ToStringAscii(), Is.EqualTo("cccccccccccccccccccccccccccccccc")); + + // adding nothing should not resize the buffer + writer.WriteBytes(Slice.Empty); + Assert.That(writer.Buffer, Is.SameAs(original)); + Assert.That(writer.ToSlice().ToStringAscii(), Is.EqualTo("cccccccccccccccccccccccccccccccc")); + + // adding a single byte should resize the buffer + writer.WriteBytes(Slice.FromChar('Z')); + Assert.That(writer.Buffer, Is.Not.SameAs(original)); + Assert.That(writer.Buffer.Length, Is.GreaterThanOrEqualTo(33)); + Assert.That(writer.ToSlice().ToStringAscii(), Is.EqualTo("ccccccccccccccccccccccccccccccccZ")); + } + + [Test] + public void Test_AppendBytes_Resize_Buffer() + { + + // check buffer resize occurs as intended + var original = new byte[32]; + var writer = new SliceWriter(original); + 
Assert.That(writer.Buffer, Is.SameAs(original)); + + // first write should not resize the buffer + var aaa = writer.AppendBytes(Slice.Repeat((byte) 'a', 24)); + Assert.That(aaa.Array, Is.SameAs(original)); + + // second write should resize the buffer + var bbb = writer.AppendBytes(Slice.Repeat((byte) 'b', 24)); + Assert.That(bbb.Array, Is.SameAs(writer.Buffer)); + //note: buffer should have been copied between both calls, so 'aaa' should point to the OLD buffer + Assert.That(bbb.Array, Is.Not.SameAs(original)); + //but the content should be unchanged + Assert.That(writer.ToSlice().ToStringAscii(), Is.EqualTo("aaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbb")); + // => mutating aaa should not change the buffer + aaa.Array[aaa.Offset] = (byte) 'Z'; + Assert.That(writer.ToSlice().ToStringAscii(), Is.EqualTo("aaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbb")); + // => but mutating bbb should change the buffer + bbb.Array[bbb.Offset] = (byte)'Z'; + Assert.That(writer.ToSlice().ToStringAscii(), Is.EqualTo("aaaaaaaaaaaaaaaaaaaaaaaaZbbbbbbbbbbbbbbbbbbbbbbb")); + } } } diff --git a/FoundationDB.Tests/Utils/TuPackFacts.cs b/FoundationDB.Tests/Utils/TuPackFacts.cs new file mode 100644 index 000000000..8d6934975 --- /dev/null +++ b/FoundationDB.Tests/Utils/TuPackFacts.cs @@ -0,0 +1,2204 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +// ReSharper disable AccessToModifiedClosure +namespace Doxense.Collections.Tuples.Tests +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Linq; + using System.Net; + using Doxense.Collections.Tuples.Encoding; + using FoundationDB.Client; + using FoundationDB.Client.Tests; + using NUnit.Framework; + + [TestFixture] + public class TuPackFacts : FdbTest + { + + #region Serialization... + + [Test] + public void Test_TuplePack_Serialize_Bytes() + { + // Byte arrays are stored with prefix '01' followed by the bytes, and terminated by '00'. 
All occurrences of '00' in the byte array are escaped with '00 FF' + // - Best case: packed_size = 2 + array_len + // - Worst case: packed_size = 2 + array_len * 2 + + Slice packed; + + packed = TuPack.EncodeKey(new byte[] {0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0}); + Assert.That(packed.ToString(), Is.EqualTo("<01><12>4Vx<9A><00>")); + packed = TuPack.EncodeKey(new byte[] {0x00, 0x42}); + Assert.That(packed.ToString(), Is.EqualTo("<01><00>B<00>")); + packed = TuPack.EncodeKey(new byte[] {0x42, 0x00}); + Assert.That(packed.ToString(), Is.EqualTo("<01>B<00><00>")); + packed = TuPack.EncodeKey(new byte[] {0x42, 0x00, 0x42}); + Assert.That(packed.ToString(), Is.EqualTo("<01>B<00>B<00>")); + packed = TuPack.EncodeKey(new byte[] {0x42, 0x00, 0x00, 0x42}); + Assert.That(packed.ToString(), Is.EqualTo("<01>B<00><00>B<00>")); + } + + [Test] + public void Test_TuplePack_Deserialize_Bytes() + { + ITuple t; + + t = TuPack.Unpack(Slice.Unescape("<01><01><23><45><67><89><00>")); + Assert.That(t.Get(0), Is.EqualTo(new byte[] {0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF})); + Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("01 23 45 67 89 AB CD EF")); + + t = TuPack.Unpack(Slice.Unescape("<01><42><00><00>")); + Assert.That(t.Get(0), Is.EqualTo(new byte[] {0x42, 0x00})); + Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("42 00")); + + t = TuPack.Unpack(Slice.Unescape("<01><00><42><00>")); + Assert.That(t.Get(0), Is.EqualTo(new byte[] {0x00, 0x42})); + Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("00 42")); + + t = TuPack.Unpack(Slice.Unescape("<01><42><00><42><00>")); + Assert.That(t.Get(0), Is.EqualTo(new byte[] {0x42, 0x00, 0x42})); + Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("42 00 42")); + + t = TuPack.Unpack(Slice.Unescape("<01><42><00><00><42><00>")); + Assert.That(t.Get(0), Is.EqualTo(new byte[] {0x42, 0x00, 0x00, 0x42})); + Assert.That(t.Get(0).ToHexaString(' '), Is.EqualTo("42 00 00 42")); + } + + [Test] + public void 
Test_TuplePack_Serialize_Unicode_Strings() + { + // Unicode strings are stored with prefix '02' followed by the utf8 bytes, and terminated by '00'. All occurences of '00' in the UTF8 bytes are escaped with '00 FF' + + Slice packed; + + // simple string + packed = TuPack.EncodeKey("hello world"); + Assert.That(packed.ToString(), Is.EqualTo("<02>hello world<00>")); + + // empty + packed = TuPack.EncodeKey(String.Empty); + Assert.That(packed.ToString(), Is.EqualTo("<02><00>")); + + // null + packed = TuPack.EncodeKey(default(string)); + Assert.That(packed.ToString(), Is.EqualTo("<00>")); + + // unicode + packed = TuPack.EncodeKey("こんにちは世界"); + // note: Encoding.UTF8.GetBytes("こんにちは世界") => { e3 81 93 e3 82 93 e3 81 ab e3 81 a1 e3 81 af e4 b8 96 e7 95 8c } + Assert.That(packed.ToString(), Is.EqualTo("<02><81><93><82><93><81><81><81><96><95><8C><00>")); + } + + [Test] + public void Test_TuplePack_Deserialize_Unicode_Strings() + { + ITuple t; + + // simple string + t = TuPack.Unpack(Slice.Unescape("<02>hello world<00>")); + Assert.That(t.Get(0), Is.EqualTo("hello world")); + Assert.That(t[0], Is.EqualTo("hello world")); + + // empty + t = TuPack.Unpack(Slice.Unescape("<02><00>")); + Assert.That(t.Get(0), Is.EqualTo(String.Empty)); + Assert.That(t[0], Is.EqualTo(String.Empty)); + + // null + t = TuPack.Unpack(Slice.Unescape("<00>")); + Assert.That(t.Get(0), Is.EqualTo(default(string))); + Assert.That(t[0], Is.Null); + + // unicode + t = TuPack.Unpack(Slice.Unescape("<02><81><93><82><93><81><81><81><96><95><8C><00>")); + // note: Encoding.UTF8.GetString({ e3 81 93 e3 82 93 e3 81 ab e3 81 a1 e3 81 af e4 b8 96 e7 95 8c }) => "こんにちは世界" + Assert.That(t.Get(0), Is.EqualTo("こんにちは世界")); + Assert.That(t[0], Is.EqualTo("こんにちは世界")); + } + + [Test] + public void Test_TuplePack_Serialize_Guids() + { + // 128-bit Guids are stored with prefix '30' followed by 16 bytes formatted according to RFC 4122 + + // System.Guid are stored in Little-Endian, but RFC 4122's UUIDs are stored in Big 
Endian, so per convention we will swap them + + Slice packed; + + // note: new Guid(bytes from 0 to 15) => "03020100-0504-0706-0809-0a0b0c0d0e0f"; + packed = TuPack.EncodeKey(Guid.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f")); + Assert.That(packed.ToString(), Is.EqualTo("0<00><01><02><03><04><05><06><07><08><09><0A><0B><0C><0D><0E><0F>")); + + packed = TuPack.EncodeKey(Guid.Empty); + Assert.That(packed.ToString(), Is.EqualTo("0<00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")); + + } + + [Test] + public void Test_TuplePack_Deserialize_Guids() + { + // 128-bit Guids are stored with prefix '30' followed by 16 bytes + // we also accept byte arrays (prefix '01') if they are of length 16 + + ITuple packed; + + packed = TuPack.Unpack(Slice.Unescape("<30><00><01><02><03><04><05><06><07><08><09><0A><0B><0C><0D><0E><0F>")); + Assert.That(packed.Get(0), Is.EqualTo(Guid.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); + Assert.That(packed[0], Is.EqualTo(Guid.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); + + packed = TuPack.Unpack(Slice.Unescape("<30><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")); + Assert.That(packed.Get(0), Is.EqualTo(Guid.Empty)); + Assert.That(packed[0], Is.EqualTo(Guid.Empty)); + + // unicode string + packed = TuPack.Unpack(Slice.Unescape("<02>03020100-0504-0706-0809-0a0b0c0d0e0f<00>")); + Assert.That(packed.Get(0), Is.EqualTo(Guid.Parse("03020100-0504-0706-0809-0a0b0c0d0e0f"))); + //note: t[0] returns a string, not a GUID + + // null maps to Guid.Empty + packed = TuPack.Unpack(Slice.Unescape("<00>")); + Assert.That(packed.Get(0), Is.EqualTo(Guid.Empty)); + //note: t[0] returns null, not a GUID + + } + + [Test] + public void Test_TuplePack_Serialize_Uuid128s() + { + // UUID128s are stored with prefix '30' followed by 16 bytes formatted according to RFC 4122 + + Slice packed; + + // note: new Uuid(bytes from 0 to 15) => "03020100-0504-0706-0809-0a0b0c0d0e0f"; + packed = 
TuPack.EncodeKey(Uuid128.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f")); + Assert.That(packed.ToString(), Is.EqualTo("0<00><01><02><03><04><05><06><07><08><09><0A><0B><0C><0D><0E><0F>")); + + packed = TuPack.EncodeKey(Uuid128.Empty); + Assert.That(packed.ToString(), Is.EqualTo("0<00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")); + } + + [Test] + public void Test_TuplePack_Deserialize_Uuid128s() + { + // UUID128s are stored with prefix '30' followed by 16 bytes (the result of uuid.ToByteArray()) + // we also accept byte arrays (prefix '01') if they are of length 16 + + ITuple packed; + + // note: new Uuid(bytes from 0 to 15) => "00010203-0405-0607-0809-0a0b0c0d0e0f"; + packed = TuPack.Unpack(Slice.Unescape("<30><00><01><02><03><04><05><06><07><08><09><0A><0B><0C><0D><0E><0F>")); + Assert.That(packed.Get(0), Is.EqualTo(Uuid128.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); + Assert.That(packed[0], Is.EqualTo(Uuid128.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); + + packed = TuPack.Unpack(Slice.Unescape("<30><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")); + Assert.That(packed.Get(0), Is.EqualTo(Uuid128.Empty)); + Assert.That(packed[0], Is.EqualTo(Uuid128.Empty)); + + // unicode string + packed = TuPack.Unpack(Slice.Unescape("<02>00010203-0405-0607-0809-0a0b0c0d0e0f<00>")); + Assert.That(packed.Get(0), Is.EqualTo(Uuid128.Parse("00010203-0405-0607-0809-0a0b0c0d0e0f"))); + //note: t[0] returns a string, not a UUID + + // null maps to Uuid.Empty + packed = TuPack.Unpack(Slice.Unescape("<00>")); + Assert.That(packed.Get(0), Is.EqualTo(Uuid128.Empty)); + //note: t[0] returns null, not a UUID + + } + + [Test] + public void Test_TuplePack_Serialize_Uuid64s() + { + // UUID64s are stored with prefix '31' followed by 8 bytes formatted according to RFC 4122 + + Slice packed; + + // note: new Uuid(bytes from 0 to 7) => "00010203-04050607"; + packed = TuPack.EncodeKey(Uuid64.Parse("00010203-04050607")); + Assert.That(packed.ToString(), 
Is.EqualTo("1<00><01><02><03><04><05><06><07>")); + + packed = TuPack.EncodeKey(Uuid64.Parse("01234567-89ABCDEF")); + Assert.That(packed.ToString(), Is.EqualTo("1<01>#Eg<89>")); + + packed = TuPack.EncodeKey(Uuid64.Empty); + Assert.That(packed.ToString(), Is.EqualTo("1<00><00><00><00><00><00><00><00>")); + + packed = TuPack.EncodeKey(new Uuid64(0xBADC0FFEE0DDF00DUL)); + Assert.That(packed.ToString(), Is.EqualTo("1<0F>
<0D>")); + + packed = TuPack.EncodeKey(new Uuid64(0xDEADBEEFL)); + Assert.That(packed.ToString(), Is.EqualTo("1<00><00><00><00>")); + } + + [Test] + public void Test_TuplePack_Deserialize_Uuid64s() + { + // UUID64s are stored with prefix '31' followed by 8 bytes (the result of uuid.ToByteArray()) + // we also accept byte arrays (prefix '01') if they are of length 8, and unicode strings (prefix '02') + + ITuple packed; + + // note: new Uuid(bytes from 0 to 15) => "00010203-0405-0607-0809-0a0b0c0d0e0f"; + packed = TuPack.Unpack(Slice.Unescape("<31><01><23><45><67><89>")); + Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Parse("01234567-89abcdef"))); + Assert.That(packed[0], Is.EqualTo(Uuid64.Parse("01234567-89abcdef"))); + + packed = TuPack.Unpack(Slice.Unescape("<31><00><00><00><00><00><00><00><00>")); + Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Empty)); + Assert.That(packed[0], Is.EqualTo(Uuid64.Empty)); + + // 8 bytes + packed = TuPack.Unpack(Slice.Unescape("<01><01><23><45><67><89><00>")); + Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Parse("01234567-89abcdef"))); + //note: t[0] returns a string, not a UUID + + // unicode string + packed = TuPack.Unpack(Slice.Unescape("<02>01234567-89abcdef<00>")); + Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Parse("01234567-89abcdef"))); + //note: t[0] returns a string, not a UUID + + // null maps to Uuid.Empty + packed = TuPack.Unpack(Slice.Unescape("<00>")); + Assert.That(packed.Get(0), Is.EqualTo(Uuid64.Empty)); + //note: t[0] returns null, not a UUID + + } + + [Test] + public void Test_TuplePack_Serialize_Integers() + { + // Positive integers are stored with a variable-length encoding. + // - The prefix is 0x14 + the minimum number of bytes to encode the integer, from 0 to 8, so valid prefixes range from 0x14 to 0x1C + // - The bytes are stored in High-Endian (ie: the upper bits first) + // Examples: + // - 0 => <14> + // - 1..255 => <15><##> + // - 256..65535 .. 
=> <16> + // - ulong.MaxValue => <1C> + + Assert.That( + TuPack.EncodeKey(0).ToString(), + Is.EqualTo("<14>") + ); + + Assert.That( + TuPack.EncodeKey(1).ToString(), + Is.EqualTo("<15><01>") + ); + + Assert.That( + TuPack.EncodeKey(255).ToString(), + Is.EqualTo("<15>") + ); + + Assert.That( + TuPack.EncodeKey(256).ToString(), + Is.EqualTo("<16><01><00>") + ); + + Assert.That( + TuPack.EncodeKey(65535).ToString(), + Is.EqualTo("<16>") + ); + + Assert.That( + TuPack.EncodeKey(65536).ToString(), + Is.EqualTo("<17><01><00><00>") + ); + + Assert.That( + TuPack.EncodeKey(int.MaxValue).ToString(), + Is.EqualTo("<18><7F>") + ); + + // signed max + Assert.That( + TuPack.EncodeKey(long.MaxValue).ToString(), + Is.EqualTo("<1C><7F>") + ); + + // unsigned max + Assert.That( + TuPack.EncodeKey(ulong.MaxValue).ToString(), + Is.EqualTo("<1C>") + ); + } + + [Test] + public void Test_TuplePack_Deserialize_Integers() + { + + Action verify = (encoded, value) => + { + var slice = Slice.Unescape(encoded); + Assert.That(TuplePackers.DeserializeBoxed(slice), Is.EqualTo(value), "DeserializeBoxed({0})", encoded); + + // int64 + Assert.That(TuplePackers.DeserializeInt64(slice), Is.EqualTo(value), "DeserializeInt64({0})", encoded); + Assert.That(TuplePacker.Deserialize(slice), Is.EqualTo(value), "Deserialize({0})", encoded); + + // uint64 + if (value >= 0) + { + Assert.That(TuplePackers.DeserializeUInt64(slice), Is.EqualTo((ulong) value), "DeserializeUInt64({0})", encoded); + Assert.That(TuplePacker.Deserialize(slice), Is.EqualTo((ulong) value), "Deserialize({0})", encoded); + } + else + { + Assert.That(() => TuplePackers.DeserializeUInt64(slice), Throws.InstanceOf(), "DeserializeUInt64({0})", encoded); + } + + // int32 + if (value <= int.MaxValue && value >= int.MinValue) + { + Assert.That(TuplePackers.DeserializeInt32(slice), Is.EqualTo((int) value), "DeserializeInt32({0})", encoded); + Assert.That(TuplePacker.Deserialize(slice), Is.EqualTo((int) value), "Deserialize({0})", encoded); + } + 
else + { + Assert.That(() => TuplePackers.DeserializeInt32(slice), Throws.InstanceOf(), "DeserializeInt32({0})", encoded); + } + + // uint32 + if (value <= uint.MaxValue && value >= 0) + { + Assert.That(TuplePackers.DeserializeUInt32(slice), Is.EqualTo((uint) value), "DeserializeUInt32({0})", encoded); + Assert.That(TuplePacker.Deserialize(slice), Is.EqualTo((uint) value), "Deserialize({0})", encoded); + } + else + { + Assert.That(() => TuplePackers.DeserializeUInt32(slice), Throws.InstanceOf(), "DeserializeUInt32({0})", encoded); + } + + // int16 + if (value <= short.MaxValue && value >= short.MinValue) + { + Assert.That(TuplePackers.DeserializeInt16(slice), Is.EqualTo((short) value), "DeserializeInt16({0})", encoded); + Assert.That(TuplePacker.Deserialize(slice), Is.EqualTo((short) value), "Deserialize({0})", encoded); + } + else + { + Assert.That(() => TuplePackers.DeserializeInt16(slice), Throws.InstanceOf(), "DeserializeInt16({0})", encoded); + } + + // uint16 + if (value <= ushort.MaxValue && value >= 0) + { + Assert.That(TuplePackers.DeserializeUInt16(slice), Is.EqualTo((ushort) value), "DeserializeUInt16({0})", encoded); + Assert.That(TuplePacker.Deserialize(slice), Is.EqualTo((ushort) value), "Deserialize({0})", encoded); + } + else + { + Assert.That(() => TuplePackers.DeserializeUInt16(slice), Throws.InstanceOf(), "DeserializeUInt16({0})", encoded); + } + + // sbyte + if (value <= sbyte.MaxValue && value >= sbyte.MinValue) + { + Assert.That(TuplePackers.DeserializeSByte(slice), Is.EqualTo((sbyte) value), "DeserializeSByte({0})", encoded); + Assert.That(TuplePacker.Deserialize(slice), Is.EqualTo((sbyte) value), "Deserialize({0})", encoded); + } + else + { + Assert.That(() => TuplePackers.DeserializeSByte(slice), Throws.InstanceOf(), "DeserializeSByte({0})", encoded); + } + + // byte + if (value <= 255 && value >= 0) + { + Assert.That(TuplePackers.DeserializeByte(slice), Is.EqualTo((byte) value), "DeserializeByte({0})", encoded); + 
Assert.That(TuplePacker.Deserialize(slice), Is.EqualTo((byte) value), "Deserialize({0})", encoded); + } + else + { + Assert.That(() => TuplePackers.DeserializeByte(slice), Throws.InstanceOf(), "DeserializeByte({0})", encoded); + } + + }; + verify("<14>", 0); + verify("<15>{", 123); + verify("<15><80>", 128); + verify("<15>", 255); + verify("<16><01><00>", 256); + verify("<16><04>", 1234); + verify("<16><80><00>", 32768); + verify("<16>", 65535); + verify("<17><01><00><00>", 65536); + verify("<13>", -1); + verify("<13><00>", -255); + verify("<12>", -256); + verify("<12><00><00>", -65535); + verify("<11>", -65536); + verify("<18><7F>", int.MaxValue); + verify("<10><7F>", int.MinValue); + verify("<1C><7F>", long.MaxValue); + verify("<0C><7F>", long.MinValue); + } + + [Test] + public void Test_TuplePack_Serialize_Negative_Integers() + { + // Negative integers are stored with a variable-length encoding. + // - The prefix is 0x14 - the minimum number of bytes to encode the integer, from 0 to 8, so valid prefixes range from 0x0C to 0x13 + // - The value is encoded as the one's complement, and stored in High-Endian (ie: the upper bits first) + // - There is no way to encode '-0', it will be encoded as '0' (<14>) + // Examples: + // - -255..-1 => <13><00> .. <13> + // - -65535..-256 => <12><00>00> .. 
<12> + // - long.MinValue => <0C><7F> + + Assert.That( + TuPack.EncodeKey(-1).ToString(), + Is.EqualTo("<13>") + ); + + Assert.That( + TuPack.EncodeKey(-255).ToString(), + Is.EqualTo("<13><00>") + ); + + Assert.That( + TuPack.EncodeKey(-256).ToString(), + Is.EqualTo("<12>") + ); + Assert.That( + TuPack.EncodeKey(-257).ToString(), + Is.EqualTo("<12>") + ); + + Assert.That( + TuPack.EncodeKey(-65535).ToString(), + Is.EqualTo("<12><00><00>") + ); + Assert.That( + TuPack.EncodeKey(-65536).ToString(), + Is.EqualTo("<11>") + ); + + Assert.That( + TuPack.EncodeKey(int.MinValue).ToString(), + Is.EqualTo("<10><7F>") + ); + + Assert.That( + TuPack.EncodeKey(long.MinValue).ToString(), + Is.EqualTo("<0C><7F>") + ); + } + + [Test] + public void Test_TuplePack_Serialize_Singles() + { + // 32-bit floats are stored in 5 bytes, using the prefix 0x20 followed by the High-Endian representation of their normalized form + + Assert.That(TuPack.EncodeKey(0f).ToHexaString(' '), Is.EqualTo("20 80 00 00 00")); + Assert.That(TuPack.EncodeKey(42f).ToHexaString(' '), Is.EqualTo("20 C2 28 00 00")); + Assert.That(TuPack.EncodeKey(-42f).ToHexaString(' '), Is.EqualTo("20 3D D7 FF FF")); + + Assert.That(TuPack.EncodeKey((float) Math.Sqrt(2)).ToHexaString(' '), Is.EqualTo("20 BF B5 04 F3")); + + Assert.That(TuPack.EncodeKey(float.MinValue).ToHexaString(' '), Is.EqualTo("20 00 80 00 00"), "float.MinValue"); + Assert.That(TuPack.EncodeKey(float.MaxValue).ToHexaString(' '), Is.EqualTo("20 FF 7F FF FF"), "float.MaxValue"); + Assert.That(TuPack.EncodeKey(-0f).ToHexaString(' '), Is.EqualTo("20 7F FF FF FF"), "-0f"); + Assert.That(TuPack.EncodeKey(float.NegativeInfinity).ToHexaString(' '), Is.EqualTo("20 00 7F FF FF"), "float.NegativeInfinity"); + Assert.That(TuPack.EncodeKey(float.PositiveInfinity).ToHexaString(' '), Is.EqualTo("20 FF 80 00 00"), "float.PositiveInfinity"); + Assert.That(TuPack.EncodeKey(float.Epsilon).ToHexaString(' '), Is.EqualTo("20 80 00 00 01"), "+float.Epsilon"); + 
Assert.That(TuPack.EncodeKey(-float.Epsilon).ToHexaString(' '), Is.EqualTo("20 7F FF FF FE"), "-float.Epsilon"); + + // all possible variants of NaN should all be equal + Assert.That(TuPack.EncodeKey(float.NaN).ToHexaString(' '), Is.EqualTo("20 00 3F FF FF"), "float.NaN"); + + // cook up a non standard NaN (with some bits set in the fraction) + float f = float.NaN; // defined as 1f / 0f + uint nan; + unsafe { nan = *((uint*) &f); } + nan += 123; + unsafe { f = *((float*) &nan); } + Assert.That(float.IsNaN(f), Is.True); + Assert.That( + TuPack.EncodeKey(f).ToHexaString(' '), + Is.EqualTo("20 00 3F FF FF"), + "All variants of NaN must be normalized" + //note: if we have 20 00 3F FF 84, that means that the NaN was not normalized + ); + + } + + [Test] + public void Test_TuplePack_Deserialize_Singles() + { + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 80 00 00 00")), Is.EqualTo(0f), "0f"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 C2 28 00 00")), Is.EqualTo(42f), "42f"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 3D D7 FF FF")), Is.EqualTo(-42f), "-42f"); + + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 BF B5 04 F3")), Is.EqualTo((float) Math.Sqrt(2)), "Sqrt(2)"); + + // well known values + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 00 80 00 00")), Is.EqualTo(float.MinValue), "float.MinValue"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 FF 7F FF FF")), Is.EqualTo(float.MaxValue), "float.MaxValue"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 7F FF FF FF")), Is.EqualTo(-0f), "-0f"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 00 7F FF FF")), Is.EqualTo(float.NegativeInfinity), "float.NegativeInfinity"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 FF 80 00 00")), Is.EqualTo(float.PositiveInfinity), "float.PositiveInfinity"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 00 80 00 00")), Is.EqualTo(float.MinValue), "float.Epsilon"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 80 00 00 01")), 
Is.EqualTo(float.Epsilon), "+float.Epsilon"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 7F FF FF FE")), Is.EqualTo(-float.Epsilon), "-float.Epsilon"); + + // all possible variants of NaN should end up equal and normalized to float.NaN + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 00 3F FF FF")), Is.EqualTo(float.NaN), "float.NaN"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("20 00 3F FF FF")), Is.EqualTo(float.NaN), "float.NaN"); + } + + [Test] + public void Test_TuplePack_Serialize_Doubles() + { + // 64-bit floats are stored in 9 bytes, using the prefix 0x21 followed by the High-Endian representation of their normalized form + + Assert.That(TuPack.EncodeKey(0d).ToHexaString(' '), Is.EqualTo("21 80 00 00 00 00 00 00 00")); + Assert.That(TuPack.EncodeKey(42d).ToHexaString(' '), Is.EqualTo("21 C0 45 00 00 00 00 00 00")); + Assert.That(TuPack.EncodeKey(-42d).ToHexaString(' '), Is.EqualTo("21 3F BA FF FF FF FF FF FF")); + + Assert.That(TuPack.EncodeKey(Math.PI).ToHexaString(' '), Is.EqualTo("21 C0 09 21 FB 54 44 2D 18")); + Assert.That(TuPack.EncodeKey(Math.E).ToHexaString(' '), Is.EqualTo("21 C0 05 BF 0A 8B 14 57 69")); + + Assert.That(TuPack.EncodeKey(double.MinValue).ToHexaString(' '), Is.EqualTo("21 00 10 00 00 00 00 00 00"), "double.MinValue"); + Assert.That(TuPack.EncodeKey(double.MaxValue).ToHexaString(' '), Is.EqualTo("21 FF EF FF FF FF FF FF FF"), "double.MaxValue"); + Assert.That(TuPack.EncodeKey(-0d).ToHexaString(' '), Is.EqualTo("21 7F FF FF FF FF FF FF FF"), "-0d"); + Assert.That(TuPack.EncodeKey(double.NegativeInfinity).ToHexaString(' '), Is.EqualTo("21 00 0F FF FF FF FF FF FF"), "double.NegativeInfinity"); + Assert.That(TuPack.EncodeKey(double.PositiveInfinity).ToHexaString(' '), Is.EqualTo("21 FF F0 00 00 00 00 00 00"), "double.PositiveInfinity"); + Assert.That(TuPack.EncodeKey(double.Epsilon).ToHexaString(' '), Is.EqualTo("21 80 00 00 00 00 00 00 01"), "+double.Epsilon"); + Assert.That(TuPack.EncodeKey(-double.Epsilon).ToHexaString(' '), 
Is.EqualTo("21 7F FF FF FF FF FF FF FE"), "-double.Epsilon"); + + // all possible variants of NaN should all be equal + + Assert.That(TuPack.EncodeKey(double.NaN).ToHexaString(' '), Is.EqualTo("21 00 07 FF FF FF FF FF FF"), "double.NaN"); + + // cook up a non standard NaN (with some bits set in the fraction) + double d = double.NaN; // defined as 1d / 0d + ulong nan; + unsafe { nan = *((ulong*) &d); } + nan += 123; + unsafe { d = *((double*) &nan); } + Assert.That(double.IsNaN(d), Is.True); + Assert.That( + TuPack.EncodeKey(d).ToHexaString(' '), + Is.EqualTo("21 00 07 FF FF FF FF FF FF") + //note: if we have 21 00 07 FF FF FF FF FF 84, that means that the NaN was not normalized + ); + + // roundtripping vectors of doubles + var tuple = STuple.Create(Math.PI, Math.E, Math.Log(1), Math.Log(2)); + Assert.That(TuPack.Unpack(TuPack.EncodeKey(Math.PI, Math.E, Math.Log(1), Math.Log(2))), Is.EqualTo(tuple)); + Assert.That(TuPack.Unpack(TuPack.Pack(STuple.Create(Math.PI, Math.E, Math.Log(1), Math.Log(2)))), Is.EqualTo(tuple)); + Assert.That(TuPack.Unpack(TuPack.Pack(STuple.Empty.Append(Math.PI).Append(Math.E).Append(Math.Log(1)).Append(Math.Log(2)))), Is.EqualTo(tuple)); + } + + [Test] + public void Test_TuplePack_Deserialize_Doubles() + { + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 80 00 00 00 00 00 00 00")), Is.EqualTo(0d), "0d"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 C0 45 00 00 00 00 00 00")), Is.EqualTo(42d), "42d"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 3F BA FF FF FF FF FF FF")), Is.EqualTo(-42d), "-42d"); + + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 C0 09 21 FB 54 44 2D 18")), Is.EqualTo(Math.PI), "Math.PI"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 C0 05 BF 0A 8B 14 57 69")), Is.EqualTo(Math.E), "Math.E"); + + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 00 10 00 00 00 00 00 00")), Is.EqualTo(double.MinValue), "double.MinValue"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 FF EF FF FF FF FF FF FF")), 
Is.EqualTo(double.MaxValue), "double.MaxValue"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 7F FF FF FF FF FF FF FF")), Is.EqualTo(-0d), "-0d"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 00 0F FF FF FF FF FF FF")), Is.EqualTo(double.NegativeInfinity), "double.NegativeInfinity"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 FF F0 00 00 00 00 00 00")), Is.EqualTo(double.PositiveInfinity), "double.PositiveInfinity"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 80 00 00 00 00 00 00 01")), Is.EqualTo(double.Epsilon), "+double.Epsilon"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 7F FF FF FF FF FF FF FE")), Is.EqualTo(-double.Epsilon), "-double.Epsilon"); + + // all possible variants of NaN should end up equal and normalized to double.NaN + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 00 07 FF FF FF FF FF FF")), Is.EqualTo(double.NaN), "double.NaN"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("21 00 07 FF FF FF FF FF 84")), Is.EqualTo(double.NaN), "double.NaN"); + } + + [Test] + public void Test_TuplePack_Serialize_Booleans() + { + // Booleans are stored as integer 0 (<14>) for false, and integer 1 (<15><01>) for true + + Slice packed; + + // bool + packed = TuPack.EncodeKey(false); + Assert.That(packed.ToString(), Is.EqualTo("<14>")); + packed = TuPack.EncodeKey(true); + Assert.That(packed.ToString(), Is.EqualTo("<15><01>")); + + // bool? + packed = TuPack.EncodeKey(default(bool?)); + Assert.That(packed.ToString(), Is.EqualTo("<00>")); + packed = TuPack.EncodeKey((bool?) false); + Assert.That(packed.ToString(), Is.EqualTo("<14>")); + packed = TuPack.EncodeKey((bool?) 
true); + Assert.That(packed.ToString(), Is.EqualTo("<15><01>")); + + // tuple containing bools + packed = TuPack.EncodeKey(true); + Assert.That(packed.ToString(), Is.EqualTo("<15><01>")); + packed = TuPack.EncodeKey(true, default(string), false); + Assert.That(packed.ToString(), Is.EqualTo("<15><01><00><14>")); + } + + [Test] + public void Test_TuplePack_Deserialize_Booleans() + { + // Null, 0, and empty byte[]/strings are equivalent to False. All others are equivalent to True + + // Falsy... + Assert.That(TuPack.DecodeKey(Slice.Unescape("<00>")), Is.False, "Null => False"); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<14>")), Is.False, "0 => False"); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<01><00>")), Is.False, "byte[0] => False"); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<02><00>")), Is.False, "String.Empty => False"); + + // Truthy + Assert.That(TuPack.DecodeKey(Slice.Unescape("<15><01>")), Is.True, "1 => True"); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<13>")), Is.True, "-1 => True"); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<01>Hello<00>")), Is.True, "'Hello' => True"); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<02>Hello<00>")), Is.True, "\"Hello\" => True"); + Assert.That(TuPack.DecodeKey(TuPack.EncodeKey(123456789)), Is.True, "random int => True"); + + Assert.That(TuPack.DecodeKey(Slice.Unescape("<02>True<00>")), Is.True, "\"True\" => True"); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<02>False<00>")), Is.True, "\"False\" => True ***"); + // note: even though it would be tempting to convert the string "false" to False, it is not a standard behavior across all bindings + + // When decoded to object, though, they should return 0 and 1 + Assert.That(TuplePackers.DeserializeBoxed(TuPack.EncodeKey(false)), Is.EqualTo(0)); + Assert.That(TuplePackers.DeserializeBoxed(TuPack.EncodeKey(true)), Is.EqualTo(1)); + } + + [Test] + public void Test_TuplePack_Serialize_VersionStamps() + { + // incomplete, 80 bits + Assert.That( + 
TuPack.EncodeKey(VersionStamp.Incomplete()).ToHexaString(' '), + Is.EqualTo("32 FF FF FF FF FF FF FF FF FF FF") + ); + + // incomplete, 96 bits + Assert.That( + TuPack.EncodeKey(VersionStamp.Incomplete(0)).ToHexaString(' '), + Is.EqualTo("33 FF FF FF FF FF FF FF FF FF FF 00 00") + ); + Assert.That( + TuPack.EncodeKey(VersionStamp.Incomplete(42)).ToHexaString(' '), + Is.EqualTo("33 FF FF FF FF FF FF FF FF FF FF 00 2A") + ); + Assert.That( + TuPack.EncodeKey(VersionStamp.Incomplete(456)).ToHexaString(' '), + Is.EqualTo("33 FF FF FF FF FF FF FF FF FF FF 01 C8") + ); + Assert.That( + TuPack.EncodeKey(VersionStamp.Incomplete(65535)).ToHexaString(' '), + Is.EqualTo("33 FF FF FF FF FF FF FF FF FF FF FF FF") + ); + + // complete, 80 bits + Assert.That( + TuPack.EncodeKey(VersionStamp.Complete(0x0123456789ABCDEF, 1234)).ToHexaString(' '), + Is.EqualTo("32 01 23 45 67 89 AB CD EF 04 D2") + ); + + // complete, 96 bits + Assert.That( + TuPack.EncodeKey(VersionStamp.Complete(0x0123456789ABCDEF, 1234, 0)).ToHexaString(' '), + Is.EqualTo("33 01 23 45 67 89 AB CD EF 04 D2 00 00") + ); + Assert.That( + TuPack.EncodeKey(VersionStamp.Complete(0x0123456789ABCDEF, 1234, 42)).ToHexaString(' '), + Is.EqualTo("33 01 23 45 67 89 AB CD EF 04 D2 00 2A") + ); + Assert.That( + TuPack.EncodeKey(VersionStamp.Complete(0x0123456789ABCDEF, 65535, 42)).ToHexaString(' '), + Is.EqualTo("33 01 23 45 67 89 AB CD EF FF FF 00 2A") + ); + Assert.That( + TuPack.EncodeKey(VersionStamp.Complete(0x0123456789ABCDEF, 1234, 65535)).ToHexaString(' '), + Is.EqualTo("33 01 23 45 67 89 AB CD EF 04 D2 FF FF") + ); + } + + [Test] + public void Test_TuplePack_Deserailize_VersionStamps() + { + Assert.That(TuPack.DecodeKey(Slice.FromHexa("32 FF FF FF FF FF FF FF FF FF FF")), Is.EqualTo(VersionStamp.Incomplete()), "Incomplete()"); + + Assert.That(TuPack.DecodeKey(Slice.FromHexa("33 FF FF FF FF FF FF FF FF FF FF 00 00")), Is.EqualTo(VersionStamp.Incomplete(0)), "Incomplete(0)"); + 
Assert.That(TuPack.DecodeKey(Slice.FromHexa("33 FF FF FF FF FF FF FF FF FF FF 00 2A")), Is.EqualTo(VersionStamp.Incomplete(42)), "Incomplete(42)"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("33 FF FF FF FF FF FF FF FF FF FF 01 C8")), Is.EqualTo(VersionStamp.Incomplete(456)), "Incomplete(456)"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("33 FF FF FF FF FF FF FF FF FF FF FF FF")), Is.EqualTo(VersionStamp.Incomplete(65535)), "Incomplete(65535)"); + + Assert.That(TuPack.DecodeKey(Slice.FromHexa("32 01 23 45 67 89 AB CD EF 04 D2")), Is.EqualTo(VersionStamp.Complete(0x0123456789ABCDEF, 1234)), "Complete(..., 1234)"); + + Assert.That(TuPack.DecodeKey(Slice.FromHexa("33 01 23 45 67 89 AB CD EF 04 D2 00 00")), Is.EqualTo(VersionStamp.Complete(0x0123456789ABCDEF, 1234, 0)), "Complete(..., 1234, 0)"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("33 01 23 45 67 89 AB CD EF 04 D2 00 2A")), Is.EqualTo(VersionStamp.Complete(0x0123456789ABCDEF, 1234, 42)), "Complete(..., 1234, 42)"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("33 01 23 45 67 89 AB CD EF FF FF 00 2A")), Is.EqualTo(VersionStamp.Complete(0x0123456789ABCDEF, 65535, 42)), "Complete(..., 65535, 42)"); + Assert.That(TuPack.DecodeKey(Slice.FromHexa("33 01 23 45 67 89 AB CD EF 04 D2 FF FF")), Is.EqualTo(VersionStamp.Complete(0x0123456789ABCDEF, 1234, 65535)), "Complete(..., 1234, 65535)"); + } + + [Test] + public void Test_TuplePack_Serialize_IPAddress() + { + // IP Addresses are stored as a byte array (<01>..<00>), in network order (big-endian) + // They will take from 6 to 10 bytes, depending on the number of '.0' in them. 
+ + Assert.That( + TuPack.EncodeKey(IPAddress.Loopback).ToHexaString(' '), + Is.EqualTo("01 7F 00 FF 00 FF 01 00") + ); + + Assert.That( + TuPack.EncodeKey(IPAddress.Any).ToHexaString(' '), + Is.EqualTo("01 00 FF 00 FF 00 FF 00 FF 00") + ); + + Assert.That( + TuPack.EncodeKey(IPAddress.Parse("1.2.3.4")).ToHexaString(' '), + Is.EqualTo("01 01 02 03 04 00") + ); + + } + + + [Test] + public void Test_TuplePack_Deserialize_IPAddress() + { + Assert.That(TuPack.DecodeKey(Slice.Unescape("<01><7F><00><00><01><00>")), Is.EqualTo(IPAddress.Parse("127.0.0.1"))); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<01><00><00><00><00><00>")), Is.EqualTo(IPAddress.Parse("0.0.0.0"))); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<01><01><02><03><04><00>")), Is.EqualTo(IPAddress.Parse("1.2.3.4"))); + + Assert.That(TuPack.DecodeKey(TuPack.EncodeKey("127.0.0.1")), Is.EqualTo(IPAddress.Loopback)); + + var ip = IPAddress.Parse("192.168.0.1"); + Assert.That(TuPack.DecodeKey(TuPack.EncodeKey(ip.ToString())), Is.EqualTo(ip)); + Assert.That(TuPack.DecodeKey(TuPack.EncodeKey(ip.GetAddressBytes())), Is.EqualTo(ip)); +#pragma warning disable 618 + Assert.That(TuPack.DecodeKey(TuPack.EncodeKey(ip.Address)), Is.EqualTo(ip)); +#pragma warning restore 618 + } + + [Test] + public void Test_TuplePack_NullableTypes() + { + // Nullable types will either be encoded as <14> for null, or their regular encoding if not null + + // serialize + + Assert.That(TuPack.EncodeKey(0), Is.EqualTo(Slice.Unescape("<14>"))); + Assert.That(TuPack.EncodeKey(123), Is.EqualTo(Slice.Unescape("<15>{"))); + Assert.That(TuPack.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>"))); + + Assert.That(TuPack.EncodeKey(0L), Is.EqualTo(Slice.Unescape("<14>"))); + Assert.That(TuPack.EncodeKey(123L), Is.EqualTo(Slice.Unescape("<15>{"))); + Assert.That(TuPack.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>"))); + + Assert.That(TuPack.EncodeKey(true), Is.EqualTo(Slice.Unescape("<15><01>"))); + Assert.That(TuPack.EncodeKey(false), 
Is.EqualTo(Slice.Unescape("<14>"))); + Assert.That(TuPack.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>")), "Maybe it was File Not Found?"); + + Assert.That(TuPack.EncodeKey(Guid.Empty), Is.EqualTo(Slice.Unescape("0<00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>"))); + Assert.That(TuPack.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>"))); + + Assert.That(TuPack.EncodeKey(TimeSpan.Zero), Is.EqualTo(Slice.Unescape("!<80><00><00><00><00><00><00><00>"))); + Assert.That(TuPack.EncodeKey(null), Is.EqualTo(Slice.Unescape("<00>"))); + + // deserialize + + Assert.That(TuPack.DecodeKey(Slice.Unescape("<14>")), Is.EqualTo(0)); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<15>{")), Is.EqualTo(123)); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<00>")), Is.Null); + + Assert.That(TuPack.DecodeKey(Slice.Unescape("<14>")), Is.EqualTo(0L)); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<15>{")), Is.EqualTo(123L)); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<00>")), Is.Null); + + Assert.That(TuPack.DecodeKey(Slice.Unescape("<15><01>")), Is.True); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<14>")), Is.False); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<00>")), Is.Null); + + Assert.That(TuPack.DecodeKey(Slice.Unescape("0<00><00><00><00><00><00><00><00><00><00><00><00><00><00><00><00>")), Is.EqualTo(Guid.Empty)); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<00>")), Is.Null); + + Assert.That(TuPack.DecodeKey(Slice.Unescape("<14>")), Is.EqualTo(TimeSpan.Zero)); + Assert.That(TuPack.DecodeKey(Slice.Unescape("<00>")), Is.Null); + + } + + [Test] + public void Test_TuplePack_Serialize_Embedded_Tuples() + { + Action verify = (t, expected) => + { + var key = TuPack.Pack(t); + Assert.That(key.ToHexaString(' '), Is.EqualTo(expected)); + var t2 = TuPack.Unpack(key); + Assert.That(t2, Is.Not.Null); + Assert.That(t2.Count, Is.EqualTo(t.Count), "{0}", t2); + Assert.That(t2, Is.EqualTo(t)); + }; + + // Index composite key + ITuple value = STuple.Create(2014, 
11, 6); // Indexing a date value (Y, M, D) + string docId = "Doc123"; + // key would be "(..., value, id)" + + verify( + STuple.Create(42, value, docId), + "15 2A 03 16 07 DE 15 0B 15 06 00 02 44 6F 63 31 32 33 00" + ); + verify( + STuple.Create(new object[] {42, value, docId}), + "15 2A 03 16 07 DE 15 0B 15 06 00 02 44 6F 63 31 32 33 00" + ); + verify( + STuple.Create(42).Append(value).Append(docId), + "15 2A 03 16 07 DE 15 0B 15 06 00 02 44 6F 63 31 32 33 00" + ); + verify( + STuple.Create(42).Append(value, docId), + "15 2A 03 16 07 DE 15 0B 15 06 00 02 44 6F 63 31 32 33 00" + ); + + // multiple depth + verify( + STuple.Create(1, STuple.Create(2, 3), STuple.Create(STuple.Create(4, 5, 6)), 7), + "15 01 03 15 02 15 03 00 03 03 15 04 15 05 15 06 00 00 15 07" + ); + + // corner cases + verify( + STuple.Create(STuple.Empty), + "03 00" // empty tumple should have header and footer + ); + verify( + STuple.Create(STuple.Empty, default(string)), + "03 00 00" // outer null should not be escaped + ); + verify( + STuple.Create(STuple.Create(default(string)), default(string)), + "03 00 FF 00 00" // inner null should be escaped, but not outer + ); + verify( + STuple.Create(STuple.Create(0x100, 0x10000, 0x1000000)), + "03 16 01 00 17 01 00 00 18 01 00 00 00 00" + ); + verify( + STuple.Create(default(string), STuple.Empty, default(string), STuple.Create(default(string)), default(string)), + "00 03 00 00 03 00 FF 00 00" + ); + + } + + [Test] + public void Test_TuplePack_Deserialize_Embedded_Tuples() + { + // ((42, (2014, 11, 6), "Hello", true), ) + var packed = TuPack.EncodeKey(STuple.Create(42, STuple.Create(2014, 11, 6), "Hello", true)); + Log($"t = {TuPack.Unpack(packed)}"); + Assert.That(packed[0], Is.EqualTo(TupleTypes.TupleStart), "Missing Embedded Tuple marker"); + { + var t = TuPack.DecodeKey(packed); + Assert.That(t, Is.Not.Null); + Assert.That(t.Count, Is.EqualTo(4)); + Assert.That(t.Get(0), Is.EqualTo(42)); + Assert.That(t.Get(1), Is.EqualTo(STuple.Create(2014, 11, 
6))); + Assert.That(t.Get(2), Is.EqualTo("Hello")); + Assert.That(t.Get(3), Is.True); + } + { + var t = TuPack.DecodeKey>(packed); + Assert.That(t, Is.Not.Null); + Assert.That(t.Item1, Is.EqualTo(42)); + Assert.That(t.Item2, Is.EqualTo(STuple.Create(2014, 11, 6))); + Assert.That(t.Item3, Is.EqualTo("Hello")); + Assert.That(t.Item4, Is.True); + } + { + var t = TuPack.DecodeKey, string, bool>>(packed); + Assert.That(t, Is.Not.Null); + Assert.That(t.Item1, Is.EqualTo(42)); + Assert.That(t.Item2, Is.EqualTo(STuple.Create(2014, 11, 6))); + Assert.That(t.Item3, Is.EqualTo("Hello")); + Assert.That(t.Item4, Is.True); + } + + // (null,) + packed = TuPack.EncodeKey(default(string)); + Log($"t = {TuPack.Unpack(packed)}"); + { + var t = TuPack.DecodeKey(packed); + Assert.That(t, Is.Null); + } + { + var t = TuPack.DecodeKey, string, bool>>(packed); + Assert.That(t.Item1, Is.EqualTo(0)); + Assert.That(t.Item2, Is.EqualTo(default(STuple))); + Assert.That(t.Item3, Is.Null); + Assert.That(t.Item4, Is.False); + } + + //fallback if encoded as slice + packed = TuPack.EncodeKey(TuPack.EncodeKey(42, STuple.Create(2014, 11, 6), "Hello", true)); + Log($"t = {TuPack.Unpack(packed)}"); + Assert.That(packed[0], Is.EqualTo(TupleTypes.Bytes), "Missing Slice marker"); + { + var t = TuPack.DecodeKey, string, bool>>(packed); + Assert.That(t, Is.Not.Null); + Assert.That(t.Item1, Is.EqualTo(42)); + Assert.That(t.Item2, Is.EqualTo(STuple.Create(2014, 11, 6))); + Assert.That(t.Item3, Is.EqualTo("Hello")); + Assert.That(t.Item4, Is.True); + } + } + + [Test] + public void Test_TuplePack_SameBytes() + { + // two ways on packing the "same" tuple yield the same binary output + { + var expected = TuPack.EncodeKey("Hello World"); + Assert.That(TuPack.Pack(STuple.Create("Hello World")), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(((ITuple) STuple.Create("Hello World"))), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(STuple.Create(new object[] {"Hello World"})), Is.EqualTo(expected)); + 
Assert.That(TuPack.Pack(STuple.Create("Hello World", 1234).Substring(0, 1)), Is.EqualTo(expected)); + } + { + var expected = TuPack.EncodeKey("Hello World", 1234); + Assert.That(TuPack.Pack(STuple.Create("Hello World", 1234)), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(((ITuple) STuple.Create("Hello World", 1234))), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(STuple.Create("Hello World").Append(1234)), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(((ITuple) STuple.Create("Hello World")).Append(1234)), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(STuple.Create(new object[] {"Hello World", 1234})), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(STuple.Create("Hello World", 1234, "Foo").Substring(0, 2)), Is.EqualTo(expected)); + } + { + var expected = TuPack.EncodeKey("Hello World", 1234, "Foo"); + Assert.That(TuPack.Pack(STuple.Create("Hello World", 1234, "Foo")), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(((ITuple) STuple.Create("Hello World", 1234, "Foo"))), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(STuple.Create("Hello World").Append(1234).Append("Foo")), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(((ITuple) STuple.Create("Hello World")).Append(1234).Append("Foo")), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(STuple.Create(new object[] {"Hello World", 1234, "Foo"})), Is.EqualTo(expected)); + Assert.That(TuPack.Pack(STuple.Create("Hello World", 1234, "Foo", "Bar").Substring(0, 3)), Is.EqualTo(expected)); + } + + // also, there should be no differences between int,long,uint,... 
if they have the same value + Assert.That(TuPack.Pack(STuple.Create("Hello", 123)), Is.EqualTo(TuPack.Pack(STuple.Create("Hello", 123L)))); + Assert.That(TuPack.Pack(STuple.Create("Hello", -123)), Is.EqualTo(TuPack.Pack(STuple.Create("Hello", -123L)))); + + // GUID / UUID128 should pack the same way + var g = Guid.NewGuid(); + Assert.That(TuPack.Pack(STuple.Create(g)), Is.EqualTo(TuPack.Pack(STuple.Create((Uuid128) g))), "GUID vs UUID128"); + } + + [Test] + public void Test_TuplePack_Numbers_Are_Sorted_Lexicographically() + { + // pick two numbers 'x' and 'y' at random, and check that the order of 'x' compared to 'y' is the same as 'pack(tuple(x))' compared to 'pack(tuple(y))' + + // ie: ensure that x.CompareTo(y) always has the same sign as Tuple(x).CompareTo(Tuple(y)) + + const int N = 1 * 1000 * 1000; + var rnd = new Random(); + var sw = Stopwatch.StartNew(); + + for (int i = 0; i < N; i++) + { + int x = rnd.Next() - 1073741824; + int y = x; + while (y == x) + { + y = rnd.Next() - 1073741824; + } + + var t1 = TuPack.EncodeKey(x); + var t2 = TuPack.EncodeKey(y); + + int dint = x.CompareTo(y); + int dtup = t1.CompareTo(t2); + + if (dtup == 0) Assert.Fail("Tuples for x={0} and y={1} should not have the same packed value", x, y); + + // compare signs + if (Math.Sign(dint) != Math.Sign(dtup)) + { + Assert.Fail("Tuples for x={0} and y={1} are not sorted properly ({2} / {3}): t(x)='{4}' and t(y)='{5}'", x, y, dint, dtup, t1.ToString(), t2.ToString()); + } + } + sw.Stop(); + Log("Checked {0:N0} tuples in {1:N1} ms", N, sw.ElapsedMilliseconds); + + } + + #endregion + + [Test] + public void Test_TuplePack_Pack() + { + Assert.That( + TuPack.Pack(STuple.Create()), + Is.EqualTo(Slice.Empty) + ); + Assert.That( + TuPack.Pack(STuple.Create("hello world")).ToString(), + Is.EqualTo("<02>hello world<00>") + ); + Assert.That( + TuPack.Pack(STuple.Create("hello", "world")).ToString(), + Is.EqualTo("<02>hello<00><02>world<00>") + ); + Assert.That( + TuPack.Pack(STuple.Create("hello 
world", 123)).ToString(), + Is.EqualTo("<02>hello world<00><15>{") + ); + Assert.That( + TuPack.Pack(STuple.Create("hello world", 1234, -1234)).ToString(), + Is.EqualTo("<02>hello world<00><16><04><12>-") + ); + Assert.That( + TuPack.Pack(STuple.Create("hello world", 123, false)).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14>") + ); + Assert.That( + TuPack.Pack(STuple.Create("hello world", 123, false, new byte[] {123, 1, 66, 0, 42})).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") + ); + Assert.That( + TuPack.Pack(STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI)).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18>") + ); + Assert.That( + TuPack.Pack(STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L)).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-") + ); + Assert.That( + TuPack.Pack(STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L, "こんにちは世界")).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>") + ); + Assert.That( + TuPack.Pack(STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L, "こんにちは世界", true)).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00><15><01>") + ); + Assert.That( + TuPack.Pack(STuple.Create(new object[] {"hello world", 123, false, new byte[] {123, 1, 66, 0, 42}})).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") + ); + Assert.That( + TuPack.Pack(STuple.FromArray(new object[] {"hello world", 123, false, new byte[] {123, 1, 66, 0, 42}}, 1, 2)).ToString(), + Is.EqualTo("<15>{<14>") + ); + Assert.That( + TuPack.Pack(STuple.FromEnumerable(new List {"hello world", 123, false, new byte[] 
{123, 1, 66, 0, 42}})).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") + ); + + } + + [Test] + public void Test_TuplePack_Pack_With_Prefix() + { + + Slice prefix = Slice.FromString("ABC"); + + Assert.That( + TuPack.Pack(prefix, STuple.Create()).ToString(), + Is.EqualTo("ABC") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello world")).ToString(), + Is.EqualTo("ABC<02>hello world<00>") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello", "world")).ToString(), + Is.EqualTo("ABC<02>hello<00><02>world<00>") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello world", 123)).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello world", 1234, -1234)).ToString(), + Is.EqualTo("ABC<02>hello world<00><16><04><12>-") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello world", 123, false)).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14>") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 })).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI)).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18>") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L)).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L, "こんにちは世界")).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create("hello world", 
123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L, "こんにちは世界", true)).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00><15><01>") + ); + Assert.That( + TuPack.Pack(prefix, STuple.Create(new object[] { "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 } })).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") + ); + Assert.That( + TuPack.Pack(prefix, STuple.FromArray(new object[] { "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 } }, 1, 2)).ToString(), + Is.EqualTo("ABC<15>{<14>") + ); + Assert.That( + TuPack.Pack(prefix, STuple.FromEnumerable(new List { "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 } })).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") + ); + + // Nil or Empty slice should be equivalent to no prefix + Assert.That( + TuPack.Pack(Slice.Nil, STuple.Create("hello world", 123)).ToString(), + Is.EqualTo("<02>hello world<00><15>{") + ); + Assert.That( + TuPack.Pack(Slice.Empty, STuple.Create("hello world", 123)).ToString(), + Is.EqualTo("<02>hello world<00><15>{") + ); + } + + [Test] + public void Test_TuplePack_PackTuples() + { + { + Slice[] slices; + var tuples = new ITuple[] + { + STuple.Create("hello"), + STuple.Create(123), + STuple.Create(false), + STuple.Create("world", 456, true) + }; + + // array version + slices = TuPack.PackTuples(tuples); + Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(tuples.Length)); + Assert.That(slices, Is.EqualTo(tuples.Select(t => TuPack.Pack(t)))); + + // IEnumerable version that is passed an array + slices = tuples.PackTuples(); + Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(tuples.Length)); + Assert.That(slices, Is.EqualTo(tuples.Select(t => TuPack.Pack(t)))); + + // IEnumerable version but with a "real" enumerable + slices = tuples.Select(t => t).PackTuples(); + 
Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(tuples.Length)); + Assert.That(slices, Is.EqualTo(tuples.Select(t => TuPack.Pack(t)))); + } + + //Optimized STuple<...> versions + + { + var packed = TuPack.PackTuples( + STuple.Create("Hello"), + STuple.Create(123, true), + STuple.Create(Math.PI, -1234L) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<02>Hello<00>")); + Assert.That(packed[1].ToString(), Is.EqualTo("<15>{<15><01>")); + Assert.That(packed[2].ToString(), Is.EqualTo("!<09>!TD-<18><12>-")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.PackTuples( + STuple.Create(123), + STuple.Create(456), + STuple.Create(789) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<15>{")); + Assert.That(packed[1].ToString(), Is.EqualTo("<16><01>")); + Assert.That(packed[2].ToString(), Is.EqualTo("<16><03><15>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.PackTuples( + STuple.Create(123, true), + STuple.Create(456, false), + STuple.Create(789, false) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<15>{<15><01>")); + Assert.That(packed[1].ToString(), Is.EqualTo("<16><01><14>")); + Assert.That(packed[2].ToString(), Is.EqualTo("<16><03><15><14>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.PackTuples( + STuple.Create("foo", 123, true), + STuple.Create("bar", 456, false), + 
STuple.Create("baz", 789, false) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<02>foo<00><15>{<15><01>")); + Assert.That(packed[1].ToString(), Is.EqualTo("<02>bar<00><16><01><14>")); + Assert.That(packed[2].ToString(), Is.EqualTo("<02>baz<00><16><03><15><14>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.PackTuples( + STuple.Create("foo", 123, true, "yes"), + STuple.Create("bar", 456, false, "yes"), + STuple.Create("baz", 789, false, "no") + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<02>foo<00><15>{<15><01><02>yes<00>")); + Assert.That(packed[1].ToString(), Is.EqualTo("<02>bar<00><16><01><14><02>yes<00>")); + Assert.That(packed[2].ToString(), Is.EqualTo("<02>baz<00><16><03><15><14><02>no<00>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.PackTuples( + STuple.Create("foo", 123, true, "yes", 7), + STuple.Create("bar", 456, false, "yes", 42), + STuple.Create("baz", 789, false, "no", 9) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<02>foo<00><15>{<15><01><02>yes<00><15><07>")); + Assert.That(packed[1].ToString(), Is.EqualTo("<02>bar<00><16><01><14><02>yes<00><15>*")); + Assert.That(packed[2].ToString(), Is.EqualTo("<02>baz<00><16><03><15><14><02>no<00><15><09>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.PackTuples( + STuple.Create("foo", 123, true, "yes", 7, 1.5d), + STuple.Create("bar", 
456, false, "yes", 42, 0.7d), + STuple.Create("baz", 789, false, "no", 9, 0.66d) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<02>foo<00><15>{<15><01><02>yes<00><15><07>!<00><00><00><00><00><00>")); + Assert.That(packed[1].ToString(), Is.EqualTo("<02>bar<00><16><01><14><02>yes<00><15>*!ffffff")); + Assert.That(packed[2].ToString(), Is.EqualTo("<02>baz<00><16><03><15><14><02>no<00><15><09>!<1E>Q<85><1F>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + } + + [Test] + public void Test_TuplePack_PackTuples_With_Prefix() + { + Slice prefix = Slice.FromString("ABC"); + + { + Slice[] slices; + var tuples = new ITuple[] + { + STuple.Create("hello"), + STuple.Create(123), + STuple.Create(false), + STuple.Create("world", 456, true) + }; + + // array version + slices = TuPack.PackTuples(prefix, tuples); + Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(tuples.Length)); + Assert.That(slices, Is.EqualTo(tuples.Select(t => prefix + TuPack.Pack(t)))); + + // LINQ version + slices = TuPack.PackTuples(prefix, tuples.Select(x => x)); + Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(tuples.Length)); + Assert.That(slices, Is.EqualTo(tuples.Select(t => prefix + TuPack.Pack(t)))); + + } + + //Optimized STuple<...> versions + + { + var packed = TuPack.PackTuples( + prefix, + STuple.Create("Hello"), + STuple.Create(123, true), + STuple.Create(Math.PI, -1234L) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("ABC<02>Hello<00>")); + Assert.That(packed[1].ToString(), Is.EqualTo("ABC<15>{<15><01>")); + Assert.That(packed[2].ToString(), Is.EqualTo("ABC!<09>!TD-<18><12>-")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + 
Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.PackTuples( + prefix, + STuple.Create(123), + STuple.Create(456), + STuple.Create(789) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("ABC<15>{")); + Assert.That(packed[1].ToString(), Is.EqualTo("ABC<16><01>")); + Assert.That(packed[2].ToString(), Is.EqualTo("ABC<16><03><15>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.PackTuples( + prefix, + STuple.Create(123, true), + STuple.Create(456, false), + STuple.Create(789, false) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("ABC<15>{<15><01>")); + Assert.That(packed[1].ToString(), Is.EqualTo("ABC<16><01><14>")); + Assert.That(packed[2].ToString(), Is.EqualTo("ABC<16><03><15><14>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.PackTuples( + prefix, + STuple.Create("foo", 123, true), + STuple.Create("bar", 456, false), + STuple.Create("baz", 789, false) + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("ABC<02>foo<00><15>{<15><01>")); + Assert.That(packed[1].ToString(), Is.EqualTo("ABC<02>bar<00><16><01><14>")); + Assert.That(packed[2].ToString(), Is.EqualTo("ABC<02>baz<00><16><03><15><14>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + } + + [Test] + public void Test_TuplePack_EncodeKey() + { + Assert.That( + TuPack.EncodeKey("hello world").ToString(), + 
Is.EqualTo("<02>hello world<00>") + ); + Assert.That( + TuPack.EncodeKey("hello", "world").ToString(), + Is.EqualTo("<02>hello<00><02>world<00>") + ); + Assert.That( + TuPack.EncodeKey("hello world", 123).ToString(), + Is.EqualTo("<02>hello world<00><15>{") + ); + Assert.That( + TuPack.EncodeKey("hello world", 1234, -1234).ToString(), + Is.EqualTo("<02>hello world<00><16><04><12>-") + ); + Assert.That( + TuPack.EncodeKey("hello world", 123, false).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14>") + ); + Assert.That( + TuPack.EncodeKey("hello world", 123, false, new byte[] {123, 1, 66, 0, 42}).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") + ); + Assert.That( + TuPack.EncodeKey("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18>") + ); + Assert.That( + TuPack.EncodeKey("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-") + ); + Assert.That( + TuPack.EncodeKey("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L, "こんにちは世界").ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>") + ); + Assert.That( + TuPack.EncodeKey("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L, "こんにちは世界", true).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00><15><01>") + ); + + } + + [Test] + public void Test_TuplePack_EncodeKey_With_Prefix() + { + Slice prefix = Slice.FromString("ABC"); + + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello world").ToString(), + Is.EqualTo("ABC<02>hello world<00>") + ); + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello", "world").ToString(), + 
Is.EqualTo("ABC<02>hello<00><02>world<00>") + ); + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello world", 123).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{") + ); + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello world", 1234, -1234).ToString(), + Is.EqualTo("ABC<02>hello world<00><16><04><12>-") + ); + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello world", 123, false).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14>") + ); + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") + ); + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18>") + ); + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-") + ); + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L, "こんにちは世界").ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>") + ); + Assert.That( + TuPack.EncodePrefixedKey(prefix, "hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L, "こんにちは世界", true).ToString(), + Is.EqualTo("ABC<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00><15><01>") + ); + + } + + [Test] + public void Test_TuplePack_EncodeKey_Boxed() + { + Slice slice; + + slice = TuPack.EncodeKey(default(object)); + Assert.That(slice.ToString(), Is.EqualTo("<00>")); + + slice = TuPack.EncodeKey(1); + Assert.That(slice.ToString(), Is.EqualTo("<15><01>")); + + slice = 
TuPack.EncodeKey(1L); + Assert.That(slice.ToString(), Is.EqualTo("<15><01>")); + + slice = TuPack.EncodeKey(1U); + Assert.That(slice.ToString(), Is.EqualTo("<15><01>")); + + slice = TuPack.EncodeKey(1UL); + Assert.That(slice.ToString(), Is.EqualTo("<15><01>")); + + slice = TuPack.EncodeKey(false); + Assert.That(slice.ToString(), Is.EqualTo("<14>")); + + slice = TuPack.EncodeKey(new byte[] {4, 5, 6}); + Assert.That(slice.ToString(), Is.EqualTo("<01><04><05><06><00>")); + + slice = TuPack.EncodeKey("hello"); + Assert.That(slice.ToString(), Is.EqualTo("<02>hello<00>")); + } + + [Test] + public void Test_TuplePack_EncodeKeys() + { + //Optimized STuple<...> versions + + { + var packed = TuPack.EncodeKeys( + "foo", + "bar", + "baz" + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<02>foo<00>")); + Assert.That(packed[1].ToString(), Is.EqualTo("<02>bar<00>")); + Assert.That(packed[2].ToString(), Is.EqualTo("<02>baz<00>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.EncodeKeys( + 123, + 456, + 789 + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<15>{")); + Assert.That(packed[1].ToString(), Is.EqualTo("<16><01>")); + Assert.That(packed[2].ToString(), Is.EqualTo("<16><03><15>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + + { + var packed = TuPack.EncodeKeys(Enumerable.Range(0, 3)); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<14>")); + Assert.That(packed[1].ToString(), Is.EqualTo("<15><01>")); + Assert.That(packed[2].ToString(), Is.EqualTo("<15><02>")); + Assert.That(packed[1].Array, 
Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.EncodeKeys(new[] {"Bonjour", "le", "Monde"}, (s) => s.Length); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("<15><07>")); + Assert.That(packed[1].ToString(), Is.EqualTo("<15><02>")); + Assert.That(packed[2].ToString(), Is.EqualTo("<15><05>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + } + + [Test] + public void Test_TuplePack_EncodeKeys_With_Prefix() + { + Slice prefix = Slice.FromString("ABC"); + + { + var packed = TuPack.EncodePrefixedKeys( + prefix, + "foo", + "bar", + "baz" + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("ABC<02>foo<00>")); + Assert.That(packed[1].ToString(), Is.EqualTo("ABC<02>bar<00>")); + Assert.That(packed[2].ToString(), Is.EqualTo("ABC<02>baz<00>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.EncodePrefixedKeys( + prefix, + 123, + 456, + 789 + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + Assert.That(packed[0].ToString(), Is.EqualTo("ABC<15>{")); + Assert.That(packed[1].ToString(), Is.EqualTo("ABC<16><01>")); + Assert.That(packed[2].ToString(), Is.EqualTo("ABC<16><03><15>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + { + var packed = TuPack.EncodePrefixedKeys( + prefix, + new[] { "Bonjour", "le", "Monde" }, + (s) => s.Length + ); + Assert.That(packed, Is.Not.Null.And.Length.EqualTo(3)); + 
Assert.That(packed[0].ToString(), Is.EqualTo("ABC<15><07>")); + Assert.That(packed[1].ToString(), Is.EqualTo("ABC<15><02>")); + Assert.That(packed[2].ToString(), Is.EqualTo("ABC<15><05>")); + Assert.That(packed[1].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + Assert.That(packed[2].Array, Is.SameAs(packed[0].Array), "Should share same bufer"); + } + + } + + [Test] + public void Test_TuplePack_SerializersOfT() + { + Slice prefix = Slice.FromString("ABC"); + { + var serializer = TupleSerializer.Default; + var t = STuple.Create(123); + var tw = new TupleWriter(); + tw.Output.WriteBytes(prefix); + serializer.PackTo(ref tw, in t); + Assert.That(tw.ToSlice().ToString(), Is.EqualTo("ABC<15>{")); + } + { + var serializer = TupleSerializer.Default; + var t = STuple.Create("foo"); + var tw = new TupleWriter(); + tw.Output.WriteBytes(prefix); + serializer.PackTo(ref tw, in t); + Assert.That(tw.ToSlice().ToString(), Is.EqualTo("ABC<02>foo<00>")); + } + + { + var serializer = TupleSerializer.Default; + var t = STuple.Create("foo", 123); + var tw = new TupleWriter(); + tw.Output.WriteBytes(prefix); + serializer.PackTo(ref tw, in t); + Assert.That(tw.ToSlice().ToString(), Is.EqualTo("ABC<02>foo<00><15>{")); + } + + { + var serializer = TupleSerializer.Default; + var t = STuple.Create("foo", false, 123); + var tw = new TupleWriter(); + tw.Output.WriteBytes(prefix); + serializer.PackTo(ref tw, in t); + Assert.That(tw.ToSlice().ToString(), Is.EqualTo("ABC<02>foo<00><14><15>{")); + } + + { + var serializer = TupleSerializer.Default; + var t = STuple.Create("foo", false, 123, -1L); + var tw = new TupleWriter(); + tw.Output.WriteBytes(prefix); + serializer.PackTo(ref tw, in t); + Assert.That(tw.ToSlice().ToString(), Is.EqualTo("ABC<02>foo<00><14><15>{<13>")); + } + + { + var serializer = TupleSerializer.Default; + var t = STuple.Create("foo", false, 123, -1L, "narf"); + var tw = new TupleWriter(); + tw.Output.WriteBytes(prefix); + serializer.PackTo(ref tw, in t); + 
Assert.That(tw.ToSlice().ToString(), Is.EqualTo("ABC<02>foo<00><14><15>{<13><02>narf<00>")); + } + + { + var serializer = TupleSerializer.Default; + var t = STuple.Create("foo", false, 123, -1L, "narf", Math.PI); + var tw = new TupleWriter(); + tw.Output.WriteBytes(prefix); + serializer.PackTo(ref tw, in t); + Assert.That(tw.ToSlice().ToString(), Is.EqualTo("ABC<02>foo<00><14><15>{<13><02>narf<00>!<09>!TD-<18>")); + } + + } + [Test] + public void Test_TuplePack_Unpack() + { + + var packed = TuPack.EncodeKey("hello world"); + Log(packed); + + var tuple = TuPack.Unpack(packed); + Assert.That(tuple, Is.Not.Null); + Log(tuple); + Assert.That(tuple.Count, Is.EqualTo(1)); + Assert.That(tuple.Get(0), Is.EqualTo("hello world")); + + packed = TuPack.EncodeKey("hello world", 123); + Log(packed); + + tuple = TuPack.Unpack(packed); + Assert.That(tuple, Is.Not.Null); + Log(tuple); + Assert.That(tuple.Count, Is.EqualTo(2)); + Assert.That(tuple.Get(0), Is.EqualTo("hello world")); + Assert.That(tuple.Get(1), Is.EqualTo(123)); + + packed = TuPack.EncodeKey(1, 256, 257, 65536, int.MaxValue, long.MaxValue); + Log(packed); + + tuple = TuPack.Unpack(packed); + Assert.That(tuple, Is.Not.Null); + Assert.That(tuple.Count, Is.EqualTo(6)); + Assert.That(tuple.Get(0), Is.EqualTo(1)); + Assert.That(tuple.Get(1), Is.EqualTo(256)); + Assert.That(tuple.Get(2), Is.EqualTo(257), ((SlicedTuple) tuple).GetSlice(2).ToString()); + Assert.That(tuple.Get(3), Is.EqualTo(65536)); + Assert.That(tuple.Get(4), Is.EqualTo(int.MaxValue)); + Assert.That(tuple.Get(5), Is.EqualTo(long.MaxValue)); + + packed = TuPack.EncodeKey(-1, -256, -257, -65536, int.MinValue, long.MinValue); + Log(packed); + + tuple = TuPack.Unpack(packed); + Assert.That(tuple, Is.Not.Null); + Assert.That(tuple, Is.InstanceOf()); + Log(tuple); + Assert.That(tuple.Count, Is.EqualTo(6)); + Assert.That(tuple.Get(0), Is.EqualTo(-1)); + Assert.That(tuple.Get(1), Is.EqualTo(-256)); + Assert.That(tuple.Get(2), Is.EqualTo(-257), "Slice is " + 
((SlicedTuple) tuple).GetSlice(2).ToString()); + Assert.That(tuple.Get(3), Is.EqualTo(-65536)); + Assert.That(tuple.Get(4), Is.EqualTo(int.MinValue)); + Assert.That(tuple.Get(5), Is.EqualTo(long.MinValue)); + } + + [Test] + public void Test_TuplePack_DecodeKey() + { + Assert.That( + TuPack.DecodeKey(Slice.Unescape("<02>hello world<00>")), + Is.EqualTo("hello world") + ); + Assert.That( + TuPack.DecodeKey(Slice.Unescape("<15>{")), + Is.EqualTo(123) + ); + Assert.That( + TuPack.DecodeKey(Slice.Unescape("<02>hello<00><02>world<00>")), + Is.EqualTo(STuple.Create("hello", "world")) + ); + Assert.That( + TuPack.DecodeKey(Slice.Unescape("<02>hello world<00><15>{")), + Is.EqualTo(STuple.Create("hello world", 123)) + ); + Assert.That( + TuPack.DecodeKey(Slice.Unescape("<02>hello world<00><16><04><12>-")), + Is.EqualTo(STuple.Create("hello world", 1234, -1234L)) + ); + Assert.That( + TuPack.DecodeKey(Slice.Unescape("<02>hello world<00><15>{<14>")), + Is.EqualTo(STuple.Create("hello world", 123, false)) + ); + Assert.That( + TuPack.DecodeKey(Slice.Unescape("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>")), + Is.EqualTo(STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }.AsSlice())) + ); + Assert.That( + TuPack.DecodeKey(Slice.Unescape("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18>")), + Is.EqualTo(STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }.AsSlice(), Math.PI)) + ); + Assert.That( + TuPack.DecodeKey(Slice.Unescape("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-")), + Is.EqualTo(STuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }.AsSlice(), Math.PI, -1234L)) + ); + //TODO: if/when we have tuples with 7 or 8 items... 
+ //Assert.That( + // TuPack.DecodeKey(Slice.Unescape("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>")), + // Is.EqualTo(STuple.Create("hello world", 123, false, Slice.Create(new byte[] { 123, 1, 66, 0, 42 }), Math.PI, -1234L, "こんにちは世界")) + //); + //Assert.That( + // TuPack.DecodeKey(Slice.Unescape("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-<02><81><93><82><93><81><81><81><96><95><8C><00><15><01>")), + // Is.EqualTo(STuple.Create("hello world", 123, false, Slice.Create(new byte[] { 123, 1, 66, 0, 42 }), Math.PI, -1234L, "こんにちは世界", true)) + //); + } + + [Test] + public void Test_TuplePack_Serialize_ITupleFormattable() + { + // types that implement ITupleFormattable should be packed by calling ToTuple() and then packing the returned tuple + + Slice packed; + + packed = TuplePacker.Serialize(new Thing {Foo = 123, Bar = "hello"}); + Assert.That(packed.ToString(), Is.EqualTo("<03><15>{<02>hello<00><00>")); + + packed = TuplePacker.Serialize(new Thing()); + Assert.That(packed.ToString(), Is.EqualTo("<03><14><00><00>")); + + packed = TuplePacker.Serialize(default(Thing)); + Assert.That(packed.ToString(), Is.EqualTo("<00>")); + + } + + [Test] + public void Test_TuplePack_Deserialize_ITupleFormattable() + { + Slice slice; + Thing thing; + + slice = Slice.Unescape("<03><16><01><02>world<00><00>"); + thing = TuplePackers.DeserializeFormattable(slice); + Assert.That(thing, Is.Not.Null); + Assert.That(thing.Foo, Is.EqualTo(456)); + Assert.That(thing.Bar, Is.EqualTo("world")); + + slice = Slice.Unescape("<03><14><00><00>"); + thing = TuplePackers.DeserializeFormattable(slice); + Assert.That(thing, Is.Not.Null); + Assert.That(thing.Foo, Is.EqualTo(0)); + Assert.That(thing.Bar, Is.EqualTo(null)); + + slice = Slice.Unescape("<00>"); + thing = TuplePackers.DeserializeFormattable(slice); + Assert.That(thing, Is.Null); + } + + [Test] + public void Test_TuplePack_EncodeKeys_Of_T() + { + Slice[] 
slices; + + #region PackRange(Tuple, ...) + + var tuple = STuple.Create("hello"); + int[] items = new int[] {1, 2, 3, 123, -1, int.MaxValue}; + + // array version + slices = TuPack.EncodePrefixedKeys(tuple, items); + Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(items.Length)); + Assert.That(slices, Is.EqualTo(items.Select(x => TuPack.Pack(tuple.Append(x))))); + + // IEnumerable version that is passed an array + slices = TuPack.EncodePrefixedKeys(tuple, (IEnumerable) items); + Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(items.Length)); + Assert.That(slices, Is.EqualTo(items.Select(x => TuPack.Pack(tuple.Append(x))))); + + // IEnumerable version but with a "real" enumerable + slices = TuPack.EncodePrefixedKeys(tuple, items.Select(t => t)); + Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(items.Length)); + Assert.That(slices, Is.EqualTo(items.Select(x => TuPack.Pack(tuple.Append(x))))); + + #endregion + + #region PackRange(Slice, ...) 
+ + string[] words = {"hello", "world", "très bien", "断トツ", "abc\0def", null, String.Empty}; + + var merged = TuPack.EncodePrefixedKeys(Slice.FromByte(42), words); + Assert.That(merged, Is.Not.Null); + Assert.That(merged.Length, Is.EqualTo(words.Length)); + + for (int i = 0; i < words.Length; i++) + { + var expected = Slice.FromByte(42) + TuPack.EncodeKey(words[i]); + Assert.That(merged[i], Is.EqualTo(expected)); + + Assert.That(merged[i].Array, Is.SameAs(merged[0].Array), "All slices should be stored in the same buffer"); + if (i > 0) Assert.That(merged[i].Offset, Is.EqualTo(merged[i - 1].Offset + merged[i - 1].Count), "All slices should be contiguous"); + } + + // corner cases + // ReSharper disable AssignNullToNotNullAttribute + Assert.That( + () => TuPack.EncodePrefixedKeys(Slice.Empty, default(int[])), + Throws.InstanceOf().With.Property("ParamName").EqualTo("keys")); + Assert.That( + () => TuPack.EncodePrefixedKeys(Slice.Empty, default(IEnumerable)), + Throws.InstanceOf().With.Property("ParamName").EqualTo("keys")); + // ReSharper restore AssignNullToNotNullAttribute + + #endregion + } + + [Test] + public void Test_TuplePack_EncodeKeys_Boxed() + { + Slice[] slices; + var tuple = STuple.Create("hello"); + object[] items = {"world", 123, false, Guid.NewGuid(), long.MinValue}; + + // array version + slices = TuPack.EncodePrefixedKeys(tuple, items); + Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(items.Length)); + Assert.That(slices, Is.EqualTo(items.Select(x => TuPack.Pack(tuple.Append(x))))); + + // IEnumerable version that is passed an array + slices = TuPack.EncodePrefixedKeys(tuple, (IEnumerable) items); + Assert.That(slices, Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(items.Length)); + Assert.That(slices, Is.EqualTo(items.Select(x => TuPack.Pack(tuple.Append(x))))); + + // IEnumerable version but with a "real" enumerable + slices = TuPack.EncodePrefixedKeys(tuple, items.Select(t => t)); + Assert.That(slices, 
Is.Not.Null); + Assert.That(slices.Length, Is.EqualTo(items.Length)); + Assert.That(slices, Is.EqualTo(items.Select(x => TuPack.Pack(tuple.Append(x))))); + } + + [Test] + public void Test_TuplePack_Unpack_First_And_Last() + { + // should only work with tuples having at least one element + + Slice packed; + + packed = TuPack.EncodeKey(1); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo("1")); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo("1")); + + packed = TuPack.EncodeKey(1, 2); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo("1")); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo(2)); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo("2")); + + packed = TuPack.EncodeKey(1, 2, 3); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo("1")); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo(3)); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo("3")); + + packed = TuPack.EncodeKey(1, 2, 3, 4); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo("1")); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo(4)); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo("4")); + + packed = TuPack.EncodeKey(1, 2, 3, 4, 5); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo("1")); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo(5)); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo("5")); + + packed = TuPack.EncodeKey(1, 2, 3, 4, 5, 6); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo("1")); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo(6)); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo("6")); + + packed = TuPack.EncodeKey(1, 2, 3, 
4, 5, 6, 7); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo("1")); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo(7)); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo("7")); + + packed = TuPack.EncodeKey(1, 2, 3, 4, 5, 6, 7, 8); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeFirst(packed), Is.EqualTo("1")); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo(8)); + Assert.That(TuPack.DecodeLast(packed), Is.EqualTo("8")); + + Assert.That(() => TuPack.DecodeFirst(Slice.Nil), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeFirst(Slice.Empty), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeLast(Slice.Nil), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeLast(Slice.Empty), Throws.InstanceOf()); + + } + + [Test] + public void Test_TuplePack_UnpackSingle() + { + // should only work with tuples having exactly one element + + Slice packed; + + packed = TuPack.EncodeKey(1); + Assert.That(TuPack.DecodeKey(packed), Is.EqualTo(1)); + Assert.That(TuPack.DecodeKey(packed), Is.EqualTo("1")); + + packed = TuPack.EncodeKey("Hello\0World"); + Assert.That(TuPack.DecodeKey(packed), Is.EqualTo("Hello\0World")); + + Assert.That(() => TuPack.DecodeKey(Slice.Nil), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeKey(Slice.Empty), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeKey(TuPack.EncodeKey(1, 2)), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeKey(TuPack.EncodeKey(1, 2, 3)), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeKey(TuPack.EncodeKey(1, 2, 3, 4)), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeKey(TuPack.EncodeKey(1, 2, 3, 4, 5)), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeKey(TuPack.EncodeKey(1, 2, 3, 4, 5, 6)), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeKey(TuPack.EncodeKey(1, 2, 3, 4, 5, 6, 7)), Throws.InstanceOf()); + Assert.That(() => TuPack.DecodeKey(TuPack.EncodeKey(1, 2, 3, 
4, 5, 6, 7, 8)), Throws.InstanceOf()); + + } + + [Test] + public void Test_TuplePack_ToRange() + { + KeyRange range; + + // ToRange() should add 0x00 and 0xFF to the packed representations of the tuples + // note: we cannot increment the key to get the End key, because it conflicts with the Tuple Binary Encoding itself + + // Slice + range = TuPack.ToRange(Slice.FromString("ABC")); + Assert.That(range.Begin.ToString(), Is.EqualTo("ABC<00>"), "Begin key should be suffixed by 0x00"); + Assert.That(range.End.ToString(), Is.EqualTo("ABC"), "End key should be suffixed by 0xFF"); + + // Tuples + + range = TuPack.ToRange(STuple.Create("Hello")); + Assert.That(range.Begin.ToString(), Is.EqualTo("<02>Hello<00><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00>")); + + range = TuPack.ToRange(STuple.Create("Hello", 123)); + Assert.That(range.Begin.ToString(), Is.EqualTo("<02>Hello<00><15>{<00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00><15>{")); + + range = TuPack.ToRange(STuple.Create("Hello", 123, true)); + Assert.That(range.Begin.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01>")); + + range = TuPack.ToRange(STuple.Create("Hello", 123, true, -1234L)); + Assert.That(range.Begin.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><12>-<00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><12>-")); + + range = TuPack.ToRange(STuple.Create("Hello", 123, true, -1234L, "こんにちは世界")); + Assert.That(range.Begin.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>")); + + range = TuPack.ToRange(STuple.Create("Hello", 123, true, -1234L, "こんにちは世界", Math.PI)); + Assert.That(range.Begin.ToString(), 
Is.EqualTo("<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18>")); + + range = TuPack.ToRange(STuple.Create("Hello", 123, true, -1234L, "こんにちは世界", Math.PI, false)); + Assert.That(range.Begin.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><14><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><14>")); + + range = TuPack.ToRange(STuple.Create("Hello", 123, true, -1234L, "こんにちは世界", Math.PI, false, "TheEnd")); + Assert.That(range.Begin.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><14><02>TheEnd<00><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><14><02>TheEnd<00>")); + } + + [Test] + public void Test_TuplePack_ToRange_With_Prefix() + { + Slice prefix = Slice.FromString("ABC"); + KeyRange range; + + range = TuPack.ToRange(prefix, STuple.Create("Hello")); + Assert.That(range.Begin.ToString(), Is.EqualTo("ABC<02>Hello<00><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("ABC<02>Hello<00>")); + + range = TuPack.ToRange(prefix, STuple.Create("Hello", 123)); + Assert.That(range.Begin.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<00>")); + Assert.That(range.End.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{")); + + range = TuPack.ToRange(prefix, STuple.Create("Hello", 123, true)); + Assert.That(range.Begin.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01>")); + + range = TuPack.ToRange(prefix, STuple.Create("Hello", 123, true, -1234L)); + 
Assert.That(range.Begin.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-<00>")); + Assert.That(range.End.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-")); + + range = TuPack.ToRange(prefix, STuple.Create("Hello", 123, true, -1234L, "こんにちは世界")); + Assert.That(range.Begin.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>")); + + range = TuPack.ToRange(prefix, STuple.Create("Hello", 123, true, -1234L, "こんにちは世界", Math.PI)); + Assert.That(range.Begin.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18>")); + + range = TuPack.ToRange(prefix, STuple.Create("Hello", 123, true, -1234L, "こんにちは世界", Math.PI, false)); + Assert.That(range.Begin.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><14><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><14>")); + + range = TuPack.ToRange(prefix, STuple.Create("Hello", 123, true, -1234L, "こんにちは世界", Math.PI, false, "TheEnd")); + Assert.That(range.Begin.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><14><02>TheEnd<00><00>")); + Assert.That(range.End.ToString(), Is.EqualTo("ABC<02>Hello<00><15>{<15><01><12>-<02><81><93><82><93><81><81><81><96><95><8C><00>!<09>!TD-<18><14><02>TheEnd<00>")); + + // Nil or Empty prefix should not add anything + + range = TuPack.ToRange(Slice.Nil, STuple.Create("Hello", 123)); + Assert.That(range.Begin.ToString(), 
Is.EqualTo("<02>Hello<00><15>{<00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00><15>{")); + + range = TuPack.ToRange(Slice.Empty, STuple.Create("Hello", 123)); + Assert.That(range.Begin.ToString(), Is.EqualTo("<02>Hello<00><15>{<00>")); + Assert.That(range.End.ToString(), Is.EqualTo("<02>Hello<00><15>{")); + + } + + private class Thing : ITupleFormattable + { + public int Foo { get; set; } + public string Bar { get; set; } + + ITuple ITupleFormattable.ToTuple() + { + return STuple.Create(this.Foo, this.Bar); + } + + void ITupleFormattable.FromTuple(ITuple tuple) + { + this.Foo = tuple.Get(0); + this.Bar = tuple.Get(1); + } + } + + [Test] + public void Test_TuPack_ValueTuple_Pack() + { + Assert.That( + TuPack.Pack(ValueTuple.Create("hello world")).ToString(), + Is.EqualTo("<02>hello world<00>") + ); + Assert.That( + TuPack.Pack(ValueTuple.Create("hello world", 123)).ToString(), + Is.EqualTo("<02>hello world<00><15>{") + ); + Assert.That( + TuPack.Pack(ValueTuple.Create("hello world", 123, false)).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14>") + ); + Assert.That( + TuPack.Pack(ValueTuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 })).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>") + ); + Assert.That( + TuPack.Pack(ValueTuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI)).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18>") + ); + Assert.That( + TuPack.Pack(ValueTuple.Create("hello world", 123, false, new byte[] { 123, 1, 66, 0, 42 }, Math.PI, -1234L)).ToString(), + Is.EqualTo("<02>hello world<00><15>{<14><01>{<01>B<00>*<00>!<09>!TD-<18><12>-") + ); + + { // Embedded Tuples + var packed = TuPack.Pack(ValueTuple.Create("hello", ValueTuple.Create(123, false), "world")); + Assert.That( + packed.ToString(), + Is.EqualTo("<02>hello<00><03><15>{<14><00><02>world<00>") + ); + var t = TuPack.DecodeKey, string>(packed); + 
Assert.That(t.Item1, Is.EqualTo("hello")); + Assert.That(t.Item2.Item1, Is.EqualTo(123)); + Assert.That(t.Item2.Item2, Is.False); + Assert.That(t.Item3, Is.EqualTo("world")); + } + + } + + } + + +} diff --git a/FoundationDB.Tests/Utils/TupleFacts.cs b/FoundationDB.Tests/Utils/TupleFacts.cs new file mode 100644 index 000000000..8921a5709 --- /dev/null +++ b/FoundationDB.Tests/Utils/TupleFacts.cs @@ -0,0 +1,2127 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +// ReSharper disable AccessToModifiedClosure +namespace Doxense.Collections.Tuples.Tests +{ + using System; + using System.Linq; + using System.Net; + using Doxense.Runtime.Converters; + using FoundationDB.Client.Tests; + using NUnit.Framework; + + [TestFixture] + public class TupleFacts : FdbTest + { + + #region General Use... + + [Test] + public void Test_Tuple_0() + { + var t0 = STuple.Create(); + Assert.That(t0.Count, Is.Zero); + Assert.That(t0.ToArray(), Is.EqualTo(new object[0])); + Assert.That(t0.ToString(), Is.EqualTo("()")); + Assert.That(t0, Is.InstanceOf()); + } + + [Test] + public void Test_Tuple_1() + { + var t1 = STuple.Create("hello world"); + Assert.That(t1.Count, Is.EqualTo(1)); + Assert.That(t1.Item1, Is.EqualTo("hello world")); + Assert.That(t1.Get(0), Is.EqualTo("hello world")); + Assert.That(t1[0], Is.EqualTo("hello world")); + Assert.That(t1.ToArray(), Is.EqualTo(new object[] { "hello world" })); + Assert.That(t1.ToString(), Is.EqualTo("(\"hello world\",)")); + Assert.That(t1, Is.InstanceOf>()); + + Assert.That(STuple.Create(123).GetHashCode(), Is.EqualTo(STuple.Create("Hello", 123).Tail.GetHashCode()), "Hashcode should be stable"); + Assert.That(STuple.Create(123).GetHashCode(), Is.EqualTo(STuple.Create(123L).GetHashCode()), "Hashcode should be stable"); + + // ReSharper disable CannotApplyEqualityOperatorToType + // ReSharper disable EqualExpressionComparison + Assert.That(STuple.Create(123) == STuple.Create(123), Is.True, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123) != STuple.Create(123), Is.False, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123) == STuple.Create(456), Is.False, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123) != STuple.Create(456), Is.True, "op_Inequality should work for struct tuples"); + // ReSharper restore EqualExpressionComparison + // ReSharper restore CannotApplyEqualityOperatorToType + + { // 
Deconstruct + t1.Deconstruct(out string item1); + Assert.That(item1, Is.EqualTo("hello world")); + } + } + + [Test] + public void Test_Tuple_2() + { + var t2 = STuple.Create("hello world", 123); + Assert.That(t2.Count, Is.EqualTo(2)); + Assert.That(t2.Item1, Is.EqualTo("hello world")); + Assert.That(t2.Item2, Is.EqualTo(123)); + Assert.That(t2.Get(0), Is.EqualTo("hello world")); + Assert.That(t2.Get(1), Is.EqualTo(123)); + Assert.That(t2[0], Is.EqualTo("hello world")); + Assert.That(t2[1], Is.EqualTo(123)); + Assert.That(t2.ToArray(), Is.EqualTo(new object[] { "hello world", 123 })); + Assert.That(t2.ToString(), Is.EqualTo("(\"hello world\", 123)")); + Assert.That(t2, Is.InstanceOf>()); + + Assert.That(t2.Tail.Count, Is.EqualTo(1)); + Assert.That(t2.Tail.Item1, Is.EqualTo(123)); + + Assert.That(STuple.Create(123, true).GetHashCode(), Is.EqualTo(STuple.Create("Hello", 123, true).Tail.GetHashCode()), "Hashcode should be stable"); + Assert.That(STuple.Create(123, true).GetHashCode(), Is.EqualTo(STuple.Create(123L, 1).GetHashCode()), "Hashcode should be stable"); + + // ReSharper disable CannotApplyEqualityOperatorToType + // ReSharper disable EqualExpressionComparison + Assert.That(STuple.Create(123, true) == STuple.Create(123, true), Is.True, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true) != STuple.Create(123, true), Is.False, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123, true) == STuple.Create(456, true), Is.False, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true) != STuple.Create(456, true), Is.True, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123, true) == STuple.Create(123, false), Is.False, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true) != STuple.Create(123, false), Is.True, "op_Inequality should work for struct tuples"); + // ReSharper restore EqualExpressionComparison + // ReSharper 
restore CannotApplyEqualityOperatorToType + + { // Deconstruct + t2.Deconstruct(out string item1, out int item2); + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + } + { // Deconstruct + (string item1, int item2) = t2; + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + } + } + + [Test] + public void Test_Tuple_3() + { + var t3 = STuple.Create("hello world", 123, false); + Assert.That(t3.Count, Is.EqualTo(3)); + Assert.That(t3.Item1, Is.EqualTo("hello world")); + Assert.That(t3.Item2, Is.EqualTo(123)); + Assert.That(t3.Item3, Is.False); + Assert.That(t3.Get(0), Is.EqualTo("hello world")); + Assert.That(t3.Get(1), Is.EqualTo(123)); + Assert.That(t3.Get(2), Is.False); + Assert.That(t3[0], Is.EqualTo("hello world")); + Assert.That(t3[1], Is.EqualTo(123)); + Assert.That(t3[2], Is.False); + Assert.That(t3.ToArray(), Is.EqualTo(new object[] { "hello world", 123, false })); + Assert.That(t3.ToString(), Is.EqualTo(@"(""hello world"", 123, false)")); + Assert.That(t3, Is.InstanceOf>()); + + Assert.That(t3.Tail.Count, Is.EqualTo(2)); + Assert.That(t3.Tail.Item1, Is.EqualTo(123)); + Assert.That(t3.Tail.Item2, Is.False); + + Assert.That(STuple.Create(123, true, "foo").GetHashCode(), Is.EqualTo(STuple.Create("Hello", 123, true, "foo").Tail.GetHashCode()), "Hashcode should be stable"); + Assert.That(STuple.Create(123, true, "foo").GetHashCode(), Is.EqualTo(STuple.Create(123L, 1, "foo").GetHashCode()), "Hashcode should be stable"); + + // ReSharper disable CannotApplyEqualityOperatorToType + // ReSharper disable EqualExpressionComparison + Assert.That(STuple.Create(123, true, "foo") == STuple.Create(123, true, "foo"), Is.True, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo") != STuple.Create(123, true, "foo"), Is.False, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo") == STuple.Create(456, true, "foo"), Is.False, 
"op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo") != STuple.Create(456, true, "foo"), Is.True, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo") == STuple.Create(123, false, "foo"), Is.False, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo") != STuple.Create(123, false, "foo"), Is.True, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo") == STuple.Create(123, true, "bar"), Is.False, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo") != STuple.Create(123, true, "bar"), Is.True, "op_Inequality should work for struct tuples"); + // ReSharper restore EqualExpressionComparison + // ReSharper restore CannotApplyEqualityOperatorToType + + { // Deconstruct + t3.Deconstruct(out string item1, out int item2, out bool item3); + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + } + { // Deconstruct + (string item1, int item2, bool item3) = t3; + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + } + } + + [Test] + public void Test_Tuple_4() + { + var t4 = STuple.Create("hello world", 123, false, 1234L); + Assert.That(t4.Count, Is.EqualTo(4)); + Assert.That(t4.Item1, Is.EqualTo("hello world")); + Assert.That(t4.Item2, Is.EqualTo(123)); + Assert.That(t4.Item3, Is.False); + Assert.That(t4.Item4, Is.EqualTo(1234L)); + Assert.That(t4.Get(0), Is.EqualTo("hello world")); + Assert.That(t4.Get(1), Is.EqualTo(123)); + Assert.That(t4.Get(2), Is.False); + Assert.That(t4.Get(3), Is.EqualTo(1234L)); + Assert.That(t4[0], Is.EqualTo("hello world")); + Assert.That(t4[1], Is.EqualTo(123)); + Assert.That(t4[2], Is.False); + Assert.That(t4[3], Is.EqualTo(1234L)); + Assert.That(t4.ToArray(), Is.EqualTo(new object[] { "hello world", 123, false, 1234L})); + 
Assert.That(t4.ToString(), Is.EqualTo(@"(""hello world"", 123, false, 1234)")); + Assert.That(t4, Is.InstanceOf>()); + + Assert.That(t4.Tail.Count, Is.EqualTo(3)); + Assert.That(t4.Tail.Item1, Is.EqualTo(123)); + Assert.That(t4.Tail.Item2, Is.False); + Assert.That(t4.Tail.Item3, Is.EqualTo(1234L)); + + Assert.That(STuple.Create(123, true, "foo", 666).GetHashCode(), Is.EqualTo(STuple.Create("Hello", 123, true, "foo", 666).Tail.GetHashCode()), "Hashcode should be stable"); + Assert.That(STuple.Create(123, true, "foo", 666).GetHashCode(), Is.EqualTo(STuple.Create(123L, 1, "foo", 666UL).GetHashCode()), "Hashcode should be stable"); + + // ReSharper disable CannotApplyEqualityOperatorToType + // ReSharper disable EqualExpressionComparison + Assert.That(STuple.Create(123, true, "foo", 666) == STuple.Create(123, true, "foo", 666), Is.True, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo", 666) != STuple.Create(123, true, "foo", 666), Is.False, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo", 666) == STuple.Create(456, true, "foo", 666), Is.False, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo", 666) != STuple.Create(456, true, "foo", 666), Is.True, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo", 666) == STuple.Create(123, false, "foo", 666), Is.False, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo", 666) != STuple.Create(123, false, "foo", 666), Is.True, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo", 666) == STuple.Create(123, true, "bar", 666), Is.False, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo", 666) != STuple.Create(123, true, "bar", 666), Is.True, "op_Inequality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo", 666) == 
STuple.Create(123, true, "foo", 667), Is.False, "op_Equality should work for struct tuples"); + Assert.That(STuple.Create(123, true, "foo", 666) != STuple.Create(123, true, "foo", 667), Is.True, "op_Inequality should work for struct tuples"); + // ReSharper restore EqualExpressionComparison + // ReSharper restore CannotApplyEqualityOperatorToType + + { // Deconstruct + t4.Deconstruct(out string item1, out int item2, out bool item3, out long item4); + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + Assert.That(item4, Is.EqualTo(1234L)); + } + { // Deconstruct + (string item1, int item2, bool item3, long item4) = t4; + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + Assert.That(item4, Is.EqualTo(1234L)); + } + } + + [Test] + public void Test_Tuple_5() + { + var t5 = STuple.Create("hello world", 123, false, 1234L, -1234); + Assert.That(t5.Count, Is.EqualTo(5)); + Assert.That(t5.Item1, Is.EqualTo("hello world")); + Assert.That(t5.Item2, Is.EqualTo(123)); + Assert.That(t5.Item3, Is.False); + Assert.That(t5.Item4, Is.EqualTo(1234L)); + Assert.That(t5.Item5, Is.EqualTo(-1234)); + Assert.That(t5.Get(0), Is.EqualTo("hello world")); + Assert.That(t5.Get(1), Is.EqualTo(123)); + Assert.That(t5.Get(2), Is.False); + Assert.That(t5.Get(3), Is.EqualTo(1234L)); + Assert.That(t5.Get(4), Is.EqualTo(-1234)); + Assert.That(t5[0], Is.EqualTo("hello world")); + Assert.That(t5[1], Is.EqualTo(123)); + Assert.That(t5[2], Is.False); + Assert.That(t5[3], Is.EqualTo(1234L)); + Assert.That(t5[4], Is.EqualTo(-1234)); + Assert.That(t5.ToArray(), Is.EqualTo(new object[] { "hello world", 123, false, 1234L, -1234 })); + Assert.That(t5.ToString(), Is.EqualTo(@"(""hello world"", 123, false, 1234, -1234)")); + Assert.That(t5, Is.InstanceOf>()); + + Assert.That(t5.Tail.Count, Is.EqualTo(4)); + Assert.That(t5.Tail.Item1, Is.EqualTo(123)); + 
Assert.That(t5.Tail.Item2, Is.False); + Assert.That(t5.Tail.Item3, Is.EqualTo(1234L)); + Assert.That(t5.Tail.Item4, Is.EqualTo(-1234L)); + + Assert.That(STuple.Create(123, true, "foo", 666, false).GetHashCode(), Is.EqualTo(STuple.Create("Hello", 123, true, "foo", 666, false).Tail.GetHashCode()), "Hashcode should be stable"); + Assert.That(STuple.Create(123, true, "foo", 666, false).GetHashCode(), Is.EqualTo(STuple.Create(123L, 1, "foo", 666UL, 0).GetHashCode()), "Hashcode should be stable"); + + { // Deconstruct + t5.Deconstruct(out string item1, out int item2, out bool item3, out long item4, out long item5); + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + Assert.That(item4, Is.EqualTo(1234L)); + Assert.That(item5, Is.EqualTo(-1234L)); + } + { // Deconstruct + (string item1, int item2, bool item3, long item4, long item5) = t5; + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + Assert.That(item4, Is.EqualTo(1234L)); + Assert.That(item5, Is.EqualTo(-1234L)); + } + } + + [Test] + public void Test_Tuple_6() + { + var t6 = STuple.Create("hello world", 123, false, 1234L, -1234, "six"); + Assert.That(t6.Count, Is.EqualTo(6)); + Assert.That(t6.Item1, Is.EqualTo("hello world")); + Assert.That(t6.Item2, Is.EqualTo(123)); + Assert.That(t6.Item3, Is.False); + Assert.That(t6.Item4, Is.EqualTo(1234L)); + Assert.That(t6.Item5, Is.EqualTo(-1234)); + Assert.That(t6.Get(0), Is.EqualTo("hello world")); + Assert.That(t6.Get(1), Is.EqualTo(123)); + Assert.That(t6.Get(2), Is.False); + Assert.That(t6.Get(3), Is.EqualTo(1234L)); + Assert.That(t6.Get(4), Is.EqualTo(-1234)); + Assert.That(t6.Get(5), Is.EqualTo("six")); + Assert.That(t6[0], Is.EqualTo("hello world")); + Assert.That(t6[1], Is.EqualTo(123)); + Assert.That(t6[2], Is.False); + Assert.That(t6[3], Is.EqualTo(1234L)); + Assert.That(t6[4], Is.EqualTo(-1234)); + Assert.That(t6[5], 
Is.EqualTo("six")); + Assert.That(t6.ToArray(), Is.EqualTo(new object[] { "hello world", 123, false, 1234L, -1234, "six" })); + Assert.That(t6.ToString(), Is.EqualTo(@"(""hello world"", 123, false, 1234, -1234, ""six"")")); + Assert.That(t6, Is.InstanceOf>()); + + Assert.That(t6.Tail.Count, Is.EqualTo(5)); + Assert.That(t6.Tail.Item1, Is.EqualTo(123)); + Assert.That(t6.Tail.Item2, Is.False); + Assert.That(t6.Tail.Item3, Is.EqualTo(1234L)); + Assert.That(t6.Tail.Item4, Is.EqualTo(-1234L)); + Assert.That(t6.Tail.Item5, Is.EqualTo("six")); + + Assert.That(STuple.Create(123, true, "foo", 666, false, "bar").GetHashCode(), Is.EqualTo(STuple.Create(123, true, "foo", 666, false).Append("bar").GetHashCode()), "Hashcode should be stable"); + Assert.That(STuple.Create(123, true, "foo", 666, false, "bar").GetHashCode(), Is.EqualTo(STuple.Create(123L, 1, "foo", 666UL, 0, "bar").GetHashCode()), "Hashcode should be stable"); + + { // Deconstruct + t6.Deconstruct(out string item1, out int item2, out bool item3, out long item4, out long item5, out string item6); + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + Assert.That(item4, Is.EqualTo(1234L)); + Assert.That(item5, Is.EqualTo(-1234L)); + Assert.That(item6, Is.EqualTo("six")); + } + { // Deconstruct + (string item1, int item2, bool item3, long item4, long item5, string item6) = t6; + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + Assert.That(item4, Is.EqualTo(1234L)); + Assert.That(item5, Is.EqualTo(-1234L)); + Assert.That(item6, Is.EqualTo("six")); + } + } + + [Test] + public void Test_Tuple_Many() + { + // ReSharper disable once RedundantExplicitParamsArrayCreation + ITuple tn = STuple.Create(new object[] { "hello world", 123, false, 1234L, -1234, "six", true, Math.PI }); + Assert.That(tn.Count, Is.EqualTo(8)); + Assert.That(tn.Get(0), Is.EqualTo("hello world")); + 
Assert.That(tn.Get(1), Is.EqualTo(123)); + Assert.That(tn.Get(2), Is.False); + Assert.That(tn.Get(3), Is.EqualTo(1234)); + Assert.That(tn.Get(4), Is.EqualTo(-1234)); + Assert.That(tn.Get(5), Is.EqualTo("six")); + Assert.That(tn.Get(6), Is.True); + Assert.That(tn.Get(7), Is.EqualTo(Math.PI)); + Assert.That(tn.ToArray(), Is.EqualTo(new object[] { "hello world", 123, false, 1234L, -1234, "six", true, Math.PI })); + Assert.That(tn.ToString(), Is.EqualTo("(\"hello world\", 123, false, 1234, -1234, \"six\", true, 3.1415926535897931)")); + Assert.That(tn, Is.InstanceOf()); + + { // Deconstruct + string item1; + int item2; + bool item3; + long item4; + long item5; + string item6; + bool item7; + double item8; + + Assert.That(() => tn.Deconstruct(out item1), Throws.InvalidOperationException); + Assert.That(() => tn.Deconstruct(out item1, out item2), Throws.InvalidOperationException); + Assert.That(() => tn.Deconstruct(out item1, out item2, out item3), Throws.InvalidOperationException); + Assert.That(() => tn.Deconstruct(out item1, out item2, out item3, out item4), Throws.InvalidOperationException); + Assert.That(() => tn.Deconstruct(out item1, out item2, out item3, out item4, out item5), Throws.InvalidOperationException); + Assert.That(() => tn.Deconstruct(out item1, out item2, out item3, out item4, out item5, out item6), Throws.InvalidOperationException); + Assert.That(() => tn.Deconstruct(out item1, out item2, out item3, out item4, out item5, out item6, out item7), Throws.InvalidOperationException); + + tn.Deconstruct(out item1, out item2, out item3, out item4, out item5, out item6, out item7, out item8); + Assert.That(item1, Is.EqualTo("hello world")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + Assert.That(item4, Is.EqualTo(1234)); + Assert.That(item5, Is.EqualTo(-1234)); + Assert.That(item6, Is.EqualTo("six")); + Assert.That(item7, Is.True); + Assert.That(item8, Is.EqualTo(Math.PI)); + } + } + + [Test] + public void Test_Tuple_Wrap() + { + 
// STuple.Wrap(...) does not copy the items of the array + + var arr = new object[] { "Hello", 123, false, TimeSpan.FromSeconds(5) }; + + var t = STuple.Wrap(arr); + Assert.That(t, Is.Not.Null); + Assert.That(t.Count, Is.EqualTo(4)); + Assert.That(t[0], Is.EqualTo("Hello")); + Assert.That(t[1], Is.EqualTo(123)); + Assert.That(t[2], Is.False); + Assert.That(t[3], Is.EqualTo(TimeSpan.FromSeconds(5))); + + { // Deconstruct + t.Deconstruct(out string item1, out int item2, out bool item3, out TimeSpan item4); + Assert.That(item1, Is.EqualTo("Hello")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + Assert.That(item4, Is.EqualTo(TimeSpan.FromSeconds(5))); + } + + t = STuple.Wrap(arr, 1, 2); + Assert.That(t, Is.Not.Null); + Assert.That(t.Count, Is.EqualTo(2)); + Assert.That(t[0], Is.EqualTo(123)); + Assert.That(t[1], Is.False); + + // changing the underyling array should change the tuple + // DON'T DO THIS IN ACTUAL CODE!!! + + arr[1] = 456; + arr[2] = true; + Log($"t = {t}"); + + Assert.That(t[0], Is.EqualTo(456)); + Assert.That(t[1], Is.True); + + { // Deconstruct + t.Deconstruct(out int item1, out bool item2); + Assert.That(item1, Is.EqualTo(456)); + Assert.That(item2, Is.True); + } + + } + + [Test] + public void Test_Tuple_FromObjects() + { + // STuple.FromObjects(...) 
does a copy of the items of the array + + var arr = new object[] { "Hello", 123, false, TimeSpan.FromSeconds(5) }; + + var t = STuple.FromObjects(arr); + Log($"t = {t}"); + Assert.That(t, Is.Not.Null); + Assert.That(t.Count, Is.EqualTo(4)); + Assert.That(t[0], Is.EqualTo("Hello")); + Assert.That(t[1], Is.EqualTo(123)); + Assert.That(t[2], Is.False); + Assert.That(t[3], Is.EqualTo(TimeSpan.FromSeconds(5))); + + { // Deconstruct + t.Deconstruct(out string item1, out int item2, out bool item3, out TimeSpan item4); + Assert.That(item1, Is.EqualTo("Hello")); + Assert.That(item2, Is.EqualTo(123)); + Assert.That(item3, Is.False); + Assert.That(item4, Is.EqualTo(TimeSpan.FromSeconds(5))); + } + + t = STuple.FromObjects(arr, 1, 2); + Log($"t = {t}"); + Assert.That(t, Is.Not.Null); + Assert.That(t.Count, Is.EqualTo(2)); + Assert.That(t[0], Is.EqualTo(123)); + Assert.That(t[1], Is.False); + + { // Deconstruct + t.Deconstruct(out int item1, out bool item2); + Assert.That(item1, Is.EqualTo(123)); + Assert.That(item2, Is.False); + } + + // changing the underyling array should NOT change the tuple + + arr[1] = 456; + arr[2] = true; + Log($"t = {t}"); + + Assert.That(t[0], Is.EqualTo(123)); + Assert.That(t[1], Is.False); + + } + + [Test] + public void Test_Tuple_FromArray() + { + var items = new[] { "Bonjour", "le", "Monde" }; + + var t = STuple.FromArray(items); + Log($"t = {t}"); + Assert.That(t, Is.Not.Null); + Assert.That(t.Count, Is.EqualTo(3)); + Assert.That(t[0], Is.EqualTo("Bonjour")); + Assert.That(t[1], Is.EqualTo("le")); + Assert.That(t[2], Is.EqualTo("Monde")); + + { // Deconstruct + t.Deconstruct(out string item1, out string item2, out string item3); + Assert.That(item1, Is.EqualTo("Bonjour")); + Assert.That(item2, Is.EqualTo("le")); + Assert.That(item3, Is.EqualTo("Monde")); + } + + t = STuple.FromArray(items, 1, 2); + Log($"t = {t}"); + Assert.That(t, Is.Not.Null); + Assert.That(t.Count, Is.EqualTo(2)); + Assert.That(t[0], Is.EqualTo("le")); + Assert.That(t[1], 
Is.EqualTo("Monde")); + { // Deconstruct + t.Deconstruct(out string item1, out string item2); + Assert.That(item1, Is.EqualTo("le")); + Assert.That(item2, Is.EqualTo("Monde")); + } + + // changing the underlying array should NOT change the tuple + items[1] = "ze"; + Log($"t = {t}"); + + Assert.That(t[0], Is.EqualTo("le")); + } + + [Test] + public void Test_Tuple_Negative_Indexing() + { + var t1 = STuple.Create("hello world"); + Assert.That(t1.Get(-1), Is.EqualTo("hello world")); + Assert.That(t1[-1], Is.EqualTo("hello world")); + + var t2 = STuple.Create("hello world", 123); + Assert.That(t2.Get(-1), Is.EqualTo(123)); + Assert.That(t2.Get(-2), Is.EqualTo("hello world")); + Assert.That(t2[-1], Is.EqualTo(123)); + Assert.That(t2[-2], Is.EqualTo("hello world")); + + var t3 = STuple.Create("hello world", 123, false); + Assert.That(t3.Get(-1), Is.False); + Assert.That(t3.Get(-2), Is.EqualTo(123)); + Assert.That(t3.Get(-3), Is.EqualTo("hello world")); + Assert.That(t3[-1], Is.False); + Assert.That(t3[-2], Is.EqualTo(123)); + Assert.That(t3[-3], Is.EqualTo("hello world")); + + var t4 = STuple.Create("hello world", 123, false, 1234L); + Assert.That(t4.Get(-1), Is.EqualTo(1234L)); + Assert.That(t4.Get(-2), Is.False); + Assert.That(t4.Get(-3), Is.EqualTo(123)); + Assert.That(t4.Get(-4), Is.EqualTo("hello world")); + Assert.That(t4[-1], Is.EqualTo(1234L)); + Assert.That(t4[-2], Is.False); + Assert.That(t4[-3], Is.EqualTo(123)); + Assert.That(t4[-4], Is.EqualTo("hello world")); + + var t5 = STuple.Create("hello world", 123, false, 1234L, -1234); + Assert.That(t5.Get(-1), Is.EqualTo(-1234)); + Assert.That(t5.Get(-2), Is.EqualTo(1234L)); + Assert.That(t5.Get(-3), Is.False); + Assert.That(t5.Get(-4), Is.EqualTo(123)); + Assert.That(t5.Get(-5), Is.EqualTo("hello world")); + Assert.That(t5[-1], Is.EqualTo(-1234)); + Assert.That(t5[-2], Is.EqualTo(1234L)); + Assert.That(t5[-3], Is.False); + Assert.That(t5[-4], Is.EqualTo(123)); + Assert.That(t5[-5], Is.EqualTo("hello world")); + + 
// ReSharper disable once RedundantExplicitParamsArrayCreation + var tn = STuple.Create(new object[] { "hello world", 123, false, 1234, -1234, "six" }); + Assert.That(tn.Get(-1), Is.EqualTo("six")); + Assert.That(tn.Get(-2), Is.EqualTo(-1234)); + Assert.That(tn.Get(-3), Is.EqualTo(1234)); + Assert.That(tn.Get(-4), Is.False); + Assert.That(tn.Get(-5), Is.EqualTo(123)); + Assert.That(tn.Get(-6), Is.EqualTo("hello world")); + Assert.That(tn[-1], Is.EqualTo("six")); + Assert.That(tn[-2], Is.EqualTo(-1234)); + Assert.That(tn[-3], Is.EqualTo(1234)); + Assert.That(tn[-4], Is.False); + Assert.That(tn[-5], Is.EqualTo(123)); + Assert.That(tn[-6], Is.EqualTo("hello world")); + } + + [Test] + public void Test_Tuple_First_And_Last() + { + // tuple.First() should be equivalent to tuple.Get(0) + // tuple.Last() should be equivalent to tuple.Get(-1) + + var t1 = STuple.Create(1); + Assert.That(t1.First(), Is.EqualTo(1)); + Assert.That(t1.First(), Is.EqualTo("1")); + Assert.That(((ITuple)t1).Last(), Is.EqualTo(1)); + Assert.That(((ITuple)t1).Last(), Is.EqualTo("1")); + + var t2 = STuple.Create(1, 2); + Assert.That(t2.First(), Is.EqualTo(1)); + Assert.That(t2.First(), Is.EqualTo("1")); + Assert.That(t2.Last, Is.EqualTo(2)); + Assert.That(((ITuple)t2).Last(), Is.EqualTo(2)); + Assert.That(((ITuple)t2).Last(), Is.EqualTo("2")); + + var t3 = STuple.Create(1, 2, 3); + Assert.That(t3.First(), Is.EqualTo(1)); + Assert.That(t3.First(), Is.EqualTo("1")); + Assert.That(t3.Last, Is.EqualTo(3)); + Assert.That(((ITuple)t3).Last(), Is.EqualTo(3)); + Assert.That(((ITuple)t3).Last(), Is.EqualTo("3")); + + var t4 = STuple.Create(1, 2, 3, 4); + Assert.That(t4.First(), Is.EqualTo(1)); + Assert.That(t4.First(), Is.EqualTo("1")); + Assert.That(t4.Last, Is.EqualTo(4)); + Assert.That(((ITuple)t4).Last(), Is.EqualTo(4)); + Assert.That(((ITuple)t4).Last(), Is.EqualTo("4")); + + var t5 = STuple.Create(1, 2, 3, 4, 5); + Assert.That(t5.First(), Is.EqualTo(1)); + Assert.That(t5.First(), Is.EqualTo("1")); + 
Assert.That(t5.Last, Is.EqualTo(5)); + Assert.That(((ITuple)t5).Last(), Is.EqualTo(5)); + Assert.That(((ITuple)t5).Last(), Is.EqualTo("5")); + + var tn = STuple.Create(1, 2, 3, 4, 5, 6); + Assert.That(tn.First(), Is.EqualTo(1)); + Assert.That(tn.First(), Is.EqualTo("1")); + Assert.That(tn.Last(), Is.EqualTo(6)); + Assert.That(tn.Last(), Is.EqualTo("6")); + + Assert.That(() => ((ITuple) STuple.Empty).First(), Throws.InstanceOf()); + Assert.That(() => ((ITuple) STuple.Empty).Last(), Throws.InstanceOf()); + } + + [Test] + public void Test_Tuple_CreateBoxed() + { + ITuple tuple; + + tuple = STuple.CreateBoxed(default(object)); + Assert.That(tuple.Count, Is.EqualTo(1)); + Assert.That(tuple[0], Is.Null); + + tuple = STuple.CreateBoxed(1); + Assert.That(tuple.Count, Is.EqualTo(1)); + Assert.That(tuple[0], Is.EqualTo(1)); + + tuple = STuple.CreateBoxed(1L); + Assert.That(tuple.Count, Is.EqualTo(1)); + Assert.That(tuple[0], Is.EqualTo(1L)); + + tuple = STuple.CreateBoxed(false); + Assert.That(tuple.Count, Is.EqualTo(1)); + Assert.That(tuple[0], Is.False); + + tuple = STuple.CreateBoxed("hello"); + Assert.That(tuple.Count, Is.EqualTo(1)); + Assert.That(tuple[0], Is.EqualTo("hello")); + + tuple = STuple.CreateBoxed(new byte[] { 1, 2, 3 }); + Assert.That(tuple.Count, Is.EqualTo(1)); + Assert.That(tuple[0], Is.EqualTo(new byte[] { 1, 2, 3 }.AsSlice())); + } + + [Test] + public void Test_Tuple_Embedded_Tuples() + { + // (A,B).Append((C,D)) should return (A,B,(C,D)) (length 3) and not (A,B,C,D) (length 4) + + STuple x = STuple.Create("A", "B"); + STuple y = STuple.Create("C", "D"); + + // using the instance method that returns a STuple + ITuple z = x.Append(y); + Log(z); + Assert.That(z, Is.Not.Null); + Assert.That(z.Count, Is.EqualTo(3)); + Assert.That(z[0], Is.EqualTo("A")); + Assert.That(z[1], Is.EqualTo("B")); + Assert.That(z[2], Is.EqualTo(y)); + var t = z.Get(2); + Assert.That(t, Is.Not.Null); + Assert.That(t.Count, Is.EqualTo(2)); + Assert.That(t[0], Is.EqualTo("C")); + 
Assert.That(t[1], Is.EqualTo("D")); + + // casted down to the interface ITuple + z = ((ITuple)x).Append((ITuple)y); + Log(z); + Assert.That(z, Is.Not.Null); + Assert.That(z.Count, Is.EqualTo(3)); + Assert.That(z[0], Is.EqualTo("A")); + Assert.That(z[1], Is.EqualTo("B")); + Assert.That(z[2], Is.EqualTo(y)); + t = z.Get(2); + Assert.That(t, Is.Not.Null); + Assert.That(t.Count, Is.EqualTo(2)); + Assert.That(t[0], Is.EqualTo("C")); + Assert.That(t[1], Is.EqualTo("D")); + + // composite index key "(prefix, value, id)" + ITuple subspace = STuple.Create(123, 42); + ITuple value = STuple.Create(2014, 11, 6); // Indexing a date value (Y, M, D) + const string ID = "Doc123"; + z = subspace.Append(value, ID); + Log(z); + Assert.That(z.Count, Is.EqualTo(4)); + } + + [Test] + public void Test_Tuple_With() + { + //note: important to always cast to (ITuple) to be sure that we don't call specialized instance methods (tested elsewhere) + ITuple t; + bool called; + + // Size 1 + + t = STuple.Create(123); + called = false; + t.With((int a) => + { + called = true; + Assert.That(a, Is.EqualTo(123)); + }); + Assert.That(called, Is.True); + Assert.That(t.With((int a) => + { + Assert.That(a, Is.EqualTo(123)); + return 42; + }), Is.EqualTo(42)); + Assert.That(() => t.With((int a) => throw new InvalidOperationException("BOOM")), Throws.InvalidOperationException.With.Message.EqualTo("BOOM")); + + // Size 2 + + t = t.Append("abc"); + called = false; + t.With((int a, string b) => + { + called = true; + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + }); + Assert.That(called, Is.True); + Assert.That(t.With((int a, string b) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + return 42; + }), Is.EqualTo(42)); + + // Size 3 + + t = t.Append(3.14f); + called = false; + t.With((int a, string b, float c) => + { + called = true; + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + }); + 
Assert.That(called, Is.True); + Assert.That(t.With((int a, string b, float c) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + return 42; + }), Is.EqualTo(42)); + + // Size 4 + + t = t.Append(true); + called = false; + t.With((int a, string b, float c, bool d) => + { + called = true; + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + }); + Assert.That(called, Is.True); + Assert.That(t.With((int a, string b, float c, bool d) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + return 42; + }), Is.EqualTo(42)); + + // Size 5 + + t = t.Append('z'); + called = false; + t.With((int a, string b, float c, bool d, char e) => + { + called = true; + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + Assert.That(e, Is.EqualTo('z')); + }); + Assert.That(called, Is.True); + Assert.That(t.With((int a, string b, float c, bool d, char e) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + Assert.That(e, Is.EqualTo('z')); + return 42; + }), Is.EqualTo(42)); + + // Size 6 + + t = t.Append(Math.PI); + called = false; + t.With((int a, string b, float c, bool d, char e, double f) => + { + called = true; + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + Assert.That(e, Is.EqualTo('z')); + Assert.That(f, Is.EqualTo(Math.PI)); + }); + Assert.That(called, Is.True); + Assert.That(t.With((int a, string b, float c, bool d, char e, double f) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + 
Assert.That(e, Is.EqualTo('z')); + Assert.That(f, Is.EqualTo(Math.PI)); + return 42; + }), Is.EqualTo(42)); + + // Size 7 + + t = t.Append(IPAddress.Loopback); + called = false; + t.With((int a, string b, float c, bool d, char e, double f, IPAddress g) => + { + called = true; + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + Assert.That(e, Is.EqualTo('z')); + Assert.That(f, Is.EqualTo(Math.PI)); + Assert.That(g, Is.EqualTo(IPAddress.Loopback)); + }); + Assert.That(called, Is.True); + Assert.That(t.With((int a, string b, float c, bool d, char e, double f, IPAddress g) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + Assert.That(e, Is.EqualTo('z')); + Assert.That(f, Is.EqualTo(Math.PI)); + Assert.That(g, Is.EqualTo(IPAddress.Loopback)); + return 42; + }), Is.EqualTo(42)); + + // Size 8 + + t = t.Append(DateTime.MaxValue); + called = false; + t.With((int a, string b, float c, bool d, char e, double f, IPAddress g, DateTime h) => + { + called = true; + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + Assert.That(e, Is.EqualTo('z')); + Assert.That(f, Is.EqualTo(Math.PI)); + Assert.That(g, Is.EqualTo(IPAddress.Loopback)); + Assert.That(h, Is.EqualTo(DateTime.MaxValue)); + }); + Assert.That(called, Is.True); + Assert.That(t.With((int a, string b, float c, bool d, char e, double f, IPAddress g, DateTime h) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + Assert.That(e, Is.EqualTo('z')); + Assert.That(f, Is.EqualTo(Math.PI)); + Assert.That(g, Is.EqualTo(IPAddress.Loopback)); + Assert.That(h, Is.EqualTo(DateTime.MaxValue)); + return 42; + }), Is.EqualTo(42)); + } + + [Test] + public void Test_Tuple_With_Struct() + 
{ + // calling With() on the structs is faster + + STuple t1 = STuple.Create(123); + t1.With((a) => + { + Assert.That(a, Is.EqualTo(123)); + }); + Assert.That(t1.With((a) => + { + Assert.That(a, Is.EqualTo(123)); + return 42; + }), Is.EqualTo(42)); + + STuple t2 = STuple.Create(123, "abc"); + t2.With((a, b) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + }); + Assert.That(t2.With((a, b) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + return 42; + }), Is.EqualTo(42)); + + STuple t3 = STuple.Create(123, "abc", 3.14f); + t3.With((a, b, c) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + }); + Assert.That(t3.With((a, b, c) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + return 42; + }), Is.EqualTo(42)); + + STuple t4 = STuple.Create(123, "abc", 3.14f, true); + t4.With((a, b, c, d) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + }); + Assert.That(t4.With((a, b, c, d) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + return 42; + }), Is.EqualTo(42)); + + STuple t5 = STuple.Create(123, "abc", 3.14f, true, 'z'); + t5.With((a, b, c, d, e) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + Assert.That(e, Is.EqualTo('z')); + }); + Assert.That(t5.With((a, b, c, d, e) => + { + Assert.That(a, Is.EqualTo(123)); + Assert.That(b, Is.EqualTo("abc")); + Assert.That(c, Is.EqualTo(3.14f)); + Assert.That(d, Is.True); + Assert.That(e, Is.EqualTo('z')); + return 42; + }), Is.EqualTo(42)); + + //TODO: add more if we ever add struct tuples with 6 or more items + } + + [Test] + public void Test_Tuple_Of_Size() 
+ { + // OfSize(n) check the size and return the tuple if it passed + // VerifySize(n) only check the size + // Both should throw if tuple is null, or not the expected size + + void Verify(ITuple t) + { + for (int i = 0; i <= 10; i++) + { + if (t.Count > i) + { + Assert.That(() => t.OfSize(i), Throws.InstanceOf()); + Assert.That(t.OfSizeAtLeast(i), Is.SameAs(t)); + Assert.That(() => t.OfSizeAtMost(i), Throws.InstanceOf()); + } + else if (t.Count < i) + { + Assert.That(() => t.OfSize(i), Throws.InstanceOf()); + Assert.That(() => t.OfSizeAtLeast(i), Throws.InstanceOf()); + Assert.That(t.OfSizeAtMost(i), Is.SameAs(t)); + } + else + { + Assert.That(t.OfSize(i), Is.SameAs(t)); + Assert.That(t.OfSizeAtLeast(i), Is.SameAs(t)); + Assert.That(t.OfSizeAtMost(i), Is.SameAs(t)); + } + } + } + + Verify(STuple.Empty); + Verify(STuple.Create(123)); + Verify(STuple.Create(123, "abc")); + Verify(STuple.Create(123, "abc", 3.14f)); + Verify(STuple.Create(123, "abc", 3.14f, true)); + Verify(STuple.Create(123, "abc", 3.14f, true, 'z')); + Verify(STuple.FromArray(new[] { "hello", "world", "!" 
})); + Verify(STuple.FromEnumerable(Enumerable.Range(0, 10))); + + Verify(STuple.Create(123, "abc", 3.14f, true, 'z')[0, 2]); + Verify(STuple.Create(123, "abc", 3.14f, true, 'z')[1, 4]); + Verify(STuple.FromEnumerable(Enumerable.Range(0, 50)).Substring(15, 6)); + + // ReSharper disable ExpressionIsAlwaysNull + ITuple none = null; + Assert.That(() => none.OfSize(0), Throws.ArgumentNullException); + Assert.That(() => none.OfSizeAtLeast(0), Throws.ArgumentNullException); + Assert.That(() => none.OfSizeAtMost(0), Throws.ArgumentNullException); + // ReSharper restore ExpressionIsAlwaysNull + } + + [Test] + public void Test_Tuple_Truncate() + { + ITuple t = STuple.Create("Hello", 123, false, TimeSpan.FromSeconds(5), "World"); + + var head = t.Truncate(1); + Assert.That(head, Is.Not.Null); + Assert.That(head.Count, Is.EqualTo(1)); + Assert.That(head[0], Is.EqualTo("Hello")); + + head = t.Truncate(2); + Assert.That(head, Is.Not.Null); + Assert.That(head.Count, Is.EqualTo(2)); + Assert.That(head[0], Is.EqualTo("Hello")); + Assert.That(head[1], Is.EqualTo(123)); + + head = t.Truncate(5); + Assert.That(head, Is.EqualTo(t)); + + var tail = t.Truncate(-1); + Assert.That(tail, Is.Not.Null); + Assert.That(tail.Count, Is.EqualTo(1)); + Assert.That(tail[0], Is.EqualTo("World")); + + tail = t.Truncate(-2); + Assert.That(tail, Is.Not.Null); + Assert.That(tail.Count, Is.EqualTo(2)); + Assert.That(tail[0], Is.EqualTo(TimeSpan.FromSeconds(5))); + Assert.That(tail[1], Is.EqualTo("World")); + + tail = t.Truncate(-5); + Assert.That(tail, Is.EqualTo(t)); + + Assert.That(t.Truncate(0), Is.EqualTo(STuple.Empty)); + Assert.That(() => t.Truncate(6), Throws.InstanceOf()); + Assert.That(() => t.Truncate(-6), Throws.InstanceOf()); + + Assert.That(() => STuple.Empty.Truncate(1), Throws.InstanceOf()); + Assert.That(() => STuple.Create("Hello", "World").Truncate(3), Throws.InstanceOf()); + Assert.That(() => STuple.Create("Hello", "World").Truncate(-3), Throws.InstanceOf()); + } + + [Test] + public 
void Test_Tuple_As() + { + // ITuple.As<...>() adds types to an untyped ITuple + ITuple t; + + t = STuple.Create("Hello"); + var t1 = t.As(); + Assert.That(t1.Item1, Is.EqualTo("Hello")); + + t = STuple.Create("Hello", 123); + var t2 = t.As(); + Assert.That(t2.Item1, Is.EqualTo("Hello")); + Assert.That(t2.Item2, Is.EqualTo(123)); + + t = STuple.Create("Hello", 123, false); + var t3 = t.As(); + Assert.That(t3.Item1, Is.EqualTo("Hello")); + Assert.That(t3.Item2, Is.EqualTo(123)); + Assert.That(t3.Item3, Is.False); + + var t4 = STuple + .Create("Hello", 123, false, TimeSpan.FromSeconds(5)) + .As(); + Assert.That(t4.Item1, Is.EqualTo("Hello")); + Assert.That(t4.Item2, Is.EqualTo(123)); + Assert.That(t4.Item3, Is.False); + Assert.That(t4.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); + + t = STuple.Create("Hello", 123, false, TimeSpan.FromSeconds(5), "World"); + var t5 = t.As(); + Assert.That(t5.Item1, Is.EqualTo("Hello")); + Assert.That(t5.Item2, Is.EqualTo(123)); + Assert.That(t5.Item3, Is.False); + Assert.That(t5.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); + Assert.That(t5.Item5, Is.EqualTo("World")); + } + + [Test] + public void Test_Cast_To_BCL_Tuples() + { + // implicit: Tuple => ITuple + // explicit: ITuple => Tuple + + var t1 = STuple.Create("Hello"); + var b1 = (Tuple) t1; // explicit + Assert.That(b1, Is.Not.Null); + Assert.That(b1.Item1, Is.EqualTo("Hello")); + STuple r1 = t1; // implicit + Assert.That(r1.Item1, Is.EqualTo("Hello")); + + var t2 = STuple.Create("Hello", 123); + var b2 = (Tuple)t2; // explicit + Assert.That(b2, Is.Not.Null); + Assert.That(b2.Item1, Is.EqualTo("Hello")); + Assert.That(b2.Item2, Is.EqualTo(123)); + STuple r2 = t2; // implicit + Assert.That(r2.Item1, Is.EqualTo("Hello")); + Assert.That(r2.Item2, Is.EqualTo(123)); + + var t3 = STuple.Create("Hello", 123, false); + var b3 = (Tuple)t3; // explicit + Assert.That(b3, Is.Not.Null); + Assert.That(b3.Item1, Is.EqualTo("Hello")); + Assert.That(b3.Item2, Is.EqualTo(123)); + 
Assert.That(b3.Item3, Is.False); + STuple r3 = t3; // implicit + Assert.That(r3.Item1, Is.EqualTo("Hello")); + Assert.That(r3.Item2, Is.EqualTo(123)); + Assert.That(r3.Item3, Is.False); + + var t4 = STuple.Create("Hello", 123, false, TimeSpan.FromSeconds(5)); + var b4 = (Tuple)t4; // explicit + Assert.That(b4, Is.Not.Null); + Assert.That(b4.Item1, Is.EqualTo("Hello")); + Assert.That(b4.Item2, Is.EqualTo(123)); + Assert.That(b4.Item3, Is.False); + Assert.That(b4.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); + STuple r4 = t4; // implicit + Assert.That(r4.Item1, Is.EqualTo("Hello")); + Assert.That(r4.Item2, Is.EqualTo(123)); + Assert.That(r4.Item3, Is.False); + Assert.That(r4.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); + + var t5 = STuple.Create("Hello", 123, false, TimeSpan.FromSeconds(5), "World"); + var b5 = (Tuple)t5; // explicit + Assert.That(b5, Is.Not.Null); + Assert.That(b5.Item1, Is.EqualTo("Hello")); + Assert.That(b5.Item2, Is.EqualTo(123)); + Assert.That(b5.Item3, Is.False); + Assert.That(b5.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); + Assert.That(b5.Item5, Is.EqualTo("World")); + STuple r5 = t5; // implicit + Assert.That(r5.Item1, Is.EqualTo("Hello")); + Assert.That(r5.Item2, Is.EqualTo(123)); + Assert.That(r5.Item3, Is.False); + Assert.That(r5.Item4, Is.EqualTo(TimeSpan.FromSeconds(5))); + Assert.That(r5.Item5, Is.EqualTo("World")); + + } + + [Test] + public void Test_Tuple_Stringify() + { + // typed tuples + Assert.That(STuple.Empty.ToString(), Is.EqualTo("()")); + Assert.That(STuple.Create("hello world").ToString(), Is.EqualTo(@"(""hello world"",)")); + Assert.That(STuple.Create(true).ToString(), Is.EqualTo("(true,)")); + Assert.That(STuple.Create(123).ToString(), Is.EqualTo("(123,)")); + Assert.That(STuple.Create(123U).ToString(), Is.EqualTo("(123,)")); + Assert.That(STuple.Create(123L).ToString(), Is.EqualTo("(123,)")); + Assert.That(STuple.Create(123UL).ToString(), Is.EqualTo("(123,)")); + Assert.That(STuple.Create(123.4d).ToString(), 
Is.EqualTo("(123.4,)")); + Assert.That(STuple.Create(123.4f).ToString(), Is.EqualTo("(123.4,)")); + Assert.That(STuple.Create(Guid.Parse("102cb0aa-2151-4c72-9e9d-61cf2980cbd0")).ToString(), Is.EqualTo("({102cb0aa-2151-4c72-9e9d-61cf2980cbd0},)")); + Assert.That(STuple.Create(Uuid128.Parse("102cb0aa-2151-4c72-9e9d-61cf2980cbd0")).ToString(), Is.EqualTo("({102cb0aa-2151-4c72-9e9d-61cf2980cbd0},)")); + Assert.That(STuple.Create(Uuid64.Parse("102cb0aa-21514c72")).ToString(), Is.EqualTo("({102CB0AA-21514C72},)")); + Assert.That(STuple.Create(new byte[] { 0x02, 0x41, 0x42, 0x43, 0x00 }).ToString(), Is.EqualTo("(`<02>ABC<00>`,)")); + Assert.That(STuple.Create(new byte[] { 0x02, 0x41, 0x42, 0x43, 0x00 }.AsSlice()).ToString(), Is.EqualTo("(`<02>ABC<00>`,)")); + + Assert.That(STuple.Create("Hello", 123, "World", '!', false).ToString(), Is.EqualTo(@"(""Hello"", 123, ""World"", '!', false)")); + } + + #endregion + + #region Splicing... + + private static void VerifyTuple(string message, ITuple t, object[] expected) + { + // count + if (t.Count != expected.Length) + { +#if DEBUG + if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break(); +#endif + Assert.Fail("{0}: Count mismatch between observed {1} and expected {2} for tuple of type {3}", message, t, STuple.Formatter.ToString(expected), t.GetType().Name); + } + + // direct access + for (int i = 0; i < expected.Length; i++) + { + Assert.That(ComparisonHelper.AreSimilar(t[i], expected[i]), Is.True, "{0}: t[{1}] != expected[{1}]", message, i); + } + + // iterator + int p = 0; + foreach (var obj in t) + { + if (p >= expected.Length) Assert.Fail("Spliced iterator overshoot at t[{0}] = {1}", p, obj); + Assert.That(ComparisonHelper.AreSimilar(obj, expected[p]), Is.True, "{0}: Iterator[{1}], {2} ~= {3}", message, p, obj, expected[p]); + ++p; + } + Assert.That(p, Is.EqualTo(expected.Length), "{0}: t.GetEnumerator() returned only {1} elements out of {2} exected", message, p, expected.Length); + + // CopyTo + var 
tmp = new object[expected.Length]; + t.CopyTo(tmp, 0); + for (int i = 0; i < tmp.Length; i++) + { + Assert.That(ComparisonHelper.AreSimilar(tmp[i], expected[i]), Is.True, "{0}: CopyTo[{1}], {2} ~= {3}", message, i, tmp[i], expected[i]); + } + + // Memoize + //tmp = t.Memoize().ToArray(); + //for (int i = 0; i < tmp.Length; i++) + //{ + // Assert.That(ComparisonHelper.AreSimilar(tmp[i], expected[i]), Is.True, "{0}: Memoize.Items[{1}], {2} ~= {3}", message, i, tmp[i], expected[i]); + //} + + // Append + //if (!(t is SlicedTuple)) + { + var u = t.Append("last"); + Assert.That(u.Get(-1), Is.EqualTo("last")); + tmp = u.ToArray(); + for (int i = 0; i < tmp.Length - 1; i++) + { + Assert.That(ComparisonHelper.AreSimilar(tmp[i], expected[i]), Is.True, "{0}: Appended[{1}], {2} ~= {3}", message, i, tmp[i], expected[i]); + } + } + } + + [Test] + public void Test_Can_Splice_ListTuple() + { + var items = new object[] { "hello", "world", 123, "foo", 456, "bar" }; + // 0 1 2 3 4 5 + // -6 -5 -4 -3 -2 -1 + + var tuple = new ListTuple(items); + Assert.That(tuple.Count, Is.EqualTo(6)); + + // get all + VerifyTuple("[:]", tuple[null, null], items); + VerifyTuple("[:]", tuple[null, 6], items); + VerifyTuple("[:]", tuple[0, null], items); + VerifyTuple("[:]", tuple[0, 6], items); + VerifyTuple("[:]", tuple[0, null], items); + VerifyTuple("[:]", tuple[-6, null], items); + VerifyTuple("[:]", tuple[-6, 6], items); + + // tail + VerifyTuple("[n:]", tuple[4, null], new object[] { 456, "bar" }); + VerifyTuple("[n:+]", tuple[4, 6], new object[] { 456, "bar" }); + VerifyTuple("[-n:+]", tuple[-2, 6], new object[] { 456, "bar" }); + VerifyTuple("[-n:-]", tuple[-2, null], new object[] { 456, "bar" }); + + // head + VerifyTuple("[:n]", tuple[null, 3], new object[] { "hello", "world", 123 }); + VerifyTuple("[0:n]", tuple[0, 3], new object[] { "hello", "world", 123 }); + VerifyTuple("[0:-n]", tuple[0, -3], new object[] { "hello", "world", 123 }); + VerifyTuple("[-:n]", tuple[-6, 3], new object[] { 
"hello", "world", 123 }); + VerifyTuple("[-:-n]", tuple[-6, -3], new object[] { "hello", "world", 123 }); + + // single + VerifyTuple("[0:1]", tuple[0, 1], new object[] { "hello" }); + VerifyTuple("[-6:-5]", tuple[-6, -5], new object[] { "hello" }); + VerifyTuple("[1:2]", tuple[1, 2], new object[] { "world" }); + VerifyTuple("[-5:-4]", tuple[-5, -4], new object[] { "world" }); + VerifyTuple("[5:6]", tuple[5, 6], new object[] { "bar" }); + VerifyTuple("[-1:]", tuple[-1, null], new object[] { "bar" }); + + // chunk + VerifyTuple("[2:4]", tuple[2, 4], new object[] { 123, "foo" }); + VerifyTuple("[2:-2]", tuple[2, -2], new object[] { 123, "foo" }); + VerifyTuple("[-4:4]", tuple[-4, 4], new object[] { 123, "foo" }); + VerifyTuple("[-4:-2]", tuple[-4, -2], new object[] { 123, "foo" }); + + // remove first + VerifyTuple("[1:]", tuple[1, null], new object[] { "world", 123, "foo", 456, "bar" }); + VerifyTuple("[1:+]", tuple[1, 6], new object[] { "world", 123, "foo", 456, "bar" }); + VerifyTuple("[-5:]", tuple[-5, null], new object[] { "world", 123, "foo", 456, "bar" }); + VerifyTuple("[-5:+]", tuple[-5, 6], new object[] { "world", 123, "foo", 456, "bar" }); + + // remove last + VerifyTuple("[:5]", tuple[null, 5], new object[] { "hello", "world", 123, "foo", 456 }); + VerifyTuple("[:-1]", tuple[null, -1], new object[] { "hello", "world", 123, "foo", 456 }); + VerifyTuple("[0:5]", tuple[0, 5], new object[] { "hello", "world", 123, "foo", 456 }); + VerifyTuple("[0:-1]", tuple[0, -1], new object[] { "hello", "world", 123, "foo", 456 }); + + // out of range + VerifyTuple("[2:7]", tuple[2, 7], new object[] { 123, "foo", 456, "bar" }); + VerifyTuple("[2:42]", tuple[2, 42], new object[] { 123, "foo", 456, "bar" }); + VerifyTuple("[2:123456]", tuple[2, 123456], new object[] { 123, "foo", 456, "bar" }); + VerifyTuple("[-7:2]", tuple[-7, 2], new object[] { "hello", "world" }); + VerifyTuple("[-42:2]", tuple[-42, 2], new object[] { "hello", "world" }); + } + + private static object[] 
GetRange(int fromIncluded, int toExcluded, int count) + { + if (count == 0) return new object[0]; + + if (fromIncluded < 0) fromIncluded += count; + if (toExcluded < 0) toExcluded += count; + + if (toExcluded > count) toExcluded = count; + var tmp = new object[toExcluded - fromIncluded]; + for (int i = 0; i < tmp.Length; i++) tmp[i] = new string((char) (65 + fromIncluded + i), 1); + return tmp; + } + + [Test] + public void Test_Randomized_Splices() + { + // Test a random mix of sizes, and indexes... + + const int N = 100 * 1000; + + var tuples = new ITuple[14]; + tuples[0] = STuple.Empty; + tuples[1] = STuple.Create("A"); + tuples[2] = STuple.Create("A", "B"); + tuples[3] = STuple.Create("A", "B", "C"); + tuples[4] = STuple.Create("A", "B", "C", "D"); + tuples[5] = STuple.Create("A", "B", "C", "D", "E"); + tuples[6] = STuple.Create("A", "B", "C", "D", "E", "F"); + tuples[7] = STuple.Create("A", "B", "C", "D", "E", "F", "G"); + tuples[8] = STuple.Create("A", "B", "C", "D", "E", "F", "G", "H"); + tuples[9] = STuple.Create("A", "B", "C", "D", "E", "F", "G", "H", "I"); + tuples[10]= STuple.Create("A", "B", "C", "D", "E", "F", "G", "H", "I", "J"); + tuples[11] = new JoinedTuple(tuples[6], STuple.Create("G", "H", "I", "J", "K")); + tuples[12] = new LinkedTuple(tuples[11], "L"); + tuples[13] = new LinkedTuple(STuple.Create("A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L"), "M"); + +#if false + LogPartial("Checking tuples"); + + foreach (var tuple in tuples) + { + var t = STuple.Unpack(tuple.ToSlice()); + Assert.That(t.Equals(tuple), Is.True, t.ToString() + " != unpack(" + tuple.ToString() + ")"); + } +#endif + + var rnd = new Random(123456); + + Log($"Generating {N:N0} random tuples:"); + for (int i = 0; i < N; i++) + { + if (i % 1000 == 0) Log($"- {100.0 * i / N:N1} %"); + var len = rnd.Next(tuples.Length); + var tuple = tuples[len]; + if (tuple.Count != len) + { + Assert.That(tuple.Count, Is.EqualTo(len), "Invalid length for tuple {0}", tuple); + } + + string 
prefix = tuple.ToString(); + + //if (rnd.Next(5) == 0) + //{ // randomly pack/unpack + // tuple = STuple.Unpack(tuple.ToSlice()); + // prefix = "unpacked:" + prefix; + //} + //else if (rnd.Next(5) == 0) + //{ // randomly memoize + // tuple = tuple.Memoize(); + // prefix = "memoized:" + prefix; + //} + + switch (rnd.Next(6)) + { + case 0: + { // [:+rnd] + int x = rnd.Next(len); + VerifyTuple(prefix + "[:" + x.ToString() + "]", tuple[null, x], GetRange(0, x, len)); + break; + } + case 1: + { // [+rnd:] + int x = rnd.Next(len); + VerifyTuple(prefix + "[" + x.ToString() + ":]", tuple[x, null], GetRange(x, int.MaxValue, len)); + break; + } + case 2: + { // [:-rnd] + int x = -1 - rnd.Next(len); + VerifyTuple(prefix + "[:" + x.ToString() + "]", tuple[null, x], GetRange(0, len + x, len)); + break; + } + case 3: + { // [-rnd:] + int x = -1 - rnd.Next(len); + VerifyTuple(prefix + "[" + x.ToString() + ":]", tuple[x, null], GetRange(len + x, int.MaxValue, len)); + break; + } + case 4: + { // [rnd:rnd] + int x = rnd.Next(len); + int y; + do { y = rnd.Next(len); } while (y < x); + VerifyTuple(prefix + " [" + x.ToString() + ":" + y.ToString() + "]", tuple[x, y], GetRange(x, y, len)); + break; + } + case 5: + { // [-rnd:-rnd] + int x = -1 - rnd.Next(len); + int y; + do { y = -1 - rnd.Next(len); } while (y < x); + VerifyTuple(prefix + " [" + x.ToString() + ":" + y.ToString() + "]", tuple[x, y], GetRange(len + x, len + y, len)); + break; + } + } + + } + Log("> success"); + + } + + #endregion + + #region Equality / Comparison + + private static void AssertEquality(ITuple x, ITuple y) + { + Assert.That(x.Equals(y), Is.True, "x.Equals(y)"); + Assert.That(x.Equals((object)y), Is.True, "x.Equals((object)y)"); + Assert.That(y.Equals(x), Is.True, "y.Equals(x)"); + Assert.That(y.Equals((object)x), Is.True, "y.Equals((object)y"); + } + + private static void AssertInequality(ITuple x, ITuple y) + { + Assert.That(x.Equals(y), Is.False, "!x.Equals(y)"); + Assert.That(x.Equals((object)y), 
Is.False, "!x.Equals((object)y)"); + Assert.That(y.Equals(x), Is.False, "!y.Equals(x)"); + Assert.That(y.Equals((object)x), Is.False, "!y.Equals((object)y"); + } + + [Test] + public void Test_Tuple_Equals() + { + var t1 = STuple.Create(1, 2); + // self equality + AssertEquality(t1, t1); + + var t2 = STuple.Create(1, 2); + // same type equality + AssertEquality(t1, t2); + + var t3 = STuple.Create(new object[] { 1, 2 }); + // other tuple type equality + AssertEquality(t1, t3); + + var t4 = STuple.Create(1).Append(2); + // multi step + AssertEquality(t1, t4); + } + + [Test] + public void Test_Tuple_Similar() + { + var t1 = STuple.Create(1, 2); + var t2 = STuple.Create((long)1, (short)2); + var t3 = STuple.Create("1", "2"); + var t4 = STuple.Create(new object[] { 1, 2L }); + //var t5 = STuple.Unpack(Slice.Unescape("<02>1<00><15><02>")); + + AssertEquality(t1, t1); + AssertEquality(t1, t2); + AssertEquality(t1, t3); + AssertEquality(t1, t4); + //AssertEquality(t1, t5); + AssertEquality(t2, t2); + AssertEquality(t2, t3); + AssertEquality(t2, t4); + //AssertEquality(t2, t5); + AssertEquality(t3, t3); + AssertEquality(t3, t4); + //AssertEquality(t3, t5); + AssertEquality(t4, t4); + //AssertEquality(t4, t5); + //AssertEquality(t5, t5); + } + + [Test] + public void Test_Tuple_Not_Equal() + { + var t1 = STuple.Create(1, 2); + + var x1 = STuple.Create(2, 1); + var x2 = STuple.Create("11", "22"); + var x3 = STuple.Create(1, 2, 3); + //var x4 = STuple.Unpack(Slice.Unescape("<15><01>")); + + AssertInequality(t1, x1); + AssertInequality(t1, x2); + AssertInequality(t1, x3); + //AssertInequality(t1, x4); + + AssertInequality(x1, x2); + AssertInequality(x1, x3); + //AssertInequality(x1, x4); + AssertInequality(x2, x3); + //AssertInequality(x2, x4); + //AssertInequality(x3, x4); + } + + [Test] + public void Test_Tuple_Substring_Equality() + { + ITuple x = STuple.FromArray(new[] {"A", "C"}); + ITuple y = STuple.FromArray(new[] {"A", "B", "C"}); + + Assert.That(x.Substring(0, 1), 
Is.EqualTo(y.Substring(0, 1))); + Assert.That(x.Substring(1, 1), Is.EqualTo(y.Substring(2, 1))); + + ITuple a = x.Substring(0, 1); + ITuple b = y.Substring(0, 1); + Assert.That(a.Equals(b), Is.True); + Assert.That(a.Equals((object)b), Is.True); + Assert.That(object.Equals(a, b), Is.True); + Assert.That(STuple.Equals(a, b), Is.True); + Assert.That(STuple.Equivalent(a, b), Is.True); + + // this is very unfortunate, but 'a == b' does NOT work because ITuple is an interface, and there is no known way to make it work :( + // ReSharper disable PossibleUnintendedReferenceComparison + // ReSharper disable CannotApplyEqualityOperatorToType + Assert.That(a == b, Is.False, "Tuples A and B, even if they contain the same values, are pointers to two different instances on the heap, and should not ReferenceEquals !"); + // ReSharper restore CannotApplyEqualityOperatorToType + // ReSharper restore PossibleUnintendedReferenceComparison + + // It should work on STuple<..> though (but with a compiler warning) + STuple aa = STuple.Create("A"); + STuple bb = STuple.Create("A"); + // ReSharper disable CannotApplyEqualityOperatorToType + Assert.That(aa == bb, Is.True, "Operator '==' should work on struct tuples."); + // ReSharper restore CannotApplyEqualityOperatorToType + Assert.That(aa.Equals(bb), Is.True, "Equals(..) should work on struct tuples."); + STuple cc = STuple.Create(new string('A', 1)); // make sure we have an "A" string that is not the same pointers as the others + Assert.That(aa.Item1, Is.Not.SameAs(cc.Item1), "Did your compiler optimize the new string('A', 1). If so, need to find another way"); + Assert.That(aa.Equals(cc), Is.True, "Equals(..) 
should compare the values, not the pointers."); + + + } + + [Test] + public void Test_Tuple_String_AutoCast() + { + // 'a' ~= "A" + AssertEquality(STuple.Create("A"), STuple.Create('A')); + AssertInequality(STuple.Create("A"), STuple.Create('B')); + AssertInequality(STuple.Create("A"), STuple.Create('a')); + + // ASCII ~= Unicode + AssertEquality(STuple.Create("ABC"), STuple.Create(Slice.FromStringAscii("ABC"))); + AssertInequality(STuple.Create("ABC"), STuple.Create(Slice.FromStringAscii("DEF"))); + AssertInequality(STuple.Create("ABC"), STuple.Create(Slice.FromStringAscii("abc"))); + + // 'a' ~= ASCII 'a' + AssertEquality(STuple.Create(Slice.FromStringAscii("A")), STuple.Create('A')); + AssertInequality(STuple.Create(Slice.FromStringAscii("A")), STuple.Create('B')); + AssertInequality(STuple.Create(Slice.FromStringAscii("A")), STuple.Create('a')); + } + + [Test] + public void Test_Tuple_Comparers() + { + { + var cmp = STuple.EqualityComparer.Default; + Assert.That(cmp.Equals(STuple.Create(123), STuple.Create(123)), Is.True, "(123,) == (123,)"); + Assert.That(cmp.Equals(STuple.Create(123), STuple.Create(456)), Is.False, "(123,) != (456,)"); + Assert.That(cmp.GetHashCode(STuple.Create(123)), Is.EqualTo(STuple.Create(123).GetHashCode())); + Assert.That(cmp.GetHashCode(STuple.Create(123)), Is.Not.EqualTo(STuple.Create(456).GetHashCode())); + } + { + var cmp = STuple.EqualityComparer.Default; + Assert.That(cmp.Equals(STuple.Create("foo"), STuple.Create("foo")), Is.True, "('foo',) == ('foo',)"); + Assert.That(cmp.Equals(STuple.Create("foo"), STuple.Create("bar")), Is.False, "('foo',) != ('bar',)"); + Assert.That(cmp.GetHashCode(STuple.Create("foo")), Is.EqualTo(STuple.Create("foo").GetHashCode())); + Assert.That(cmp.GetHashCode(STuple.Create("foo")), Is.Not.EqualTo(STuple.Create("bar").GetHashCode())); + } + + { + var cmp = STuple.EqualityComparer.Default; + Assert.That(cmp.Equals(STuple.Create("foo", 123), STuple.Create("foo", 123)), Is.True, "('foo',123) == 
('foo',123)"); + Assert.That(cmp.Equals(STuple.Create("foo", 123), STuple.Create("bar", 123)), Is.False, "('foo',123) != ('bar',123)"); + Assert.That(cmp.Equals(STuple.Create("foo", 123), STuple.Create("foo", 456)), Is.False, "('foo',123) != ('foo',456)"); + Assert.That(cmp.GetHashCode(STuple.Create("foo", 123)), Is.EqualTo(STuple.Create("foo", 123).GetHashCode())); + Assert.That(cmp.GetHashCode(STuple.Create("foo", 123)), Is.Not.EqualTo(STuple.Create("foo", 456).GetHashCode())); + } + + { + var cmp = STuple.EqualityComparer.Default; + Assert.That(cmp.Equals(STuple.Create("foo", true, 123), STuple.Create("foo", true, 123)), Is.True, "('foo',true,123) == ('foo',true,123)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123), STuple.Create("bar", true, 123)), Is.False, "('foo',true,123) != ('bar',true,123)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123), STuple.Create("foo", false, 123)), Is.False, "('foo',true,123) != ('foo',false,123)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123), STuple.Create("foo", true, 456)), Is.False, "('foo',true,123) != ('foo',true,456)"); + Assert.That(cmp.GetHashCode(STuple.Create("foo", true, 123)), Is.EqualTo(STuple.Create("foo", true, 123).GetHashCode())); + Assert.That(cmp.GetHashCode(STuple.Create("foo", true, 123)), Is.Not.EqualTo(STuple.Create("foo", true, 456).GetHashCode())); + } + + { + var cmp = STuple.EqualityComparer.Default; + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L), STuple.Create("foo", true, 123, -1L)), Is.True, "('foo',true,123,-1) == ('foo',true,123,-1)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L), STuple.Create("bar", true, 123, -1L)), Is.False, "('foo',true,123,-1) != ('bar',true,123,-1)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L), STuple.Create("foo", false, 123, -1L)), Is.False, "('foo',true,123,-1) != ('foo',false,123,-1)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L), STuple.Create("foo", true, 456, -1L)), 
Is.False, "('foo',true,123,-1) != ('foo',true,456,-1)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L), STuple.Create("foo", true, 123, -2L)), Is.False, "('foo',true,123,-1) != ('foo',true,123,-2)"); + Assert.That(cmp.GetHashCode(STuple.Create("foo", true, 123, -1L)), Is.EqualTo(STuple.Create("foo", true, 123, -1L).GetHashCode())); + Assert.That(cmp.GetHashCode(STuple.Create("foo", true, 123, -1L)), Is.Not.EqualTo(STuple.Create("foo", true, 456, 123L).GetHashCode())); + } + + { + var cmp = STuple.EqualityComparer.Default; + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf"), STuple.Create("foo", true, 123, -1L, "narf")), Is.True, "('foo',true,123,-1) == ('foo',true,123,-1,'narf')"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf"), STuple.Create("bar", true, 123, -1L, "narf")), Is.False, "('foo',true,123,-1) != ('bar',true,123,-1,'narf')"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf"), STuple.Create("foo", false, 123, -1L, "narf")), Is.False, "('foo',true,123,-1) != ('foo',false,123,-1,'narf')"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf"), STuple.Create("foo", true, 456, -1L, "narf")), Is.False, "('foo',true,123,-1) != ('foo',true,456,-1,'narf')"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf"), STuple.Create("foo", true, 123, -2L, "narf")), Is.False, "('foo',true,123,-1) != ('foo',true,123,-2,'narf')"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf"), STuple.Create("foo", true, 123, -1L, "zort")), Is.False, "('foo',true,123,-1) != ('foo',true,123,-1,'zort')"); + Assert.That(cmp.GetHashCode(STuple.Create("foo", true, 123, -1L, "narf")), Is.EqualTo(STuple.Create("foo", true, 123, -1L, "narf").GetHashCode())); + Assert.That(cmp.GetHashCode(STuple.Create("foo", true, 123, -1L, "narf")), Is.Not.EqualTo(STuple.Create("foo", true, 456, -1L, "narf").GetHashCode())); + } + + { + var cmp = STuple.EqualityComparer.Default; + 
Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf", Math.PI), STuple.Create("foo", true, 123, -1L, "narf", Math.PI)), Is.True, "('foo',true,123,-1) == ('foo',true,123,-1,'narf',PI)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf", Math.PI), STuple.Create("bar", true, 123, -1L, "narf", Math.PI)), Is.False, "('foo',true,123,-1) != ('bar',true,123,-1,'narf',PI)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf", Math.PI), STuple.Create("foo", false, 123, -1L, "narf", Math.PI)), Is.False, "('foo',true,123,-1) != ('foo',false,123,-1,'narf',PI)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf", Math.PI), STuple.Create("foo", true, 456, -1L, "narf", Math.PI)), Is.False, "('foo',true,123,-1) != ('foo',true,456,-1,'narf',PI)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf", Math.PI), STuple.Create("foo", true, 123, -2L, "narf", Math.PI)), Is.False, "('foo',true,123,-1) != ('foo',true,123,-2,'narf',PI)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf", Math.PI), STuple.Create("foo", true, 123, -1L, "zort", Math.PI)), Is.False, "('foo',true,123,-1) != ('foo',true,123,-1,'zort',PI)"); + Assert.That(cmp.Equals(STuple.Create("foo", true, 123, -1L, "narf", Math.PI), STuple.Create("foo", true, 123, -1L, "narf", Math.E)), Is.False, "('foo',true,123,-1) != ('foo',true,123,-1,'narf',E)"); + Assert.That(cmp.GetHashCode(STuple.Create("foo", true, 123, -1L, "narf", Math.PI)), Is.EqualTo(STuple.Create("foo", true, 123, -1L, "narf", Math.PI).GetHashCode())); + Assert.That(cmp.GetHashCode(STuple.Create("foo", true, 123, -1L, "narf", Math.PI)), Is.Not.EqualTo(STuple.Create("foo", true, 123, -1L, "narf", Math.E).GetHashCode())); + } + + } + + #endregion + + #region Formatters + + [Test] + public void Test_Default_TupleFormatter_For_Common_Types() + { + + // common simple types + Assert.That(TupleFormatter.Default, Is.InstanceOf>()); + 
Assert.That(TupleFormatter.Default, Is.InstanceOf>()); + Assert.That(TupleFormatter.Default, Is.InstanceOf>()); + + // corner cases + Assert.That(TupleFormatter.Default, Is.InstanceOf>()); + //Assert.That(TupleFormatter.Default, Is.InstanceOf>()); + + // ITupleFormattable types + Assert.That(TupleFormatter.Default, Is.InstanceOf>()); + } + + [Test] + public void Test_Format_Common_Types() + { + Assert.That(TupleFormatter.Default.ToTuple(123), Is.EqualTo(STuple.Create(123))); + Assert.That(TupleFormatter.Default.FromTuple(STuple.Create(123)), Is.EqualTo(123)); + + Assert.That(TupleFormatter.Default.ToTuple(true), Is.EqualTo(STuple.Create(true))); + Assert.That(TupleFormatter.Default.FromTuple(STuple.Create(true)), Is.True); + + Assert.That(TupleFormatter.Default.ToTuple("hello"), Is.EqualTo(STuple.Create("hello"))); + Assert.That(TupleFormatter.Default.FromTuple(STuple.Create("hello")), Is.EqualTo("hello")); + + var t = STuple.Create(new object[] { "hello", 123, false }); + Assert.That(TupleFormatter.Default.ToTuple(t), Is.SameAs(t)); + Assert.That(TupleFormatter.Default.FromTuple(t), Is.SameAs(t)); + + var thing = new Thing { Foo = 123, Bar = "hello" }; + Assert.That(TupleFormatter.Default.ToTuple(thing), Is.EqualTo(STuple.Create(123, "hello"))); + + var thing2 = TupleFormatter.Default.FromTuple(STuple.Create(456, "world")); + Assert.That(thing2, Is.Not.Null); + Assert.That(thing2.Foo, Is.EqualTo(456)); + Assert.That(thing2.Bar, Is.EqualTo("world")); + + } + + [Test] + public void Test_Create_Appender_Formatter() + { + // create an appender formatter that will always add the values after the same prefix + + var fmtr = TupleFormatter.CreateAppender(STuple.Create("hello", "world")); + Assert.That(fmtr, Is.InstanceOf>()); + + Assert.That(fmtr.ToTuple(123), Is.EqualTo(STuple.Create("hello", "world", 123))); + Assert.That(fmtr.ToTuple(456), Is.EqualTo(STuple.Create("hello", "world", 456))); + Assert.That(fmtr.ToTuple(-1), Is.EqualTo(STuple.Create("hello", "world", 
-1))); + + Assert.That(fmtr.FromTuple(STuple.Create("hello", "world", 42)), Is.EqualTo(42)); + Assert.That(fmtr.FromTuple(STuple.Create("hello", "world", -1)), Is.EqualTo(-1)); + + Assert.That(() => fmtr.FromTuple(null), Throws.ArgumentNullException); + Assert.That(() => fmtr.FromTuple(STuple.Empty), Throws.InstanceOf()); + Assert.That(() => fmtr.FromTuple(STuple.Create("hello", "world", 42, 77)), Throws.InstanceOf(), "Too many values"); + Assert.That(() => fmtr.FromTuple(STuple.Create("hello_world", 42)), Throws.InstanceOf(), "not enough values"); + Assert.That(() => fmtr.FromTuple(STuple.Create("world", "hello", "42")), Throws.InstanceOf(), "incorrect type"); + Assert.That(() => fmtr.FromTuple(STuple.Create(42)), Throws.InstanceOf(), "missing prefix"); + Assert.That(() => fmtr.FromTuple(STuple.Create("extra", "hello", "world", 42)), Throws.InstanceOf(), "prefix must match exactly"); + Assert.That(() => fmtr.FromTuple(STuple.Create("Hello", "World", 42)), Throws.InstanceOf(), "case sensitive"); + } + + #endregion + + #region Deformatters + + [Test] + public void Test_Can_Deformat_Simple_Tuples() + { + + void Check(string expr, ITuple expected) + { + Log("> " + expr); + var actual = STuple.Deformatter.Parse(expr); + if (!expected.Equals(actual)) + { + Log("- EXPECTED: " + expected); + Log("- ACTUAL : " + actual); + Log("- " + TuPack.Pack(actual)); + Log("- " + TuPack.Pack(expected)); + Assert.That(actual, Is.EqualTo(expected), expr); + } + } + + Check("()", STuple.Empty); + Check("(true)", STuple.Create(true)); + Check("(false)", STuple.Create(false)); + Check("(123)", STuple.Create(123)); + Check("(-42)", STuple.Create(-42)); + Check("(123.4)", STuple.Create(123.4d)); + Check("(1E10)", STuple.Create(1E10)); + Check("('x')", STuple.Create('x')); + Check("(\"Hello World\")", STuple.Create("Hello World")); + Check("(\"Foo\\\"Bar\\tBaz\")", STuple.Create("Foo\"Bar\tBaz")); + Check("({4626466c-fdac-4230-af3a-4029fab668ab})", 
STuple.Create(Guid.Parse("4626466c-fdac-4230-af3a-4029fab668ab"))); + + Check("(\"Hello\",123,false)", STuple.Create("Hello", 123, false)); + Check("('M',123456789,{4626466c-fdac-4230-af3a-4029fab668ab})", STuple.Create('M', 123456789, Guid.Parse("4626466c-fdac-4230-af3a-4029fab668ab"))); + Check("(123, true , \"Hello\")", STuple.Create(123, true, "Hello")); + + Check("(\"Hello\",(123,true),\"World!\")", STuple.Create("Hello", STuple.Create(123, true), "World!")); + Check("(9223372036854775807,)", STuple.Create(long.MaxValue)); + Check("(-9223372036854775808,)", STuple.Create(long.MinValue)); + Check("(18446744073709551615,)", STuple.Create(ulong.MaxValue)); + Check("(3.1415926535897931, 2.7182818284590451)", STuple.Create(Math.PI, Math.E)); + Check("(123E45,-123E-45)", STuple.Create(123E45, -123E-45)); + } + + #endregion + + #region Bench.... + +#if false + + [Test] + public void Bench_Tuple_Unpack_Random() + { + const int N = 100 * 1000; + + Slice FUNKY_ASCII = Slice.FromAscii("bonjour\x00le\x00\xFFmonde"); + string FUNKY_STRING = "hello\x00world"; + string UNICODE_STRING = "héllø 世界"; + + LogPartial("Creating {0:N0} random tuples", N); + var tuples = new List(N); + var rnd = new Random(777); + var guids = Enumerable.Range(0, 10).Select(_ => Guid.NewGuid()).ToArray(); + var uuid128s = Enumerable.Range(0, 10).Select(_ => Uuid128.NewUuid()).ToArray(); + var uuid64s = Enumerable.Range(0, 10).Select(_ => Uuid64.NewUuid()).ToArray(); + var fuzz = new byte[1024 + 1000]; rnd.NextBytes(fuzz); + var sw = Stopwatch.StartNew(); + for (int i = 0; i < N; i++) + { + ITuple tuple = STuple.Empty; + int s = 1 + (int)Math.Sqrt(rnd.Next(128)); + if (i % (N / 100) == 0) LogPartial('.'); + for (int j = 0; j < s; j++) + { + switch (rnd.Next(17)) + { + case 0: tuple = tuple.Append(rnd.Next(255)); break; + case 1: tuple = tuple.Append(-1 - rnd.Next(255)); break; + case 2: tuple = tuple.Append(256 + rnd.Next(65536 - 256)); break; + case 3: tuple = tuple.Append(rnd.Next(int.MaxValue)); 
break; + case 4: tuple = tuple.Append((rnd.Next(int.MaxValue) << 32) | rnd.Next(int.MaxValue)); break; + case 5: tuple = tuple.Append(new string('A', 1 + rnd.Next(16))); break; + case 6: tuple = tuple.Append(new string('B', 8 + (int)Math.Sqrt(rnd.Next(1024)))); break; + case 7: tuple = tuple.Append(UNICODE_STRING); break; + case 8: tuple = tuple.Append(FUNKY_STRING); break; + case 9: tuple = tuple.Append(FUNKY_ASCII); break; + case 10: tuple = tuple.Append(guids[rnd.Next(10)]); break; + case 11: tuple = tuple.Append(uuid128s[rnd.Next(10)]); break; + case 12: tuple = tuple.Append(uuid64s[rnd.Next(10)]); break; + case 13: tuple = tuple.Append(Slice.Create(fuzz, rnd.Next(1000), 1 + (int)Math.Sqrt(rnd.Next(1024)))); break; + case 14: tuple = tuple.Append(default(string)); break; + case 15: tuple = tuple.Append("hello"); break; + case 16: tuple = tuple.Append(rnd.Next(2) == 0); break; + } + } + tuples.Add(tuple); + } + sw.Stop(); + Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); + Log(" > {0:N0} items", tuples.Sum(x => x.Count)); + Log(" > {0}", tuples[42]); + Log(); + + LogPartial("Packing tuples..."); + sw.Restart(); + var slices = STuple.Pack(tuples); + sw.Stop(); + Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); + Log(" > {0:N0} tps", N / sw.Elapsed.TotalSeconds); + Log(" > {0:N0} bytes", slices.Sum(x => x.Count)); + Log(" > {0}", slices[42]); + Log(); + + LogPartial("Unpacking tuples..."); + sw.Restart(); + var unpacked = slices.Select(slice => STuple.Unpack(slice)).ToList(); + sw.Stop(); + Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); + Log(" > {0:N0} tps", N / sw.Elapsed.TotalSeconds); + Log(" > {0}", unpacked[42]); + Log(); + + LogPartial("Comparing ..."); + sw.Restart(); + tuples.Zip(unpacked, (x, y) => x.Equals(y)).All(b => b); + sw.Stop(); + Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); + Log(); + + LogPartial("Tuples.ToString ..."); + sw.Restart(); + var strings = tuples.Select(x => x.ToString()).ToList(); + sw.Stop(); + Log(" done in 
{0:N3} sec", sw.Elapsed.TotalSeconds); + Log(" > {0:N0} chars", strings.Sum(x => x.Length)); + Log(" > {0}", strings[42]); + Log(); + + LogPartial("Unpacked.ToString ..."); + sw.Restart(); + strings = unpacked.Select(x => x.ToString()).ToList(); + sw.Stop(); + Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); + Log(" > {0:N0} chars", strings.Sum(x => x.Length)); + Log(" > {0}", strings[42]); + Log(); + + LogPartial("Memoizing ..."); + sw.Restart(); + var memoized = tuples.Select(x => x.Memoize()).ToList(); + sw.Stop(); + Log(" done in {0:N3} sec", sw.Elapsed.TotalSeconds); + } + +#endif + + #endregion + + private class Thing : ITupleFormattable + { + + public int Foo { get; set; } + public string Bar { get; set; } + + ITuple ITupleFormattable.ToTuple() + { + return STuple.Create(this.Foo, this.Bar); + } + + void ITupleFormattable.FromTuple(ITuple tuple) + { + this.Foo = tuple.Get(0); + this.Bar = tuple.Get(1); + } + } + + #region System.ValueTuple integration... + + [Test] + public void Test_Implicit_Cast_STuple_To_ValueTuple() + { + { + ValueTuple t = STuple.Create(11); + Assert.That(t.Item1, Is.EqualTo(11)); + } + { + (int, int) t = STuple.Create(11, 22); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + } + { + (int, int, int) t = STuple.Create(11, 22, 33); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + Assert.That(t.Item3, Is.EqualTo(33)); + } + { + (int, int, int, int) t = STuple.Create(11, 22, 33, 44); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + Assert.That(t.Item3, Is.EqualTo(33)); + Assert.That(t.Item4, Is.EqualTo(44)); + } + { + (int, int, int, int, int) t = STuple.Create(11, 22, 33, 44, 55); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + Assert.That(t.Item3, Is.EqualTo(33)); + Assert.That(t.Item4, Is.EqualTo(44)); + Assert.That(t.Item5, Is.EqualTo(55)); + } + { + (int, int, int, int, int, int) t = STuple.Create(11, 
22, 33, 44, 55, 66); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + Assert.That(t.Item3, Is.EqualTo(33)); + Assert.That(t.Item4, Is.EqualTo(44)); + Assert.That(t.Item5, Is.EqualTo(55)); + Assert.That(t.Item6, Is.EqualTo(66)); + } + } + + [Test] + public void Test_Implicit_Cast_ValueTuple_To_STuple() + { + { + STuple t = ValueTuple.Create(11); + Assert.That(t.Item1, Is.EqualTo(11)); + } + { + STuple t = (11, 22); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + } + { + STuple t = (11, 22, 33); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + Assert.That(t.Item3, Is.EqualTo(33)); + } + { + STuple t = (11, 22, 33, 44); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + Assert.That(t.Item3, Is.EqualTo(33)); + Assert.That(t.Item4, Is.EqualTo(44)); + } + { + STuple t = (11, 22, 33, 44, 55); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + Assert.That(t.Item3, Is.EqualTo(33)); + Assert.That(t.Item4, Is.EqualTo(44)); + Assert.That(t.Item5, Is.EqualTo(55)); + } + { + STuple t = (11, 22, 33, 44, 55, 66); + Assert.That(t.Item1, Is.EqualTo(11)); + Assert.That(t.Item2, Is.EqualTo(22)); + Assert.That(t.Item3, Is.EqualTo(33)); + Assert.That(t.Item4, Is.EqualTo(44)); + Assert.That(t.Item5, Is.EqualTo(55)); + Assert.That(t.Item6, Is.EqualTo(66)); + } + } + + private static (int, int) ProduceValueTuple(int item1, int item2) => (item1, item2); + + private static int[] ConsumeValueTuple(STuple t) => new[] { t.Item1, t.Item2 }; + + private static STuple ProduceSTuple(int item1, int item2) => STuple.Create(item1, item2); + + private static int[] ConsumeSTuple(STuple t) => new[] { t.Item1, t.Item2 }; + + [Test] + public void Test_Can_AutoCast_Transparently() + { + + { // (int, int) => STuple + var res = ConsumeSTuple(ProduceValueTuple(1234, 5)); + Assert.That(res[0], Is.EqualTo(1234)); + Assert.That(res[1], Is.EqualTo(5)); + 
} + { // literal => STuple + var res = ConsumeSTuple((1234, 5)); + Assert.That(res[0], Is.EqualTo(1234)); + Assert.That(res[1], Is.EqualTo(5)); + } + { // STuple => (int, int) + var res = ConsumeValueTuple(ProduceSTuple(1234, 5)); + Assert.That(res[0], Is.EqualTo(1234)); + Assert.That(res[1], Is.EqualTo(5)); + } + } + + [Test] + public void Test_Deconstruct_STuple() + { + { + STuple.Create(11, 22).Deconstruct(out int a, out int b); + Assert.That(a, Is.EqualTo(11)); + Assert.That(b, Is.EqualTo(22)); + } + { + STuple.Create(11, 22, 33).Deconstruct(out int a, out int b, out int c); + Assert.That(a, Is.EqualTo(11)); + Assert.That(b, Is.EqualTo(22)); + Assert.That(c, Is.EqualTo(33)); + } + { + STuple.Create(11, 22, 33, 44).Deconstruct(out int a, out int b, out int c, out int d); + Assert.That(a, Is.EqualTo(11)); + Assert.That(b, Is.EqualTo(22)); + Assert.That(c, Is.EqualTo(33)); + Assert.That(d, Is.EqualTo(44)); + } + { + STuple.Create(11, 22, 33, 44, 55).Deconstruct(out int a, out int b, out int c, out int d, out int e); + Assert.That(a, Is.EqualTo(11)); + Assert.That(b, Is.EqualTo(22)); + Assert.That(c, Is.EqualTo(33)); + Assert.That(d, Is.EqualTo(44)); + Assert.That(e, Is.EqualTo(55)); + } + { + STuple.Create(11, 22, 33, 44, 55, 66).Deconstruct(out int a, out int b, out int c, out int d, out int e, out int f); + Assert.That(a, Is.EqualTo(11)); + Assert.That(b, Is.EqualTo(22)); + Assert.That(c, Is.EqualTo(33)); + Assert.That(d, Is.EqualTo(44)); + Assert.That(e, Is.EqualTo(55)); + Assert.That(f, Is.EqualTo(66)); + } + } + + [Test] + public void Test_Deconstruct_STuple_TupleSyntax() + { + { + (var a, var b) = STuple.Create(11, 22); + Assert.That(a, Is.EqualTo(11)); + Assert.That(b, Is.EqualTo(22)); + } + { + (var a, var b, var c) = STuple.Create(11, 22, 33); + Assert.That(a, Is.EqualTo(11)); + Assert.That(b, Is.EqualTo(22)); + Assert.That(c, Is.EqualTo(33)); + } + { + (var a, var b, var c, var d) = STuple.Create(11, 22, 33, 44); + Assert.That(a, Is.EqualTo(11)); + 
Assert.That(b, Is.EqualTo(22)); + Assert.That(c, Is.EqualTo(33)); + Assert.That(d, Is.EqualTo(44)); + } + { + (var a, var b, var c, var d, var e) = STuple.Create(11, 22, 33, 44, 55); + Assert.That(a, Is.EqualTo(11)); + Assert.That(b, Is.EqualTo(22)); + Assert.That(c, Is.EqualTo(33)); + Assert.That(d, Is.EqualTo(44)); + Assert.That(e, Is.EqualTo(55)); + } + { + (var a, var b, var c, var d, var e, var f) = STuple.Create(11, 22, 33, 44, 55, 66); + Assert.That(a, Is.EqualTo(11)); + Assert.That(b, Is.EqualTo(22)); + Assert.That(c, Is.EqualTo(33)); + Assert.That(d, Is.EqualTo(44)); + Assert.That(e, Is.EqualTo(55)); + Assert.That(f, Is.EqualTo(66)); + } + } + + #endregion + + } +} diff --git a/FoundationDB.Tests/Utils/TypeConvertersFacts.cs b/FoundationDB.Tests/Utils/TypeConvertersFacts.cs new file mode 100644 index 000000000..21b1f6ef9 --- /dev/null +++ b/FoundationDB.Tests/Utils/TypeConvertersFacts.cs @@ -0,0 +1,176 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +namespace FoundationDB.Client.Converters.Tests +{ + using System; + using Doxense.Runtime.Converters; + using FoundationDB.Client.Tests; + using NUnit.Framework; + + [TestFixture] + public class TypeConvertersFacts : FdbTest + { + + [Test] + public void Test_Can_Convert_Numbers_To_Bool() + { + Assert.That(TypeConverters.Convert(0), Is.False); + Assert.That(TypeConverters.Convert(0), Is.False); + Assert.That(TypeConverters.Convert(0), Is.False); + Assert.That(TypeConverters.Convert(0), Is.False); + Assert.That(TypeConverters.Convert(0), Is.False); + Assert.That(TypeConverters.Convert(0), Is.False); + Assert.That(TypeConverters.Convert(0), Is.False); + Assert.That(TypeConverters.Convert(0), Is.False); + Assert.That(TypeConverters.Convert(0.0f), Is.False); + Assert.That(TypeConverters.Convert(float.NaN), Is.False); + Assert.That(TypeConverters.Convert(0.0d), Is.False); + Assert.That(TypeConverters.Convert(double.NaN), Is.False); + + Assert.That(TypeConverters.Convert(123), Is.True); + Assert.That(TypeConverters.Convert(123), Is.True); + Assert.That(TypeConverters.Convert(123), Is.True); + Assert.That(TypeConverters.Convert(123), Is.True); + Assert.That(TypeConverters.Convert(123), Is.True); + Assert.That(TypeConverters.Convert(123), Is.True); + Assert.That(TypeConverters.Convert(123), Is.True); + Assert.That(TypeConverters.Convert(123), Is.True); + Assert.That(TypeConverters.Convert(123.0f), Is.True); + Assert.That(TypeConverters.Convert(123.0d), Is.True); + } + + [Test] + 
public void Test_Can_Convert_Numbers_To_Int32() + { + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123)); + Assert.That(TypeConverters.Convert(123.0f), Is.EqualTo(123)); + Assert.That(TypeConverters.Convert(123.0d), Is.EqualTo(123)); + } + + [Test] + public void Test_Can_Convert_Numbers_To_UInt32() + { + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123U)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123U)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123U)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123U)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123U)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123U)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123U)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123U)); + Assert.That(TypeConverters.Convert(123.0f), Is.EqualTo(123U)); + Assert.That(TypeConverters.Convert(123.0d), Is.EqualTo(123U)); + } + + [Test] + public void Test_Can_Convert_Numbers_To_Int64() + { + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123L)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123L)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123L)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123L)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123L)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123L)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123L)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123L)); + Assert.That(TypeConverters.Convert(123.0f), Is.EqualTo(123L)); + 
Assert.That(TypeConverters.Convert(123.0d), Is.EqualTo(123L)); + } + + [Test] + public void Test_Can_Convert_Numbers_To_UInt64() + { + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123UL)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123UL)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123UL)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123UL)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123UL)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123UL)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123UL)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123UL)); + Assert.That(TypeConverters.Convert(123.0f), Is.EqualTo(123UL)); + Assert.That(TypeConverters.Convert(123.0d), Is.EqualTo(123UL)); + } + + [Test] + public void Test_Can_Convert_Numbers_To_Single() + { + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123f)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123f)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123f)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123f)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123f)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123f)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123f)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123f)); + Assert.That(TypeConverters.Convert(123.0f), Is.EqualTo(123f)); + Assert.That(TypeConverters.Convert(123.0d), Is.EqualTo(123f)); + } + + [Test] + public void Test_Can_Convert_Numbers_To_Double() + { + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123d)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123d)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123d)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123d)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123d)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123d)); + Assert.That(TypeConverters.Convert(123), Is.EqualTo(123d)); + 
Assert.That(TypeConverters.Convert(123), Is.EqualTo(123d)); + Assert.That(TypeConverters.Convert(123.0f), Is.EqualTo(123d)); + Assert.That(TypeConverters.Convert(123.0d), Is.EqualTo(123d)); + } + + [Test] + public void Test_Can_Convert_Numbers_To_String() + { + Assert.That(TypeConverters.Convert(123), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123.0f), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123.4f), Is.EqualTo("123.4")); + Assert.That(TypeConverters.Convert(123.0d), Is.EqualTo("123")); + Assert.That(TypeConverters.Convert(123.4d), Is.EqualTo("123.4")); + } + + } +} diff --git a/FoundationDB.Tests/Uuid128Facts.cs b/FoundationDB.Tests/Utils/Uuid128Facts.cs similarity index 84% rename from FoundationDB.Tests/Uuid128Facts.cs rename to FoundationDB.Tests/Utils/Uuid128Facts.cs index 253b760c7..252b594d1 100644 --- a/FoundationDB.Tests/Uuid128Facts.cs +++ b/FoundationDB.Tests/Utils/Uuid128Facts.cs @@ -1,5 +1,5 @@ #region BSD Licence -/* Copyright (c) 2013, Doxense SARL +/* Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without @@ -28,14 +28,13 @@ DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY namespace FoundationDB.Client.Tests { - using FoundationDB.Client; - using NUnit.Framework; using System; using System.Collections.Generic; using System.Linq; + using NUnit.Framework; [TestFixture] - public class UuidFacts + public class Uuid128Facts : FdbTest { [Test] public void Test_Uuid_Empty() @@ -126,6 +125,46 @@ public void Test_Uuid_NewUuid() Assert.That(uuid.ToGuid().ToString(), Is.EqualTo(uuid.ToString())); } + [Test] + public void Test_Uuid_Increment() + { + var @base = Uuid128.Parse("6be5d394-03a6-42ab-aac2-89b7d9312402"); + Log(@base); + //DumpHexa(@base.ToByteArray()); + + { // +1 + var uuid = @base.Increment(1); + Log(uuid); + //DumpHexa(uuid.ToByteArray()); + Assert.That(uuid.ToString(), Is.EqualTo("6be5d394-03a6-42ab-aac2-89b7d9312403")); + } + { // +256 + var uuid = @base.Increment(256); + Log(uuid); + //DumpHexa(uuid.ToByteArray()); + Assert.That(uuid.ToString(), Is.EqualTo("6be5d394-03a6-42ab-aac2-89b7d9312502")); + } + { // almost overflow (low) + var uuid = @base.Increment(0x553D764826CEDBFDUL); // delta nécessaire pour avoir 0xFFFFFFFFFFFFFFFF a la fin + Log(uuid); + //DumpHexa(uuid.ToByteArray()); + Assert.That(uuid.ToString(), Is.EqualTo("6be5d394-03a6-42ab-ffff-ffffffffffff")); + } + { // overflow (low) + var uuid = @base.Increment(0x553D764826CEDBFEUL); // encore 1 de plus pour trigger l'overflow + Log(uuid); + //DumpHexa(uuid.ToByteArray()); + Assert.That(uuid.ToString(), Is.EqualTo("6be5d394-03a6-42ac-0000-000000000000")); + } + { // overflow (cascade) + var uuid = Uuid128.Parse("ffffffff-ffff-ffff-ffff-ffffffffffff").Increment(1); + Log(uuid); + //DumpHexa(uuid.ToByteArray()); + Assert.That(uuid.ToString(), Is.EqualTo("00000000-0000-0000-0000-000000000000")); + } + + } + [Test] public void Test_Uuid_ToSlice() { @@ -134,7 +173,7 @@ public void Test_Uuid_ToSlice() Assert.That(uuid.ToSlice().Offset, Is.GreaterThanOrEqualTo(0)); Assert.That(uuid.ToSlice().Array, Is.Not.Null); 
Assert.That(uuid.ToSlice().Array.Length, Is.GreaterThanOrEqualTo(16)); - Assert.That(uuid.ToSlice(), Is.EqualTo(Slice.Create(uuid.ToByteArray()))); + Assert.That(uuid.ToSlice(), Is.EqualTo(uuid.ToByteArray().AsSlice())); Assert.That(uuid.ToSlice().GetBytes(), Is.EqualTo(uuid.ToByteArray())); } diff --git a/FoundationDB.Tests/Utils/Uuid64Facts.cs b/FoundationDB.Tests/Utils/Uuid64Facts.cs new file mode 100644 index 000000000..74244cd96 --- /dev/null +++ b/FoundationDB.Tests/Utils/Uuid64Facts.cs @@ -0,0 +1,690 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +#endregion + +//#define ENABLE_SPAN + +// ReSharper disable AssignNullToNotNullAttribute +namespace FoundationDB.Client.Tests +{ + using System; + using System.Collections.Generic; + using System.Linq; + using Doxense.Memory; + using NUnit.Framework; + + [TestFixture] + public class Uuid64Facts : FdbTest + { + [Test] + public void Test_Uuid64_Empty() + { + Assert.That(Uuid64.Empty.ToString(), Is.EqualTo("00000000-00000000")); + Assert.That(Uuid64.Empty, Is.EqualTo(default(Uuid64))); + Assert.That(Uuid64.Empty, Is.EqualTo(new Uuid64(0L))); + Assert.That(Uuid64.Empty, Is.EqualTo(new Uuid64(0UL))); + Assert.That(Uuid64.Empty, Is.EqualTo(Uuid64.Read(new byte[8]))); + } + + [Test] + public void Test_Uuid64_Casting() + { + // implicit + Uuid64 a = (long)0; + Uuid64 b = (long)42; + Uuid64 c = (long)0xDEADBEEF; + Uuid64 d = 0xBADC0FFEE0DDF00DUL; + Uuid64 e = ulong.MaxValue; + + // ToUInt64 + Assert.That(a.ToUInt64(), Is.EqualTo(0UL)); + Assert.That(b.ToUInt64(), Is.EqualTo(42UL)); + Assert.That(c.ToUInt64(), Is.EqualTo(3735928559UL)); + Assert.That(d.ToUInt64(), Is.EqualTo(13464654573299691533UL)); + Assert.That(e.ToUInt64(), Is.EqualTo(ulong.MaxValue)); + + // ToInt64 + Assert.That(a.ToInt64(), Is.EqualTo(0L)); + Assert.That(b.ToInt64(), Is.EqualTo(42L)); + Assert.That(c.ToInt64(), Is.EqualTo(3735928559L)); + Assert.That(d.ToInt64(), Is.EqualTo(-4982089500409860083L)); + Assert.That(e.ToInt64(), Is.EqualTo(-1L)); + + // explict + Assert.That((long)a, Is.EqualTo(0)); + Assert.That((long)b, 
Is.EqualTo(42)); + Assert.That((long)c, Is.EqualTo(0xDEADBEEF)); + Assert.That((ulong)d, Is.EqualTo(13464654573299691533UL)); + Assert.That((ulong)e, Is.EqualTo(ulong.MaxValue)); + Assert.That((long)e, Is.EqualTo(-1L)); + } + + [Test] + public void Test_Uuid64_ToString() + { + var guid = new Uuid64(0xBADC0FFEE0DDF00DUL); + Assert.That(guid.ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(guid.ToString(), Is.EqualTo("BADC0FFE-E0DDF00D")); + Assert.That(guid.ToString("X"), Is.EqualTo("BADC0FFEE0DDF00D")); + Assert.That(guid.ToString("B"), Is.EqualTo("{BADC0FFE-E0DDF00D}")); + Assert.That(guid.ToString("C"), Is.EqualTo("G2eGAUq82Hd")); + + guid = new Uuid64(0xDEADBEEFUL); + Assert.That(guid.ToUInt64(), Is.EqualTo(0xDEADBEEFUL)); + Assert.That(guid.ToString(), Is.EqualTo("00000000-DEADBEEF")); + Assert.That(guid.ToString("X"), Is.EqualTo("00000000DEADBEEF")); + Assert.That(guid.ToString("B"), Is.EqualTo("{00000000-DEADBEEF}")); + Assert.That(guid.ToString("C"), Is.EqualTo("44pZgF")); + } + + [Test] + public void Test_Uuid64_Parse_Hexa16() + { + // string + + Assert.That(Uuid64.Parse("badc0ffe-e0ddf00d").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("BADC0FFE-E0DDF00D").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL), "Should be case-insensitive"); + + Assert.That(Uuid64.Parse("badc0ffee0ddf00d").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("BADC0FFEE0DDF00D").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL), "Should be case-insensitive"); + + Assert.That(Uuid64.Parse("{badc0ffe-e0ddf00d}").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("{BADC0FFE-E0DDF00D}").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL), "Should be case-insensitive"); + + Assert.That(Uuid64.Parse("{badc0ffee0ddf00d}").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("{BADC0FFEE0DDF00D}").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL), "should be case-insensitive"); + + 
Assert.That(Uuid64.Parse("00000000-deadbeef").ToUInt64(), Is.EqualTo(0xDEADBEEFUL)); + Assert.That(Uuid64.Parse("{00000000-deadbeef}").ToUInt64(), Is.EqualTo(0xDEADBEEFUL)); + + // errors + Assert.That(() => Uuid64.Parse(default(string)), Throws.ArgumentNullException); + Assert.That(() => Uuid64.Parse("hello"), Throws.InstanceOf()); + Assert.That(() => Uuid64.Parse("12345678-9ABCDEFG"), Throws.InstanceOf(), "Invalid hexa character 'G'"); + Assert.That(() => Uuid64.Parse("00000000-0000000 "), Throws.InstanceOf(), "Two short + extra space"); + Assert.That(() => Uuid64.Parse("zzzzzzzz-zzzzzzzz"), Throws.InstanceOf(), "Invalid char"); + Assert.That(() => Uuid64.Parse("badc0ffe-e0ddf00"), Throws.InstanceOf(), "Missing last char"); + Assert.That(() => Uuid64.Parse("baadc0ffe-e0ddf00"), Throws.InstanceOf(), "'-' at invalid position"); + Assert.That(() => Uuid64.Parse("badc0fe-ee0ddf00d"), Throws.InstanceOf(), "'-' at invalid position"); + Assert.That(() => Uuid64.Parse("badc0ffe-e0ddf00d "), Throws.InstanceOf(), "Extra space at the end"); + Assert.That(() => Uuid64.Parse(" badc0ffe-e0ddf00d"), Throws.InstanceOf(), "Extra space at the start"); + +#if ENABLE_SPAN + // span from string + + Assert.That(Uuid64.Parse("badc0ffe-e0ddf00d".AsSpan()).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("badc0ffee0ddf00d".AsSpan()).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("hello badc0ffe-e0ddf00d world!".AsSpan().Slice(6, 17)).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("hello badc0ffee0ddf00d world!".AsSpan().Slice(6, 16)).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + + // span from char[] + + Assert.That(Uuid64.Parse("badc0ffe-e0ddf00d".ToCharArray().AsSpan()).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("badc0ffee0ddf00d".ToCharArray().AsSpan()).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("hello badc0ffe-e0ddf00d 
world!".ToCharArray().AsSpan().Slice(6, 17)).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + Assert.That(Uuid64.Parse("hello badc0ffee0ddf00d world!".ToCharArray().AsSpan().Slice(6, 16)).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + + // span from stackalloc + + unsafe + { + char* buf = stackalloc char[64]; + var span = new Span(buf, 64); + + span.Clear(); + "badc0ffe-e0ddf00d".AsSpan().CopyTo(span); + Assert.That(Uuid64.Parse(span.Slice(0, 17)).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + + span.Clear(); + "badc0ffee0ddf00d".AsSpan().CopyTo(span); + Assert.That(Uuid64.Parse(span.Slice(0, 16)).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + + span.Clear(); + "{badc0ffe-e0ddf00d}".AsSpan().CopyTo(span); + Assert.That(Uuid64.Parse(span.Slice(0, 19)).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + + span.Clear(); + "{badc0ffee0ddf00d}".AsSpan().CopyTo(span); + Assert.That(Uuid64.Parse(span.Slice(0, 18)).ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + } +#endif + } + + [Test] + public void Test_Uuid64_ToString_Base62() + { + char[] chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz".ToCharArray(); + Assert.That(chars.Length, Is.EqualTo(62)); + + // single digit + for (int i = 0; i < 62;i++) + { + Assert.That(new Uuid64(i).ToString("C"), Is.EqualTo(chars[i].ToString())); + Assert.That(new Uuid64(i).ToString("Z"), Is.EqualTo("0000000000" + chars[i])); + } + + // two digits + for (int j = 1; j < 62; j++) + { + var prefix = chars[j].ToString(); + for (int i = 0; i < 62; i++) + { + Assert.That(new Uuid64(j * 62 + i).ToString("C"), Is.EqualTo(prefix + chars[i])); + Assert.That(new Uuid64(j * 62 + i).ToString("Z"), Is.EqualTo("000000000" + prefix + chars[i])); + } + } + + // 4 digits + var rnd = new Random(); + for (int i = 0; i < 100 * 1000; i++) + { + var a = rnd.Next(2) == 0 ? 0 : rnd.Next(62); + var b = rnd.Next(2) == 0 ? 0 : rnd.Next(62); + var c = rnd.Next(2) == 0 ? 
0 : rnd.Next(62); + var d = rnd.Next(62); + + ulong x = (ulong)a; + x += 62 * (ulong)b; + x += 62 * 62 * (ulong)c; + x += 62 * 62 * 62 * (ulong)d; + var uuid = new Uuid64(x); + + // no padding + string expected = + d > 0 ? ("" + chars[d] + chars[c] + chars[b] + chars[a]) : + c > 0 ? ("" + chars[c] + chars[b] + chars[a]) : + b > 0 ? ("" + chars[b] + chars[a]) : + ("" + chars[a]); + Assert.That(uuid.ToString("C"), Is.EqualTo(expected)); + + // padding + Assert.That(uuid.ToString("Z"), Is.EqualTo("0000000" + chars[d] + chars[c] + chars[b] + chars[a])); + } + + // Numbers of the form 62^n should be encoded as '1' followed by n x '0', for n from 0 to 10 + ulong val = 1; + for (int i = 0; i <= 10; i++) + { + Assert.That(new Uuid64(val).ToString("C"), Is.EqualTo("1" + new string('0', i)), "62^{0}", i); + val *= 62; + } + + // Numbers of the form 62^n - 1 should be encoded as n x 'z', for n from 1 to 10 + val = 0; + for (int i = 1; i <= 10; i++) + { + val += 61; + Assert.That(new Uuid64(val).ToString("C"), Is.EqualTo(new string('z', i)), "62^{0} - 1", i); + val *= 62; + } + + // well known values + Assert.That(new Uuid64(0xB45B07).ToString("C"), Is.EqualTo("narf")); + Assert.That(new Uuid64(0xE0D0ED).ToString("C"), Is.EqualTo("zort")); + Assert.That(new Uuid64(0xDEADBEEF).ToString("C"), Is.EqualTo("44pZgF")); + Assert.That(new Uuid64(0xDEADBEEF).ToString("Z"), Is.EqualTo("0000044pZgF")); + Assert.That(new Uuid64(0xBADC0FFEE0DDF00DUL).ToString("C"), Is.EqualTo("G2eGAUq82Hd")); + Assert.That(new Uuid64(0xBADC0FFEE0DDF00DUL).ToString("Z"), Is.EqualTo("G2eGAUq82Hd")); + + Assert.That(new Uuid64(255).ToString("C"), Is.EqualTo("47")); + Assert.That(new Uuid64(ushort.MaxValue).ToString("C"), Is.EqualTo("H31")); + Assert.That(new Uuid64(uint.MaxValue).ToString("C"), Is.EqualTo("4gfFC3")); + Assert.That(new Uuid64(ulong.MaxValue - 1).ToString("C"), Is.EqualTo("LygHa16AHYE")); + Assert.That(new Uuid64(ulong.MaxValue).ToString("C"), Is.EqualTo("LygHa16AHYF")); + } + + [Test] + public 
void Test_Uuid64_Parse_Base62() + { + + Assert.That(Uuid64.FromBase62("").ToUInt64(), Is.EqualTo(0)); + Assert.That(Uuid64.FromBase62("0").ToUInt64(), Is.EqualTo(0)); + Assert.That(Uuid64.FromBase62("9").ToUInt64(), Is.EqualTo(9)); + Assert.That(Uuid64.FromBase62("A").ToUInt64(), Is.EqualTo(10)); + Assert.That(Uuid64.FromBase62("Z").ToUInt64(), Is.EqualTo(35)); + Assert.That(Uuid64.FromBase62("a").ToUInt64(), Is.EqualTo(36)); + Assert.That(Uuid64.FromBase62("z").ToUInt64(), Is.EqualTo(61)); + Assert.That(Uuid64.FromBase62("10").ToUInt64(), Is.EqualTo(62)); + Assert.That(Uuid64.FromBase62("zz").ToUInt64(), Is.EqualTo(3843)); + Assert.That(Uuid64.FromBase62("100").ToUInt64(), Is.EqualTo(3844)); + Assert.That(Uuid64.FromBase62("zzzzzzzzzz").ToUInt64(), Is.EqualTo(839299365868340223UL)); + Assert.That(Uuid64.FromBase62("10000000000").ToUInt64(), Is.EqualTo(839299365868340224UL)); + Assert.That(Uuid64.FromBase62("LygHa16AHYF").ToUInt64(), Is.EqualTo(ulong.MaxValue), "ulong.MaxValue in base 62"); + + // well known values + + Assert.That(Uuid64.FromBase62("narf").ToUInt64(), Is.EqualTo(0xB45B07)); + Assert.That(Uuid64.FromBase62("zort").ToUInt64(), Is.EqualTo(0xE0D0ED)); + Assert.That(Uuid64.FromBase62("44pZgF").ToUInt64(), Is.EqualTo(0xDEADBEEF)); + Assert.That(Uuid64.FromBase62("0000044pZgF").ToUInt64(), Is.EqualTo(0xDEADBEEF)); + + Assert.That(Uuid64.FromBase62("G2eGAUq82Hd").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); + + Assert.That(Uuid64.FromBase62("4gfFC3").ToUInt64(), Is.EqualTo(uint.MaxValue)); + Assert.That(Uuid64.FromBase62("000004gfFC3").ToUInt64(), Is.EqualTo(uint.MaxValue)); + + + // invalid chars + Assert.That(() => Uuid64.FromBase62("/"), Throws.InstanceOf()); + Assert.That(() => Uuid64.FromBase62("@"), Throws.InstanceOf()); + Assert.That(() => Uuid64.FromBase62("["), Throws.InstanceOf()); + Assert.That(() => Uuid64.FromBase62("`"), Throws.InstanceOf()); + Assert.That(() => Uuid64.FromBase62("{"), Throws.InstanceOf()); + Assert.That(() => 
Uuid64.FromBase62("zaz/"), Throws.InstanceOf()); + Assert.That(() => Uuid64.FromBase62("z/o&r=g"), Throws.InstanceOf()); + + // overflow + Assert.That(() => Uuid64.FromBase62("zzzzzzzzzzz"), Throws.InstanceOf(), "62^11 - 1 => OVERFLOW"); + Assert.That(() => Uuid64.FromBase62("LygHa16AHYG"), Throws.InstanceOf(), "ulong.MaxValue + 1 => OVERFLOW"); + + // invalid length + Assert.That(() => Uuid64.FromBase62(default(string)), Throws.ArgumentNullException); + Assert.That(() => Uuid64.FromBase62("100000000000"), Throws.InstanceOf(), "62^11 => TOO BIG"); + + } + + [Test] + public void Test_Uuid64_NewUid() + { + var a = Uuid64.NewUuid(); + var b = Uuid64.NewUuid(); + Assert.That(a.ToUInt64(), Is.Not.EqualTo(b.ToUInt64())); + Assert.That(a, Is.Not.EqualTo(b)); + + const int N = 1 * 1000; + var uids = new HashSet(); + for (int i = 0; i < N; i++) + { + var uid = Uuid64.NewUuid(); + if (uids.Contains(uid.ToUInt64())) Assert.Fail("Duplicate Uuid64 generated: {0}", uid); + uids.Add(uid.ToUInt64()); + } + Assert.That(uids.Count, Is.EqualTo(N)); + } + + [Test] + public void Test_Uuid64RangomGenerator_NewUid() + { + var gen = Uuid64RandomGenerator.Default; + Assert.That(gen, Is.Not.Null); + + var a = gen.NewUuid(); + var b = gen.NewUuid(); + Assert.That(a.ToUInt64(), Is.Not.EqualTo(b.ToUInt64())); + Assert.That(a, Is.Not.EqualTo(b)); + + const int N = 1 * 1000; + var uids = new HashSet(); + for (int i = 0; i < N; i++) + { + var uid = gen.NewUuid(); + if (uids.Contains(uid.ToUInt64())) Assert.Fail("Duplicate Uuid64 generated: {0}", uid); + uids.Add(uid.ToUInt64()); + } + Assert.That(uids.Count, Is.EqualTo(N)); + } + + [Test] + public void Test_Uuid64_Equality_Check() + { + var a = new Uuid64(42); + var b = new Uuid64(42); + var c = new Uuid64(40) + 2; + var d = new Uuid64(0xDEADBEEF); + + // Equals(Uuid64) + Assert.That(a.Equals(a), Is.True, "a == a"); + Assert.That(a.Equals(b), Is.True, "a == b"); + Assert.That(a.Equals(c), Is.True, "a == c"); + Assert.That(a.Equals(d), Is.False, 
"a != d"); + + // == Uuid64 + Assert.That(a == b, Is.True, "a == b"); + Assert.That(a == c, Is.True, "a == c"); + Assert.That(a == d, Is.False, "a != d"); + + // != Uuid64 + Assert.That(a != b, Is.False, "a == b"); + Assert.That(a != c, Is.False, "a == c"); + Assert.That(a != d, Is.True, "a != d"); + + // == numbers + Assert.That(a == 42L, Is.True, "a == 42"); + Assert.That(a == 42UL, Is.True, "a == 42"); + Assert.That(d == 42L, Is.False, "d != 42"); + Assert.That(d == 42UL, Is.False, "d != 42"); + + // != numbers + Assert.That(a != 42L, Is.False, "a == 42"); + Assert.That(a != 42UL, Is.False, "a == 42"); + Assert.That(d != 42L, Is.True, "d != 42"); + Assert.That(d != 42UL, Is.True, "d != 42"); + + // Equals(objecct) + Assert.That(a.Equals((object)a), Is.True, "a == a"); + Assert.That(a.Equals((object)b), Is.True, "a == b"); + Assert.That(a.Equals((object)c), Is.True, "a == c"); + Assert.That(a.Equals((object)d), Is.False, "a != d"); + Assert.That(a.Equals((object)42L), Is.True, "a == 42"); + Assert.That(a.Equals((object)42UL), Is.True, "a == 42"); + Assert.That(d.Equals((object)42L), Is.False, "d != 42"); + Assert.That(d.Equals((object)42UL), Is.False, "d != 42"); + + } + + [Test] + public void Test_Uuid64_Ordering() + { + var a = new Uuid64(42); + var a2 = new Uuid64(42); + var b = new Uuid64(77); + + Assert.That(a.CompareTo(a), Is.EqualTo(0)); + Assert.That(a.CompareTo(b), Is.EqualTo(-1)); + Assert.That(b.CompareTo(a), Is.EqualTo(+1)); + + Assert.That(a < b, Is.True, "a < b"); + Assert.That(a <= b, Is.True, "a <= b"); + Assert.That(a < a2, Is.False, "a < a"); + Assert.That(a <= a2, Is.True, "a <= a"); + + Assert.That(a > b, Is.False, "a > b"); + Assert.That(a >= b, Is.False, "a >= b"); + Assert.That(a > a2, Is.False, "a > a"); + Assert.That(a >= a2, Is.True, "a >= a"); + + // parsed from string + Assert.That(Uuid64.Parse("137bcf31-0c8873a2") < Uuid64.Parse("604bdf8a-2512b4ad"), Is.True); + Assert.That(Uuid64.Parse("d8f17a26-82adb1a4") < 
Uuid64.Parse("22abbf33-1b2c1db0"), Is.False); + Assert.That(Uuid64.Parse("{137bcf31-0c8873a2}") > Uuid64.Parse("{604bdf8a-2512b4ad}"), Is.False); + Assert.That(Uuid64.Parse("{d8f17a26-82adb1a4}") > Uuid64.Parse("{22abbf33-1b2c1db0}"), Is.True); + Assert.That(Uuid64.FromBase62("2w6CTjUiXVp") < Uuid64.FromBase62("DVM0UnynZ1Q"), Is.True); + Assert.That(Uuid64.FromBase62("0658JY2ORSJ") > Uuid64.FromBase62("FMPaNaMEUWc"), Is.False); + + // verify byte ordering + var c = new Uuid64(0x0000000100000002); + var d = new Uuid64(0x0000000200000001); + Assert.That(c.CompareTo(d), Is.EqualTo(-1)); + Assert.That(d.CompareTo(c), Is.EqualTo(+1)); + + // verify that we can sort an array of Uuid64 + var uids = new Uuid64[100]; + for (int i = 0; i < uids.Length; i++) + { + uids[i] = Uuid64.NewUuid(); + } + Assume.That(uids, Is.Not.Ordered, "This can happen with a very small probability. Please try again"); + Array.Sort(uids); + Assert.That(uids, Is.Ordered); + + // ordering should be preserved in integer or textual form + + Assert.That(uids.Select(x => x.ToUInt64()), Is.Ordered, "order should be preserved when ordering by unsigned value"); + //note: ToInt64() will not work because of negative values + Assert.That(uids.Select(x => x.ToString()), Is.Ordered.Using(StringComparer.Ordinal), "order should be preserved when ordering by text (hexa)"); + Assert.That(uids.Select(x => x.ToString("Z")), Is.Ordered.Using(StringComparer.Ordinal), "order should be preserved when ordering by text (base62)"); + //note: ToString("C") will not work for ordering because it will produce "z" > "aa", instead of expected "0z" < "aa" + } + + [Test] + public void Test_Uuid64_Arithmetic() + { + var uid = Uuid64.Empty; + + Assert.That(uid + 42L, Is.EqualTo(new Uuid64(42))); + Assert.That(uid + 42UL, Is.EqualTo(new Uuid64(42))); + uid++; + Assert.That(uid.ToInt64(), Is.EqualTo(1)); + uid++; + Assert.That(uid.ToInt64(), Is.EqualTo(2)); + uid--; + Assert.That(uid.ToInt64(), Is.EqualTo(1)); + uid--; + 
Assert.That(uid.ToInt64(), Is.EqualTo(0)); + + uid = Uuid64.NewUuid(); + + Assert.That(uid + 123L, Is.EqualTo(new Uuid64(uid.ToInt64() + 123))); + Assert.That(uid + 123UL, Is.EqualTo(new Uuid64(uid.ToUInt64() + 123))); + + Assert.That(uid - 123L, Is.EqualTo(new Uuid64(uid.ToInt64() - 123))); + Assert.That(uid - 123UL, Is.EqualTo(new Uuid64(uid.ToUInt64() - 123))); + } + + [Test] + public void Test_Uuid64_Read_From_Bytes() + { + // test buffer with included padding + byte[] buf = { 0x55, 0x55, 0x55, 0x55, /* start */ 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF, /* stop */ 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA }; + var original = Uuid64.Parse("01234567-89ABCDEF"); + Assume.That(original.ToUInt64(), Is.EqualTo(0x0123456789ABCDEF)); + +#if ENABLE_SPAN + // ReadOnlySpan + Assert.That(Uuid64.Read(buf.AsSpan(4, 8)), Is.EqualTo(original)); +#endif + + // Slice + Assert.That(Uuid64.Read(buf.AsSlice(4, 8)), Is.EqualTo(original)); + + // byte[] + Assert.That(Uuid64.Read(buf.AsSlice(4, 8).GetBytesOrEmpty()), Is.EqualTo(original)); + + unsafe + { + fixed (byte* ptr = &buf[4]) + { + Assert.That(Uuid64.Read(ptr, 8), Is.EqualTo(original)); +#if ENABLE_SPAN + Assert.That(Uuid64.Read(new ReadOnlySpan(ptr, 8)), Is.EqualTo(original)); +#endif + } + } + } + +#if ENABLE_SPAN + + [Test] + public void Test_UUid64_WriteTo() + { + var original = Uuid64.Parse("01234567-89ABCDEF"); + Assume.That(original.ToUInt64(), Is.EqualTo(0x0123456789ABCDEF)); + + // span with more space + var scratch = Slice.Repeat(0xAA, 16); + original.WriteTo(scratch.AsSpan()); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF AA AA AA AA AA AA AA AA")); + + // span with no offset and exact size + scratch = Slice.Repeat(0xAA, 16); + original.WriteTo(scratch.AsSpan(0, 8)); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF AA AA AA AA AA AA AA AA")); + + // span with offset + scratch = Slice.Repeat(0xAA, 16); + original.WriteTo(scratch.AsSpan(4)); + 
Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA 01 23 45 67 89 AB CD EF AA AA AA AA")); + + // span with offset and exact size + scratch = Slice.Repeat(0xAA, 16); + original.WriteTo(scratch.AsSpan(4, 8)); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA 01 23 45 67 89 AB CD EF AA AA AA AA")); + + scratch = Slice.Repeat(0xAA, 16); + original.WriteToUnsafe(scratch.Array, scratch.Offset); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF AA AA AA AA AA AA AA AA")); + + unsafe + { + byte* buf = stackalloc byte[16]; + var span = USlice.FromUnmanagedPointer(buf, 16); + span.Fill(0xAA); + + original.WriteToUnsafe(buf + 2); + Assert.That(span.ToString("X"), Is.EqualTo("AA AA 01 23 45 67 89 AB CD EF AA AA AA AA AA AA")); + } + + // errors + + Assert.That(() => original.WriteTo(Span.Empty), Throws.InstanceOf(), "Target buffer is empty"); + Assert.That(() => original.WriteTo(null, 8), Throws.InstanceOf(), "Target buffer is null"); + Assert.That(() => original.WriteTo(null, 0), Throws.InstanceOf(), "Target buffer is null"); + + scratch = Slice.Repeat(0xAA, 16); + Assert.That(() => original.WriteTo(scratch.AsSpan(0, 7)), Throws.InstanceOf(), "Target buffer is too small"); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA"), "Buffer should not have been overwritten!"); + + } + + [Test] + public void Test_Uuid64_TryWriteTo() + { + var original = Uuid64.Parse("01234567-89ABCDEF"); + Assume.That(original.ToUInt64(), Is.EqualTo(0x0123456789ABCDEF)); + + // span with more space + var scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch.AsSpan()), Is.True); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF AA AA AA AA AA AA AA AA")); + + // span with no offset and exact size + scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch.AsSpan(0, 8)), Is.True); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF 
AA AA AA AA AA AA AA AA")); + + // span with offset + scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch.AsSpan(4)), Is.True); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA 01 23 45 67 89 AB CD EF AA AA AA AA")); + + // span with offset and exact size + scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch.AsSpan(4, 8)), Is.True); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA 01 23 45 67 89 AB CD EF AA AA AA AA")); + + // errors + + Assert.That(original.TryWriteTo(Span.Empty), Is.False, "Target buffer is empty"); + + scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch.AsSpan(0, 7)), Is.False, "Target buffer is too small"); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA"), "Buffer should not have been overwritten!"); + + } + +#else + + [Test] + public void Test_UUid64_WriteTo() + { + var original = Uuid64.Parse("01234567-89ABCDEF"); + Assume.That(original.ToUInt64(), Is.EqualTo(0x0123456789ABCDEF)); + + // span with more space + var scratch = Slice.Repeat(0xAA, 16); + original.WriteTo(scratch); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF AA AA AA AA AA AA AA AA")); + + // span with no offset and exact size + scratch = Slice.Repeat(0xAA, 16); + original.WriteTo(scratch.Substring(0, 8)); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF AA AA AA AA AA AA AA AA")); + + // span with offset + scratch = Slice.Repeat(0xAA, 16); + original.WriteTo(scratch.Substring(4)); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA 01 23 45 67 89 AB CD EF AA AA AA AA")); + + // span with offset and exact size + scratch = Slice.Repeat(0xAA, 16); + original.WriteTo(scratch.Substring(4, 8)); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA 01 23 45 67 89 AB CD EF AA AA AA AA")); + + scratch = Slice.Repeat(0xAA, 16); + original.WriteToUnsafe(scratch.Array, 
scratch.Offset); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF AA AA AA AA AA AA AA AA")); + + unsafe + { + byte* buf = stackalloc byte[16]; + UnsafeHelpers.FillUnsafe(buf, 16, 0xAA); + + original.WriteToUnsafe(buf + 2); + Assert.That(Slice.Copy(buf, 16).ToString("X"), Is.EqualTo("AA AA 01 23 45 67 89 AB CD EF AA AA AA AA AA AA")); + } + + // errors + + Assert.That(() => original.WriteTo(Slice.Empty), Throws.InstanceOf(), "Target buffer is empty"); + Assert.That(() => original.WriteTo(null, 8), Throws.InstanceOf(), "Target buffer is null"); + Assert.That(() => original.WriteTo(null, 0), Throws.InstanceOf(), "Target buffer is null"); + + scratch = Slice.Repeat(0xAA, 16); + Assert.That(() => original.WriteTo(scratch.Substring(0, 7)), Throws.InstanceOf(), "Target buffer is too small"); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA"), "Buffer should not have been overwritten!"); + + } + + [Test] + public void Test_Uuid64_TryWriteTo() + { + var original = Uuid64.Parse("01234567-89ABCDEF"); + Assume.That(original.ToUInt64(), Is.EqualTo(0x0123456789ABCDEF)); + + // span with more space + var scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch), Is.True); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF AA AA AA AA AA AA AA AA")); + + // span with no offset and exact size + scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch.Substring(0, 8)), Is.True); + Assert.That(scratch.ToString("X"), Is.EqualTo("01 23 45 67 89 AB CD EF AA AA AA AA AA AA AA AA")); + + // span with offset + scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch.Substring(4)), Is.True); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA 01 23 45 67 89 AB CD EF AA AA AA AA")); + + // span with offset and exact size + scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch.Substring(4, 8)), Is.True); + 
Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA 01 23 45 67 89 AB CD EF AA AA AA AA")); + + // errors + + Assert.That(original.TryWriteTo(Slice.Empty), Is.False, "Target buffer is empty"); + + scratch = Slice.Repeat(0xAA, 16); + Assert.That(original.TryWriteTo(scratch.Substring(0, 7)), Is.False, "Target buffer is too small"); + Assert.That(scratch.ToString("X"), Is.EqualTo("AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA"), "Buffer should not have been overwritten!"); + + } +#endif + } + +} diff --git a/FoundationDB.Tests/Uuid64Facts.cs b/FoundationDB.Tests/Uuid64Facts.cs deleted file mode 100644 index 6b9b4cf54..000000000 --- a/FoundationDB.Tests/Uuid64Facts.cs +++ /dev/null @@ -1,420 +0,0 @@ -#region BSD Licence -/* Copyright (c) 2013, Doxense SARL -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Doxense nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion - -namespace FoundationDB.Client.Tests -{ - using FoundationDB.Client; - using NUnit.Framework; - using System; - using System.Collections.Generic; - using System.Linq; - - [TestFixture] - public class Uuid64Facts - { - [Test] - public void Test_Uuid64_Empty() - { - Assert.That(Uuid64.Empty.ToString(), Is.EqualTo("00000000-00000000")); - Assert.That(Uuid64.Empty, Is.EqualTo(default(Uuid64))); - Assert.That(Uuid64.Empty, Is.EqualTo(new Uuid64(0L))); - Assert.That(Uuid64.Empty, Is.EqualTo(new Uuid64(0UL))); - Assert.That(Uuid64.Empty, Is.EqualTo(new Uuid64(new byte[8]))); - } - - [Test] - public void Test_Uuid64_Casting() - { - // implicit - Uuid64 a = (long)0; - Uuid64 b = (long)42; - Uuid64 c = (long)0xDEADBEEF; - Uuid64 d = 0xBADC0FFEE0DDF00DUL; - Uuid64 e = ulong.MaxValue; - - // ToUInt64 - Assert.That(a.ToUInt64(), Is.EqualTo(0UL)); - Assert.That(b.ToUInt64(), Is.EqualTo(42UL)); - Assert.That(c.ToUInt64(), Is.EqualTo(3735928559UL)); - Assert.That(d.ToUInt64(), Is.EqualTo(13464654573299691533UL)); - Assert.That(e.ToUInt64(), Is.EqualTo(ulong.MaxValue)); - - // ToInt64 - Assert.That(a.ToInt64(), Is.EqualTo(0L)); - Assert.That(b.ToInt64(), Is.EqualTo(42L)); - Assert.That(c.ToInt64(), Is.EqualTo(3735928559L)); - Assert.That(d.ToInt64(), Is.EqualTo(-4982089500409860083L)); - Assert.That(e.ToInt64(), Is.EqualTo(-1L)); - - // explict - Assert.That((long)a, Is.EqualTo(0)); - Assert.That((long)b, Is.EqualTo(42)); - Assert.That((long)c, Is.EqualTo(0xDEADBEEF)); - 
Assert.That((ulong)d, Is.EqualTo(13464654573299691533UL)); - Assert.That((ulong)e, Is.EqualTo(ulong.MaxValue)); - Assert.That((long)e, Is.EqualTo(-1L)); - } - - [Test] - public void Test_Uuid64_ToString() - { - var guid = new Uuid64(0xBADC0FFEE0DDF00DUL); - Assert.That(guid.ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); - Assert.That(guid.ToString(), Is.EqualTo("badc0ffe-e0ddf00d")); - Assert.That(guid.ToString("X"), Is.EqualTo("badc0ffee0ddf00d")); - Assert.That(guid.ToString("B"), Is.EqualTo("{badc0ffe-e0ddf00d}")); - Assert.That(guid.ToString("C"), Is.EqualTo("G2eGAUq82Hd")); - - guid = new Uuid64(0xDEADBEEFUL); - Assert.That(guid.ToUInt64(), Is.EqualTo(0xDEADBEEFUL)); - Assert.That(guid.ToString(), Is.EqualTo("00000000-deadbeef")); - Assert.That(guid.ToString("X"), Is.EqualTo("00000000deadbeef")); - Assert.That(guid.ToString("B"), Is.EqualTo("{00000000-deadbeef}")); - Assert.That(guid.ToString("C"), Is.EqualTo("44pZgF")); - } - - [Test] - public void Test_Uuid64_Parse_Hexa16() - { - var uuid = Uuid64.Parse("badc0ffe-e0ddf00d"); - Assert.That(uuid.ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); - - uuid = Uuid64.Parse("{badc0ffe-e0ddf00d}"); - Assert.That(uuid.ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); - - uuid = Uuid64.Parse("00000000-deadbeef"); - Assert.That(uuid.ToUInt64(), Is.EqualTo(0xDEADBEEFUL)); - - uuid = Uuid64.Parse("{00000000-deadbeef}"); - Assert.That(uuid.ToUInt64(), Is.EqualTo(0xDEADBEEFUL)); - } - - [Test] - public void Test_Uuid64_ToString_Base62() - { - char[] chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz".ToCharArray(); - Assert.That(chars.Length, Is.EqualTo(62)); - - // single digit - for (int i = 0; i < 62;i++) - { - Assert.That(new Uuid64(i).ToString("C"), Is.EqualTo(chars[i].ToString())); - Assert.That(new Uuid64(i).ToString("Z"), Is.EqualTo("0000000000" + chars[i])); - } - - // two digits - for (int j = 1; j < 62; j++) - { - var prefix = chars[j].ToString(); - for (int i = 0; i < 62; i++) - { - 
Assert.That(new Uuid64(j * 62 + i).ToString("C"), Is.EqualTo(prefix + chars[i])); - Assert.That(new Uuid64(j * 62 + i).ToString("Z"), Is.EqualTo("000000000" + prefix + chars[i])); - } - } - - // 4 digits - var rnd = new Random(); - for (int i = 0; i < 100 * 1000; i++) - { - var a = rnd.Next(2) == 0 ? 0 : rnd.Next(62); - var b = rnd.Next(2) == 0 ? 0 : rnd.Next(62); - var c = rnd.Next(2) == 0 ? 0 : rnd.Next(62); - var d = rnd.Next(62); - - ulong x = (ulong)a; - x += 62 * (ulong)b; - x += 62 * 62 * (ulong)c; - x += 62 * 62 * 62 * (ulong)d; - var uuid = new Uuid64(x); - - // no padding - string expected = - d > 0 ? ("" + chars[d] + chars[c] + chars[b] + chars[a]) : - c > 0 ? ("" + chars[c] + chars[b] + chars[a]) : - b > 0 ? ("" + chars[b] + chars[a]) : - ("" + chars[a]); - Assert.That(uuid.ToString("C"), Is.EqualTo(expected)); - - // padding - Assert.That(uuid.ToString("Z"), Is.EqualTo("0000000" + chars[d] + chars[c] + chars[b] + chars[a])); - } - - // Numbers of the form 62^n should be encoded as '1' followed by n x '0', for n from 0 to 10 - ulong val = 1; - for (int i = 0; i <= 10; i++) - { - Assert.That(new Uuid64(val).ToString("C"), Is.EqualTo("1" + new string('0', i)), "62^{0}", i); - val *= 62; - } - - // Numbers of the form 62^n - 1 should be encoded as n x 'z', for n from 1 to 10 - val = 0; - for (int i = 1; i <= 10; i++) - { - val += 61; - Assert.That(new Uuid64(val).ToString("C"), Is.EqualTo(new string('z', i)), "62^{0} - 1", i); - val *= 62; - } - - // well known values - Assert.That(new Uuid64(0xB45B07).ToString("C"), Is.EqualTo("narf")); - Assert.That(new Uuid64(0xE0D0ED).ToString("C"), Is.EqualTo("zort")); - Assert.That(new Uuid64(0xDEADBEEF).ToString("C"), Is.EqualTo("44pZgF")); - Assert.That(new Uuid64(0xDEADBEEF).ToString("Z"), Is.EqualTo("0000044pZgF")); - Assert.That(new Uuid64(0xBADC0FFEE0DDF00DUL).ToString("C"), Is.EqualTo("G2eGAUq82Hd")); - Assert.That(new Uuid64(0xBADC0FFEE0DDF00DUL).ToString("Z"), Is.EqualTo("G2eGAUq82Hd")); - - Assert.That(new 
Uuid64(255).ToString("C"), Is.EqualTo("47")); - Assert.That(new Uuid64(ushort.MaxValue).ToString("C"), Is.EqualTo("H31")); - Assert.That(new Uuid64(uint.MaxValue).ToString("C"), Is.EqualTo("4gfFC3")); - Assert.That(new Uuid64(ulong.MaxValue - 1).ToString("C"), Is.EqualTo("LygHa16AHYE")); - Assert.That(new Uuid64(ulong.MaxValue).ToString("C"), Is.EqualTo("LygHa16AHYF")); - } - - [Test] - public void Test_Uuid64_Parse_Base62() - { - - Assert.That(Uuid64.Parse("0").ToUInt64(), Is.EqualTo(0)); - Assert.That(Uuid64.Parse("9").ToUInt64(), Is.EqualTo(9)); - Assert.That(Uuid64.Parse("A").ToUInt64(), Is.EqualTo(10)); - Assert.That(Uuid64.Parse("Z").ToUInt64(), Is.EqualTo(35)); - Assert.That(Uuid64.Parse("a").ToUInt64(), Is.EqualTo(36)); - Assert.That(Uuid64.Parse("z").ToUInt64(), Is.EqualTo(61)); - Assert.That(Uuid64.Parse("10").ToUInt64(), Is.EqualTo(62)); - Assert.That(Uuid64.Parse("zz").ToUInt64(), Is.EqualTo(3843)); - Assert.That(Uuid64.Parse("100").ToUInt64(), Is.EqualTo(3844)); - Assert.That(Uuid64.Parse("zzzzzzzzzz").ToUInt64(), Is.EqualTo(839299365868340223UL)); - Assert.That(Uuid64.Parse("10000000000").ToUInt64(), Is.EqualTo(839299365868340224UL)); - Assert.That(Uuid64.Parse("LygHa16AHYF").ToUInt64(), Is.EqualTo(ulong.MaxValue), "ulong.MaxValue in base 62"); - - // well known values - - Assert.That(Uuid64.Parse("narf").ToUInt64(), Is.EqualTo(0xB45B07)); - Assert.That(Uuid64.Parse("zort").ToUInt64(), Is.EqualTo(0xE0D0ED)); - Assert.That(Uuid64.Parse("44pZgF").ToUInt64(), Is.EqualTo(0xDEADBEEF)); - Assert.That(Uuid64.Parse("0000044pZgF").ToUInt64(), Is.EqualTo(0xDEADBEEF)); - - Assert.That(Uuid64.Parse("G2eGAUq82Hd").ToUInt64(), Is.EqualTo(0xBADC0FFEE0DDF00DUL)); - - Assert.That(Uuid64.Parse("4gfFC3").ToUInt64(), Is.EqualTo(uint.MaxValue)); - Assert.That(Uuid64.Parse("000004gfFC3").ToUInt64(), Is.EqualTo(uint.MaxValue)); - - - // invalid chars - Assert.That(() => Uuid64.Parse("/"), Throws.InstanceOf()); - Assert.That(() => Uuid64.Parse("@"), Throws.InstanceOf()); - 
Assert.That(() => Uuid64.Parse("["), Throws.InstanceOf()); - Assert.That(() => Uuid64.Parse("`"), Throws.InstanceOf()); - Assert.That(() => Uuid64.Parse("{"), Throws.InstanceOf()); - Assert.That(() => Uuid64.Parse("zaz/"), Throws.InstanceOf()); - Assert.That(() => Uuid64.Parse("z/o&r=g"), Throws.InstanceOf()); - - // overflow - Assert.That(() => Uuid64.Parse("zzzzzzzzzzz"), Throws.InstanceOf(), "62^11 - 1 => OVERFLOW"); - Assert.That(() => Uuid64.Parse("LygHa16AHYG"), Throws.InstanceOf(), "ulong.MaxValue + 1 => OVERFLOW"); - - // invalid length - Assert.That(() => Uuid64.Parse(null), Throws.InstanceOf()); - Assert.That(() => Uuid64.Parse(""), Throws.InstanceOf()); - Assert.That(() => Uuid64.Parse("100000000000"), Throws.InstanceOf(), "62^11 => TOO BIG"); - - } - - [Test] - public void Test_Uuid64_NewUid() - { - var a = Uuid64.NewUuid(); - var b = Uuid64.NewUuid(); - Assert.That(a.ToUInt64(), Is.Not.EqualTo(b.ToUInt64())); - Assert.That(a, Is.Not.EqualTo(b)); - - const int N = 1 * 1000; - var uids = new HashSet(); - for (int i = 0; i < N; i++) - { - var uid = Uuid64.NewUuid(); - if (uids.Contains(uid.ToUInt64())) Assert.Fail("Duplicate Uuid64 generated: {0}", uid); - uids.Add(uid.ToUInt64()); - } - Assert.That(uids.Count, Is.EqualTo(N)); - } - - [Test] - public void Test_Uuid64RangomGenerator_NewUid() - { - var gen = Uuid64RandomGenerator.Default; - Assert.That(gen, Is.Not.Null); - - var a = gen.NewUuid(); - var b = gen.NewUuid(); - Assert.That(a.ToUInt64(), Is.Not.EqualTo(b.ToUInt64())); - Assert.That(a, Is.Not.EqualTo(b)); - - const int N = 1 * 1000; - var uids = new HashSet(); - for (int i = 0; i < N; i++) - { - var uid = gen.NewUuid(); - if (uids.Contains(uid.ToUInt64())) Assert.Fail("Duplicate Uuid64 generated: {0}", uid); - uids.Add(uid.ToUInt64()); - } - Assert.That(uids.Count, Is.EqualTo(N)); - } - - [Test] - public void Test_Uuid64_Equality_Check() - { - var a = new Uuid64(42); - var b = new Uuid64(42); - var c = new Uuid64(40) + 2; - var d = new 
Uuid64(0xDEADBEEF); - - // Equals(Uuid64) - Assert.That(a.Equals(a), Is.True, "a == a"); - Assert.That(a.Equals(b), Is.True, "a == b"); - Assert.That(a.Equals(c), Is.True, "a == c"); - Assert.That(a.Equals(d), Is.False, "a != d"); - - // == Uuid64 - Assert.That(a == b, Is.True, "a == b"); - Assert.That(a == c, Is.True, "a == c"); - Assert.That(a == d, Is.False, "a != d"); - - // != Uuid64 - Assert.That(a != b, Is.False, "a == b"); - Assert.That(a != c, Is.False, "a == c"); - Assert.That(a != d, Is.True, "a != d"); - - // == numbers - Assert.That(a == 42L, Is.True, "a == 42"); - Assert.That(a == 42UL, Is.True, "a == 42"); - Assert.That(d == 42L, Is.False, "d != 42"); - Assert.That(d == 42UL, Is.False, "d != 42"); - - // != numbers - Assert.That(a != 42L, Is.False, "a == 42"); - Assert.That(a != 42UL, Is.False, "a == 42"); - Assert.That(d != 42L, Is.True, "d != 42"); - Assert.That(d != 42UL, Is.True, "d != 42"); - - // Equals(objecct) - Assert.That(a.Equals((object)a), Is.True, "a == a"); - Assert.That(a.Equals((object)b), Is.True, "a == b"); - Assert.That(a.Equals((object)c), Is.True, "a == c"); - Assert.That(a.Equals((object)d), Is.False, "a != d"); - Assert.That(a.Equals((object)42L), Is.True, "a == 42"); - Assert.That(a.Equals((object)42UL), Is.True, "a == 42"); - Assert.That(d.Equals((object)42L), Is.False, "d != 42"); - Assert.That(d.Equals((object)42UL), Is.False, "d != 42"); - - } - - [Test] - public void Test_Uuid64_Ordering() - { - var a = new Uuid64(42); - var a2 = new Uuid64(42); - var b = new Uuid64(77); - - Assert.That(a.CompareTo(a), Is.EqualTo(0)); - Assert.That(a.CompareTo(b), Is.EqualTo(-1)); - Assert.That(b.CompareTo(a), Is.EqualTo(+1)); - - Assert.That(a < b, Is.True, "a < b"); - Assert.That(a <= b, Is.True, "a <= b"); - Assert.That(a < a2, Is.False, "a < a"); - Assert.That(a <= a2, Is.True, "a <= a"); - - Assert.That(a > b, Is.False, "a > b"); - Assert.That(a >= b, Is.False, "a >= b"); - Assert.That(a > a2, Is.False, "a > a"); - Assert.That(a >= 
a2, Is.True, "a >= a"); - - // parsed from string - Assert.That(new Uuid64("137bcf31-0c8873a2") < new Uuid64("604bdf8a-2512b4ad"), Is.True); - Assert.That(new Uuid64("d8f17a26-82adb1a4") < new Uuid64("22abbf33-1b2c1db0"), Is.False); - Assert.That(new Uuid64("{137bcf31-0c8873a2}") > new Uuid64("{604bdf8a-2512b4ad}"), Is.False); - Assert.That(new Uuid64("{d8f17a26-82adb1a4}") > new Uuid64("{22abbf33-1b2c1db0}"), Is.True); - Assert.That(new Uuid64("2w6CTjUiXVp") < new Uuid64("DVM0UnynZ1Q"), Is.True); - Assert.That(new Uuid64("0658JY2ORSJ") > new Uuid64("FMPaNaMEUWc"), Is.False); - - // verify byte ordering - var c = new Uuid64(0x0000000100000002); - var d = new Uuid64(0x0000000200000001); - Assert.That(c.CompareTo(d), Is.EqualTo(-1)); - Assert.That(d.CompareTo(c), Is.EqualTo(+1)); - - // verify that we can sort an array of Uuid64 - var uids = new Uuid64[100]; - for (int i = 0; i < uids.Length; i++) - { - uids[i] = Uuid64.NewUuid(); - } - Assume.That(uids, Is.Not.Ordered, "This can happen with a very small probability. 
Please try again"); - Array.Sort(uids); - Assert.That(uids, Is.Ordered); - - // ordering should be preserved in integer or textual form - - Assert.That(uids.Select(x => x.ToUInt64()), Is.Ordered, "order should be preserved when ordering by unsigned value"); - //note: ToInt64() will not work because of negative values - Assert.That(uids.Select(x => x.ToString()), Is.Ordered.Using(StringComparer.Ordinal), "order should be preserved when ordering by text (hexa)"); - Assert.That(uids.Select(x => x.ToString("Z")), Is.Ordered.Using(StringComparer.Ordinal), "order should be preserved when ordering by text (base62)"); - //note: ToString("C") will not work for ordering because it will produce "z" > "aa", instead of expected "0z" < "aa" - } - - [Test] - public void Test_Uuid64_Arithmetic() - { - var uid = Uuid64.Empty; - - Assert.That(uid + 42L, Is.EqualTo(new Uuid64(42))); - Assert.That(uid + 42UL, Is.EqualTo(new Uuid64(42))); - uid++; - Assert.That(uid.ToInt64(), Is.EqualTo(1)); - uid++; - Assert.That(uid.ToInt64(), Is.EqualTo(2)); - uid--; - Assert.That(uid.ToInt64(), Is.EqualTo(1)); - uid--; - Assert.That(uid.ToInt64(), Is.EqualTo(0)); - - uid = Uuid64.NewUuid(); - - Assert.That(uid + 123L, Is.EqualTo(new Uuid64(uid.ToInt64() + 123))); - Assert.That(uid + 123UL, Is.EqualTo(new Uuid64(uid.ToUInt64() + 123))); - - Assert.That(uid - 123L, Is.EqualTo(new Uuid64(uid.ToInt64() - 123))); - Assert.That(uid - 123UL, Is.EqualTo(new Uuid64(uid.ToUInt64() - 123))); - } - - } - -} diff --git a/FoundationDB.Tests/VersionStampFacts.cs b/FoundationDB.Tests/VersionStampFacts.cs new file mode 100644 index 000000000..2e190ecf4 --- /dev/null +++ b/FoundationDB.Tests/VersionStampFacts.cs @@ -0,0 +1,201 @@ +#region BSD Licence +/* Copyright (c) 2013-2018, Doxense SAS +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Doxense nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +#endregion + +namespace FoundationDB.Client.Tests +{ + using System; + using Doxense.Memory; + using NUnit.Framework; + + [TestFixture] + public class VersionStampFacts : FdbTest + { + + [Test] + public void Test_Incomplete_VersionStamp() + { + { // 80-bits (no user version) + var vs = VersionStamp.Incomplete(); + Log(vs); + Assert.That(vs.TransactionVersion, Is.EqualTo(ulong.MaxValue)); + Assert.That(vs.TransactionOrder, Is.EqualTo(ushort.MaxValue)); + Assert.That(vs.IsIncomplete, Is.True); + Assert.That(vs.HasUserVersion, Is.False, "80-bits VersionStamps don't have a user version"); + Assert.That(vs.UserVersion, Is.Zero, "80-bits VersionStamps don't have a user version"); + + Assert.That(vs.GetLength(), Is.EqualTo(10)); + Assert.That(vs.ToSlice().ToHexaString(' '), Is.EqualTo("FF FF FF FF FF FF FF FF FF FF")); + Assert.That(vs.ToString(), Is.EqualTo("@?")); + } + + { // 96-bits, default user version + var vs = VersionStamp.Incomplete(0); + Log(vs); + Assert.That(vs.TransactionVersion, Is.EqualTo(ulong.MaxValue)); + Assert.That(vs.TransactionOrder, Is.EqualTo(ushort.MaxValue)); + Assert.That(vs.IsIncomplete, Is.True); + Assert.That(vs.HasUserVersion, Is.True, "96-bits VersionStamps have a user version"); + Assert.That(vs.UserVersion, Is.EqualTo(0)); + + Assert.That(vs.GetLength(), Is.EqualTo(12)); + Assert.That(vs.ToSlice().ToHexaString(' '), Is.EqualTo("FF FF FF FF FF FF FF FF FF FF 00 00")); + Assert.That(vs.ToString(), Is.EqualTo("@?#0")); + } + + { // 96 bits, custom user version + var vs = VersionStamp.Incomplete(123); + Log(vs); + Assert.That(vs.TransactionVersion, Is.EqualTo(ulong.MaxValue)); + Assert.That(vs.TransactionOrder, Is.EqualTo(ushort.MaxValue)); + Assert.That(vs.HasUserVersion, Is.True); + Assert.That(vs.UserVersion, Is.EqualTo(123)); + Assert.That(vs.IsIncomplete, Is.True); + Assert.That(vs.ToSlice().ToHexaString(' '), Is.EqualTo("FF FF FF FF FF FF FF FF FF FF 00 7B")); + Assert.That(vs.ToString(), Is.EqualTo("@?#123")); + } + + { // 96 
bits, large user version + var vs = VersionStamp.Incomplete(12345); + Log(vs); + Assert.That(vs.TransactionVersion, Is.EqualTo(ulong.MaxValue)); + Assert.That(vs.TransactionOrder, Is.EqualTo(ushort.MaxValue)); + Assert.That(vs.HasUserVersion, Is.True); + Assert.That(vs.UserVersion, Is.EqualTo(12345)); + Assert.That(vs.IsIncomplete, Is.True); + Assert.That(vs.ToSlice().ToHexaString(' '), Is.EqualTo("FF FF FF FF FF FF FF FF FF FF 30 39")); + Assert.That(vs.ToString(), Is.EqualTo("@?#12345")); + } + + Assert.That(() => VersionStamp.Incomplete(-1), Throws.ArgumentException, "User version cannot be negative"); + Assert.That(() => VersionStamp.Incomplete(65536), Throws.ArgumentException, "User version cannot be larger than 0xFFFF"); + + { + var writer = default(SliceWriter); + writer.WriteFixed24BE(0xAAAAAA); + VersionStamp.Incomplete(123).WriteTo(ref writer); + writer.WriteFixed24BE(0xAAAAAA); + Assert.That(writer.ToSlice().ToHexaString(' '), Is.EqualTo("AA AA AA FF FF FF FF FF FF FF FF FF FF 00 7B AA AA AA")); + + var reader = new SliceReader(writer.ToSlice()); + reader.Skip(3); + var vs = VersionStamp.Parse(reader.ReadBytes(12)); + Assert.That(reader.Remaining, Is.EqualTo(3)); + + Assert.That(vs.TransactionVersion, Is.EqualTo(ulong.MaxValue)); + Assert.That(vs.TransactionOrder, Is.EqualTo(ushort.MaxValue)); + Assert.That(vs.UserVersion, Is.EqualTo(123)); + Assert.That(vs.IsIncomplete, Is.True); + } + + { + var buf = Slice.Repeat(0xAA, 18); + VersionStamp.Incomplete(123).WriteTo(buf.Substring(3, 12)); + Assert.That(buf.ToHexaString(' '), Is.EqualTo("AA AA AA FF FF FF FF FF FF FF FF FF FF 00 7B AA AA AA")); + } + } + + [Test] + public void Test_Complete_VersionStamp() + { + { // 80-bits, no user version + var vs = VersionStamp.Complete(0x0123456789ABCDEFUL, 123); + Log(vs); + Assert.That(vs.TransactionVersion, Is.EqualTo(0x0123456789ABCDEFUL)); + Assert.That(vs.TransactionOrder, Is.EqualTo(123)); + Assert.That(vs.HasUserVersion, Is.False); + Assert.That(vs.UserVersion, 
Is.Zero); + Assert.That(vs.IsIncomplete, Is.False); + Assert.That(vs.ToSlice().ToHexaString(' '), Is.EqualTo("01 23 45 67 89 AB CD EF 00 7B")); + Assert.That(vs.ToString(), Is.EqualTo("@81985529216486895-123")); + } + + { // 96 bits, default user version + var vs = VersionStamp.Complete(0x0123456789ABCDEFUL, 123, 0); + Log(vs); + Assert.That(vs.TransactionVersion, Is.EqualTo(0x0123456789ABCDEFUL)); + Assert.That(vs.TransactionOrder, Is.EqualTo(123)); + Assert.That(vs.HasUserVersion, Is.True); + Assert.That(vs.UserVersion, Is.Zero); + Assert.That(vs.IsIncomplete, Is.False); + Assert.That(vs.ToSlice().ToHexaString(' '), Is.EqualTo("01 23 45 67 89 AB CD EF 00 7B 00 00")); + Assert.That(vs.ToString(), Is.EqualTo("@81985529216486895-123#0")); + } + + { // custom user version + var vs = VersionStamp.Complete(0x0123456789ABCDEFUL, 123, 456); + Log(vs); + Assert.That(vs.TransactionVersion, Is.EqualTo(0x0123456789ABCDEFUL)); + Assert.That(vs.TransactionOrder, Is.EqualTo(123)); + Assert.That(vs.HasUserVersion, Is.True); + Assert.That(vs.UserVersion, Is.EqualTo(456)); + Assert.That(vs.IsIncomplete, Is.False); + Assert.That(vs.ToSlice().ToHexaString(' '), Is.EqualTo("01 23 45 67 89 AB CD EF 00 7B 01 C8")); + Assert.That(vs.ToString(), Is.EqualTo("@81985529216486895-123#456")); + } + + { // two bytes user version + var vs = VersionStamp.Complete(0x0123456789ABCDEFUL, 12345, 6789); + Log(vs); + Assert.That(vs.TransactionVersion, Is.EqualTo(0x0123456789ABCDEFUL)); + Assert.That(vs.TransactionOrder, Is.EqualTo(12345)); + Assert.That(vs.UserVersion, Is.EqualTo(6789)); + Assert.That(vs.IsIncomplete, Is.False); + Assert.That(vs.ToSlice().ToHexaString(' '), Is.EqualTo("01 23 45 67 89 AB CD EF 30 39 1A 85")); + Assert.That(vs.ToString(), Is.EqualTo("@81985529216486895-12345#6789")); + } + + Assert.That(() => VersionStamp.Complete(0x0123456789ABCDEFUL, 0, -1), Throws.ArgumentException, "User version cannot be negative"); + Assert.That(() => VersionStamp.Complete(0x0123456789ABCDEFUL, 0, 
65536), Throws.ArgumentException, "User version cannot be larger than 0xFFFF"); + + { + var writer = default(SliceWriter); + writer.WriteFixed24BE(0xAAAAAA); + VersionStamp.Complete(0x0123456789ABCDEFUL, 123, 456).WriteTo(ref writer); + writer.WriteFixed24BE(0xAAAAAA); + Assert.That(writer.ToSlice().ToHexaString(' '), Is.EqualTo("AA AA AA 01 23 45 67 89 AB CD EF 00 7B 01 C8 AA AA AA")); + + var reader = new SliceReader(writer.ToSlice()); + reader.Skip(3); + var vs = VersionStamp.Parse(reader.ReadBytes(12)); + Assert.That(reader.Remaining, Is.EqualTo(3)); + + Assert.That(vs.TransactionVersion, Is.EqualTo(0x0123456789ABCDEFUL)); + Assert.That(vs.TransactionOrder, Is.EqualTo(123)); + Assert.That(vs.UserVersion, Is.EqualTo(456)); + Assert.That(vs.IsIncomplete, Is.False); + } + + { + var buf = Slice.Repeat(0xAA, 18); + VersionStamp.Complete(0x0123456789ABCDEFUL, 123, 456).WriteTo(buf.Substring(3, 12)); + Assert.That(buf.ToHexaString(' '), Is.EqualTo("AA AA AA 01 23 45 67 89 AB CD EF 00 7B 01 C8 AA AA AA")); + } + } + + } +} diff --git a/FoundationDB.Tests/packages.config b/FoundationDB.Tests/packages.config index bbae1a323..147974643 100644 --- a/FoundationDB.Tests/packages.config +++ b/FoundationDB.Tests/packages.config @@ -1,7 +1,7 @@  - - - - + + + + \ No newline at end of file diff --git a/FoundationDb.Client.sln.DotSettings b/FoundationDb.Client.sln.DotSettings index 47110b55c..778ade89c 100644 --- a/FoundationDb.Client.sln.DotSettings +++ b/FoundationDb.Client.sln.DotSettings @@ -38,6 +38,7 @@ ALWAYS_ADD 1 True + NEVER False False True @@ -50,19 +51,31 @@ False CHOP_ALWAYS CHOP_IF_LONG + True System System + False + True UseVarWhenEvident UseVarWhenEvident BE + TLS <Policy Inspect="True" Prefix="" Suffix="" Style="AaBb"><ExtraRule Prefix="" Suffix="" Style="AA_BB" /></Policy> <Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /> <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb"><ExtraRule Prefix="_" Suffix="" Style="aaBb" /></Policy> <Policy 
Inspect="True" Prefix="" Suffix="" Style="aaBb"><ExtraRule Prefix="_" Suffix="" Style="aaBb" /></Policy> <Policy Inspect="True" Prefix="" Suffix="" Style="AaBb"><ExtraRule Prefix="" Suffix="" Style="AA_BB" /></Policy> - <Policy Inspect="True" Prefix="m_" Suffix="" Style="aaBb" /> - <Policy Inspect="True" Prefix="s_" Suffix="" Style="aaBb" /> + <Policy Inspect="True" Prefix="m_" Suffix="" Style="aaBb"><ExtraRule Prefix="" Suffix="" Style="AaBb" /></Policy> + <Policy Inspect="True" Prefix="s_" Suffix="" Style="aaBb"><ExtraRule Prefix="" Suffix="" Style="AaBb" /></Policy> <Policy Inspect="True" Prefix="s_" Suffix="" Style="aaBb"><ExtraRule Prefix="" Suffix="" Style="AaBb" /></Policy> <Policy Inspect="True" Prefix="" Suffix="" Style="AaBb"><ExtraRule Prefix="m_" Suffix="" Style="aaBb" /></Policy> + True + True + True + True + True + True True - True \ No newline at end of file + True + True + False \ No newline at end of file diff --git a/LICENSE.md b/LICENSE.md index 2a92e2397..40d8b56cd 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,4 +1,4 @@ -Copyright (c) 2013-2014, Doxense SAS +Copyright (c) 2013-2018, Doxense SAS All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/README.md b/README.md index ee9775c6c..e011b8047 100644 --- a/README.md +++ b/README.md @@ -1,115 +1,61 @@ FoundationDB.Net Client ======================= -__Important Notice__ - -> As you may or may not know, the FoundationDB K/V Store engine is no longer available for download, following the acquisition of FoundationDB LLC by Apple. This means that, even though the .NET binding itself is open source, it is not usable without access to a license for the underlying database engine. Given these circumstances, this means that this project is no longer under active development, and will now transition into a maintenance state. 
- -> If you are actively using the .NET FoundationDB Binding in your application, and are in a situation where you have an existing infrastructure to support, please know that this repository will stay up as long as is needed, and there are currently no plans to close it down. If you want to be safe, please feel free to clone or fork it! We will try to help you fix any bug or issues, as is humanely possible. Since the license of the .NET Binding is open source (3-clause BSD), we encourage you to fork it and use it in anyway you think is necessary to help you transition to something else. In any case, we urge you to contact people at FoundationDB to help you on that (according to the message on the Community Forum, you should get in touch with info@foundationdb.com) - -> If you were looking for a new database solution for a new project, you'll have to keep looking. Good luck, because FoundationDB was probably the best in its class. - -> Some parts of this project could probably be lifted up and used or recycled in some shape or form into other projects (ex: Tuples, Slice, AsyncLINQ, ...). Please get in touch if you are interested in this! - -> I'd like to thank everyone who participated in this project, and hope that you all will be able to land back on your feet! It was a fun ride and an invaluable experience. Merci à vous! - -__Q&A / tl;dr__ - -What is the situation regarding FoundationDB (the database)? -> Apple acquired the company that makes FoundationDB. It does not seem to be available for download anymore. That's all we know. Please contact info@foundationdb.com if you want more details. - -What is this project exactly? -> This project was a .NET wrapper for the underlying native Client API for the FoundationDB Key/Value Store, as well a some infrastructure to make it easier to work with key/value stores using C#. 
The wrapper itself is opensource, but it needs a native DLL (fdb_c.dll) to talk to the database, which was licensed by FoundationDB LLC and was closed source. - -Where can I find the SQL layer or parsers? -> This project only dealt specifically with the Key/Value Store engine, and did not have any link with either the SQL Layer or the SQL parser. We don't have any knowledge on the fate of these two projects. - -Is this project under active development? -> Not anymore. Bugs will be fixed but don't expect any major activity. - -Will this repository go away like the other ones? -> No plans for that. If you want to make sure, please clone or fork it! - -I'm using it in production, what gives? -> Please contact the folks at info@foundationdb.com ASAP! - -Then, what can I use to replace FoundationDB in my application? -> If you find something as good as FDB, please let us know :) - -What attributes should I look for, if I want something similar? -> You'll need an ordered key/value store, with multi-key transaction support, and ACID guarantees. Most of the Layers are designed to work with an underlying K/V store that offers strong ACID guarantees, and will probably not work with other K/V store that don't offer these. If we find something that works as a replacement, we'll make sure to update this notice. For know, you should maybe look at PostgreSQL. - -I have a bug, can you help us? -> Please open an Issue and if this is at the .NET binding level, we will try to help you if we can! - -Are there some interesting bits in this project worth looking at? -> Yes! Please take a look at the Tuple, Slice and FdbAsyncEnumerable classes! This was a fun project to make C# work well with an API mostly designed with dynamic languages (like Python or node.js) in mind. There is also the Memory Stoage project which was an attempt to have an in-memory version of the K/V store (for unit tests or embedded scenarios). 
- -__End of Important Notice__ - This code is licensed under the 3-clause BSD Licence. [![Build status](https://ci.appveyor.com/api/projects/status/83u4pd2ckevdtb57?svg=true)](https://ci.appveyor.com/project/KrzysFR/foundationdb-dotnet-client) -> __There is an incoming API change that may break a few things, regarding the use of Subspaces and Tuples__. The new API is currently in the `refac_tuples` branch, and will be merge soon in `master` for the 0.9 release. If you are just starting, you should probably use the new branch. If you already have existing code, there will be a document explaining the changes and how to fix your source code. __See [#42](https://github.com/Doxense/foundationdb-dotnet-client/pull/42) for more details.__ - -> __There is another incoming change, regarding the way FDB's Futures are implemented under the hood, which should fix some random crash issues under heavy load__. Some of the problems have already been fixed in master, but the new branch `future_ng` is a complete rework, in collaboration with the folks at FoundationDB. __See [#54](https://github.com/Doxense/foundationdb-dotnet-client/pull/54) for more details__ - -> Both these branches are already used in production. Once everything is merged and tested, we will be ready for 1.0. - -> The default API level selected on start is still 200 by default, but this will probably change to 300 by default for 1.0. If you are not doing it already, you should make sure to use `Fdb.UseApiVersion(200)` (or 300) if you want to lock your application to a specific level and be safe against future changes. - How to use ---------- ```CSharp -// note: most operations require a valid CancellationToken, which you need to provide -CancellationToken token = ....; // host-provided cancellation token +// note: most operations require a valid CancellationToken, which you need to obtain from the context (HTTP request, component lifetime, timeout, ...) 
+CancellationToken cancel = ....; // Connect to the db "DB" using the default cluster file using (var db = await Fdb.OpenAsync()) { // we will use a "Test" directory to isolate our test data - var location = await db.Directory.CreateOrOpenAsync("Test", token); - // this location will remember the allocated prefix, and - // automatically add it as a prefix to all our keys + var location = await db.Directory.CreateOrOpenAsync("Test", cancel); + // this location will remember the allocated prefix, and + // automatically add it as a prefix to all our keys // we need a transaction to be able to make changes to the db - // note: production code should use "db.WriteAsync(..., token)" instead - using (var trans = db.BeginTransaction(token)) + // note: production code should use "db.WriteAsync(..., cancel)" instead + using (var trans = db.BeginTransaction(cancel)) { - // For our convenience, we will use the Tuple Encoding format for our keys, - // which is accessible via the "location.Tuples" helper. We could have used - // any other encoding for the keys. Tuples are simple to use and have some - // intereseting ordering properties that make it easy to work with. - // => All our keys will be encoded as the packed tuple ({Test}, "foo"), - // making them very nice and compact. We could also use integers or GUIDs - // for the keys themselves. - + // For our convenience, we will use the Tuple Encoding format for our keys, + // which is accessible via the "location.Keys" helper. We could have used + // any other encoding for the keys. Tuples are simple to use and have some + // intereseting ordering properties that make it easy to work with. + // => All our keys will be encoded as the packed tuple ({Test}, "foo"), + // making them very nice and compact. We could also use integers or GUIDs + // for the keys themselves. 
+ // Set "Hello" key to "World" - trans.Set( - location.Tuples.EncodeKey("Hello"), - Slice.FromString("World") // UTF-8 encoded string - ); + trans.Set( + location.Keys.Encode("Hello"), + Slice.FromString("World") // UTF-8 encoded string + ); // Set "Count" key to 42 trans.Set( - location.Tuples.EncodeKey("Count"), - Slice.FromInt32(42) // 1 byte - ); + location.Keys.Encode("Count"), + Slice.FromInt32(42) // 1 byte + ); // Atomically add 123 to "Total" trans.AtomicAdd( - location.Tuples.EncodeKey("Total"), - Slice.FromFixed32(123) // 4 bytes, Little Endian - ); + location.Keys.Encode("Total"), + Slice.FromFixed32(123) // 4 bytes, Little Endian + ); // Set bits 3, 9 and 30 in the bit map stored in the key "Bitmap" trans.AtomicOr( - location.Tuples.EncodeKey("Bitmap"), - Slice.FromFixed32((1 << 3) | (1 << 9) | (1 << 30)) // 4 bytes, Little Endian - ); + location.Keys.Encode("Bitmap"), + Slice.FromFixed32((1 << 3) | (1 << 9) | (1 << 30)) // 4 bytes, Little Endian + ); // commit the changes to the db await trans.CommitAsync(); @@ -118,20 +64,20 @@ using (var db = await Fdb.OpenAsync()) } // we also need a transaction to read from the db - // note: production code should use "db.ReadAsync(..., token)" instead. - using (var trans = db.BeginReadOnlyTransaction(token)) - { + // note: production code should use "db.ReadAsync(..., cancel)" instead. + using (var trans = db.BeginReadOnlyTransaction(cancel)) + { // Read ("Test", "Hello", ) as a string - Slice value = await trans.GetAsync(location.Tuples.EncodeKey("Hello")); + Slice value = await trans.GetAsync(location.Keys.Encode("Hello")); Console.WriteLine(value.ToUnicode()); // -> World // Read ("Test", "Count", ) as an int - value = await trans.GetAsync(location.Tuples.EncodeKey("Count")); + value = await trans.GetAsync(location.Keys.Encode("Count")); Console.WriteLine(value.ToInt32()); // -> 42 - + // missing keys give a result of Slice.Nil, which is the equivalent // of "key not found". 
- value = await trans.GetAsync(location.Tuples.EncodeKey("NotFound")); + value = await trans.GetAsync(location.Keys.Encode("NotFound")); Console.WriteLine(value.HasValue); // -> false Console.WriteLine(value == Slice.Nil); // -> true // note: there is also Slice.Empty that is returned for existing keys @@ -146,31 +92,31 @@ using (var db = await Fdb.OpenAsync()) // First we will create a subdirectory for our little array, // just so that is does not interfere with other things in the cluster. - var list = await location.CreateOrOpenAsync(db, "List", token); + var list = await location.CreateOrOpenAsync(db, "List", cancel); - // here we will use db.WriteAsync(...) that implements a retry loop. - // this helps protect you against intermitent failures by automatically - // retrying the lambda method you provided. - await db.WriteAsync((trans) => - { + // here we will use db.WriteAsync(...) that implements a retry loop. + // this helps protect you against intermitent failures by automatically + // retrying the lambda method you provided. + await db.WriteAsync((trans) => + { // add some data to the list with the format: (..., index) = value - trans.Set(list.Tuples.EncodeKey(0), Slice.FromString("AAA")); - trans.Set(list.Tuples.EncodeKey(1), Slice.FromString("BBB")); - trans.Set(list.Tuples.EncodeKey(2), Slice.FromString("CCC")); + trans.Set(list.Keys.Encode(0), Slice.FromString("AAA")); + trans.Set(list.Keys.Encode(1), Slice.FromString("BBB")); + trans.Set(list.Keys.Encode(2), Slice.FromString("CCC")); // The actual keys will be a concatenation of the prefix of 'list', // and a packed tuple containing the index. Since we are using the // Directory Layer, this should still be fairly small (between 4 // and 5 bytes). The values are raw slices, which means that your // application MUST KNOW that they are strings in order to decode // them. 
If you wan't any tool to be able to find out the type of - // your values, you can also use FdbTuple.Pack("AAA") to create + // your values, you can also use TuPack.EncodeKey("AAA") to create // the values, at the cost of 2 extra bytes per entry. // This is always a good idea to maintain a counter of keys in our array. // The cheapest way to do that, is to reuse the subspace key itself, which // is 'in' the subspace, but not 'inside': trans.Set(list.Key, Slice.FromFixed32(3)); - // We could use FdbTuple.Pack(3) here, but have a fixed size counter + // We could use TuPack.EncodeKey(3) here, but have a fixed size counter // makes it easy to use AtomicAdd(...) to increment (or decrement) the value // when adding or removing entries in the array. @@ -183,8 +129,8 @@ using (var db = await Fdb.OpenAsync()) // If something goes wrong with the database, this lambda will be called again, // until the problems goes away, or the retry loop decides that there is no point // in retrying anymore, and the exception will be re-thrown. - - }, token); // don't forget the cancellation token, which can stop the retry loop ! + + }, cancel); // don't forget the CancellationToken, which can stop the retry loop ! // We can read everything back in one shot, using an async "LINQ" query. var results = await db.QueryAsync((trans) => @@ -192,17 +138,17 @@ using (var db = await Fdb.OpenAsync()) // do a range query on the list subspace, which should return all the pairs // in the subspace, one for each entry in the array. // We exploit the fact that subspace.Tuples.ToRange() usually does not include - // the subspace prefix itself, because we don't want our counter to be returned + // the subspace prefix itself, because we don't want our counter to be returned // with the query itself. 
return trans // ask for all keys that are _inside_ our subspace - .GetRange(list.Tuples.ToRange()) + .GetRange(list.Keys.ToRange()) // transform the resultoing KeyValuePair into something // nicer to use, like a typed KeyValuePair .Select((kvp) => new KeyValuePair( // unpack the tuple and returns the last item as an int - list.Tuples.DecodeLast(kvp.Key), + list.Keys.DecodeLast(kvp.Key), // convert the value into an unicode string kvp.Value.ToUnicode() )) @@ -211,11 +157,11 @@ using (var db = await Fdb.OpenAsync()) // fetch ALL the values from the db! .Where((kvp) => kvp.Key % 2 == 0); - // note that QueryAsync() is a shortcut for calling ReadAsync(...) and then - // calling ToListAsync() on the async LINQ Query. If you want to call a - // different operator than ToListAsync(), just use ReadAsync() + // note that QueryAsync() is a shortcut for calling ReadAsync(...) and then + // calling ToListAsync() on the async LINQ Query. If you want to call a + // different operator than ToListAsync(), just use ReadAsync() - }, token); + }, cancel); // results.Count -> 2 // results[0] -> KeyValuePair(0, "AAA") @@ -234,7 +180,7 @@ Please note that the above sample is ok for a simple HelloWorld.exe app, but for - You should NOT open a new connection (`Fdb.OpenAsync()`) everytime you need to read or write something. You should open a single database instance somewhere in your startup code, and use that instance everywhere. If you are using a Repository pattern, you can store the IFdbDatabase instance there. Another option is to use a Dependency Injection framework -- You should probably not create and transactions yourself (`db.CreateTransaction()`), and instead prefer using the standard retry loops implemented by `db.ReadAsync(...)`, `db.WriteAsync(...)` and `db.ReadWriteAsync(...)` which will handle all the gory details for you. They will ensure that your transactions are retried in case of conflicts or transient errors. 
See https://foundationdb.com/key-value-store/documentation/developer-guide.html#conflict-ranges +- You should probably not create and transactions yourself (`db.CreateTransaction()`), and instead prefer using the standard retry loops implemented by `db.ReadAsync(...)`, `db.WriteAsync(...)` and `db.ReadWriteAsync(...)` which will handle all the gory details for you. They will ensure that your transactions are retried in case of conflicts or transient errors. See https://apple.github.io/foundationdb/developer-guide.html#conflict-ranges - Use the `Tuple Layer` to encode and decode your keys, if possible. This will give you a better experience overall, since all the logging filters and key formatters will try to decode tuples by default, and display `(42, "hello", true)` instead of the cryptic `<15>*<02>hello<00><15><01>`. For simple values like strings (ex: JSON text) or 32-bit/64-bit numbers, you can also use `Slice.FromString(...)`, or `Slice.FromInt32(...)`. For composite values, you can also use the Tuple encoding, if the elements types are simple (string, numbers, dates, ...). You can also use custom encoders via the `IKeyEncoder` and `IValueEncoder`, which you can get from the helper class `KeyValueEncoders`, or roll your own by implementing these interfaces. @@ -242,27 +188,23 @@ Please note that the above sample is ok for a simple HelloWorld.exe app, but for - You should NEVER block on Tasks by using .Wait() from non-async code. This will either dead-lock your application, or greatly degrade the performances. If you cannot do otherwise (ex: top-level call in a `void Main()` then at least wrap your code inside a `static async Task MainAsync(string[] args)` method, and do a `MainAsync(args).GetAwaiter().GetResult()`. -- Don't give in, and resist the tentation of passing `CancellationToken.None` everywhere! Try to obtain a valid `CancellationToken` from your execution context (HTTP host, Task Worker environment, ...). 
This will allow the environment to safely shutdown and abort all pending transactions, without any risks of data corruption. If you don't have any easy source (like in a unit test framework), then at list provide you own using a global `CancellationTokenSource` that you can `Cancel()` in your shutdown code path. From inside your transactional code, you can get back the token anytime via the `tr.Cancellation` property which will trigger if the transaction completes or is aborted. +- Don't give in, and resist the tenmptation of passing `CancellationToken.None` everywhere! Try to obtain a valid `CancellationToken` from your execution context (HTTP host, Task Worker environment, ...). This will allow the environment to safely shutdown and abort all pending transactions, without any risks of data corruption. If you don't have any easy source (like in a unit test framework), then at list provide you own using a global `CancellationTokenSource` that you can `Cancel()` in your shutdown code path. From inside your transactional code, you can get back the token anytime via the `tr.Cancellation` property which will trigger if the transaction completes or is aborted. How to build ------------ ### Visual Studio Solution -You will need Visual Studio .NET 2012 or 2013 and .NET 4.5 minimum to compile the solution. +You will need Visual Studio 2017 version 15.5 or above to build the solution (C# 7.2 and .NET Standard 2.0 support is required). -You will also need to obtain the 'fdb_c.dll' C API binding from the foundationdb.com wesite, by installing the client SDK: +You will also need to obtain the 'fdb_c.dll' C API binding from the foundationdb.org wesite, by installing the client SDK: -* Go to http://foundationdb.com/get/ and download the Windows x64 MSI. You can use the free Community edition that gives you unlimited server processes for development and testing. +* Go to https://www.foundationdb.org/download/ and download the Windows x64 MSI. 
You can use the free Community edition that gives you unlimited server processes for development and testing. * Install the MSI, selecting the default options. * Go to `C:\Program Files\foundationdb\bin\` and make sure that `fdb_c.dll` is there. * Open the FoundationDb.Client.sln file in Visual Studio 2012. * Choose the Release or Debug configuration, and rebuild the solution. -If you see errors on 'await' or 'async' keywords, please make sure that you are using Visual Studio 2012 or 2013 RC, and not an earlier version. - -If you see the error `Unable to locate '...\foundationdb-dotnet-client\.nuget\nuget.exe'` then you need to run the `Enable Nuget Package Restore` entry in the `Project` menu (or right click on the solution) that will reinstall nuget.exe in the .nuget folder. Also, Nuget should redownload the missing packages during the first build. - ### From the Command Line You can also build, test and compile the NuGet packages from the command line, using FAKE. @@ -282,7 +224,7 @@ If you get `System.UnauthorizedAccessException: Access to the path './build/outp When building for Mono/Linux this version will look for `libfdb_c.so` instead of `fdb_c.dll`. -More details on running FoundationDB on Linux can be found here: https://foundationdb.com/key-value-store/documentation/getting-started-linux.html +More details on running FoundationDB on Linux can be found here: https://apple.github.io/foundationdb/getting-started-linux.html How to build the NuGet packages ------------------------------- @@ -307,8 +249,8 @@ Hosting on IIS * The .NET API is async-only, and should only be called inside async methods. You should NEVER write something like `tr.GetAsync(...).Wait()` or 'tr.GetAsync(...).Result' because it will GREATLY degrade performances and prevent you from scaling up past a few concurrent requests. * The underlying client library will not run on a 32-bit Application Pool. You will need to move your web application to a 64-bit Application Pool. 
* If you are using IIS Express with an ASP.NET or ASP.NET MVC application from Visual Studio, you need to configure your IIS Express instance to run in 64-bit. With Visual Studio 2013, this can be done by checking Tools | Options | Projects and Solutions | Web Projects | Use the 64 bit version of IIS Express for web sites and projects -* The fdb_c.dll library can only be started once per process. This makes impractical to run an web application running inside a dedicated Application Domain alongside other application, on a shared host process. See http://community.foundationdb.com/questions/1146/using-foundationdb-in-a-webapi-2-project for more details. The only current workaround is to have a dedicated host process for this application, by making it run inside its own Application Pool. -* If you don't use the host's cancellation token for transactions and retry loops, deadlock can occur if the FoundationDB cluster is unavailable or under very heavy load. Please consider also using safe values for the DefaultTimeout and DefaultRetryLimit settings. +* The fdb_c.dll library can only be started once per process. This makes impractical to run an web application running inside a dedicated Application Domain alongside other application, on a shared host process. The only current workaround is to have a dedicated host process for this application, by making it run inside its own Application Pool. +* If you don't use the host's CancellationToken for transactions and retry loops, deadlock can occur if the FoundationDB cluster is unavailable or under very heavy load. Please consider also using safe values for the DefaultTimeout and DefaultRetryLimit settings. Hosting on OWIN --------------- @@ -319,7 +261,7 @@ Hosting on OWIN Implementation Notes -------------------- -Please refer to http://foundationdb.com/documentation/ to get an overview on the FoundationDB API, if you haven't already. 
+Please refer to https://apple.github.io/foundationdb/ to get an overview on the FoundationDB API, if you haven't already. This .NET binding has been modeled to be as close as possible to the other bindings (Python especially), while still having a '.NET' style API. @@ -331,7 +273,7 @@ There were a few design goals, that you may agree with or not: However, there are some key differences between Python and .NET that may cause problems: * Python's dynamic types and auto casting of Tuples values, are difficult to model in .NET (without relying on the DLR). The Tuple implementation try to be as dynamic as possible, but if you want to be safe, please try to only use strings, longs, booleans and byte[] to be 100% compatible with other bindings. You should refrain from using the untyped `tuple[index]` indexer (that returns an object), and instead use the generic `tuple.Get(index)` that will try to adapt the underlying type into a T. -* The Tuple layer uses ASCII and Unicode strings, while .NET only have Unicode strings. That means that all strings in .NET will be packed with prefix type 0x02 and byte arrays with prefix type 0x01. An ASCII string packed in Python will be seen as a byte[] unless you use `IFdbTuple.Get()` that will automatically convert it to Unicode. +* The Tuple layer uses ASCII and Unicode strings, while .NET only have Unicode strings. That means that all strings in .NET will be packed with prefix type 0x02 and byte arrays with prefix type 0x01. An ASCII string packed in Python will be seen as a byte[] unless you use `ITuple.Get()` that will automatically convert it to Unicode. * There is no dedicated 'UUID' type prefix, so that means that System.Guid would be serialized as byte arrays, and all instances of byte 0 would need to be escaped. Since `System.Guid` are frequently used as primary keys, I added a new custom type prefix (0x30) for 128-bits UUIDs and (0x31) for 64-bits UUIDs. 
This simplifies packing/unpacking and speeds up writing/reading/comparing Guid keys. The following files will be required by your application @@ -346,10 +288,9 @@ Known Limitations * The LINQ API is still a work in progress, and may change a lot. Simple LINQ queries, like Select() or Where() on the result of range queries (to convert Slice key/values into oter types) should work. * You cannot unload the fdb C native client from the process once the netork thread has started. You can stop the network thread once, but it does not support being restarted. This can cause problems when running under ASP.NET. * FoundationDB does not support long running batch or range queries if they take too much time. Such queries will fail with a 'past_version' error. -* See https://foundationdb.com/documentation/known-limitations.html for other known limitations of the FoundationDB database. +* See https://apple.github.io/foundationdb/known-limitations.html for other known limitations of the FoundationDB database. Contributing ------------ * It is important to point out that this solution uses tabs instead of spaces for various reasons. In order to ease the transition for people who want to start contributing and avoid having to switch their Visual Studio configuration manually an .editorconfig file has been added to the root folder of the solution. The easiest way to use this is to install the [Extension for Visual Studio](http://visualstudiogallery.msdn.microsoft.com/c8bccfe2-650c-4b42-bc5c-845e21f96328). This will switch visual studio's settings for white space in csharp files to use tabs. - diff --git a/Tuples.md b/Tuples.md index d1f9f855d..f73ce47d0 100644 --- a/Tuples.md +++ b/Tuples.md @@ -68,13 +68,13 @@ And quite frankly, if you have used other languages where tuples are first-class That's why we need a better API, in order to help us be more productive. 
-## IFdbTuple +## ITuple -The `IFdbTuple` interface, defined in `FoundationDB.Layers.Tuples` (TODO: update this if we rename it!), is the base of all the different tuples implementation, all targetting a specific use case. +The `ITuple` interface, defined in `FoundationDB.Layers.Tuples` (TODO: update this if we rename it!), is the base of all the different tuples implementation, all targeting a specific use case. This interface has the bare minimum API, thats must be implemented by each variant, and is in turn used by a set of extension methods that add more generic behavior that does NOT need to be replicated in all the variants. -There is also a static class, called `FdbTuple`, which holds a bunch of methods to create and handle all the different variants of tuples. +There is also a static class, called `STuple`, which holds a bunch of methods to create and handle all the different variants of tuples. _note: the interface is not called `ITuple` because 1) there is already an `ITuple` interface in the BCL (even though it is internal), and 2) we wouldn't be able to call our static helper class `Tuple` since it would collide with the BCL._ @@ -82,55 +82,55 @@ _note: the interface is not called `ITuple` because 1) there is already an `ITup Tuples need to adapt to different use case: some tuples should have a fixed size and types (like the BCL Tuples), some should have a variable length (like a vector or list). Some tuples should probably be structs (to reduce the number of allocation in tight loops), while others need to be reference types. And finally, some tuples could be thin wrappers around encoded binary blobs, and defer the decoding of items until they are accessed.
-That's why there is multiple variants of tuples, all implementing the `IFdbTuple` interface: +That's why there is multiple variants of tuples, all implementing the `ITuple` interface: -- `FdbTuple`, `FdbTuple` (up to T5 right now) are the equivalent of the BCL's `Tuple` except that they are implemented as a struct. They are efficient when used as a temporary step to create bigger tuples, or when you have control of the actual type (in LINQ queries, inside your own private methods, ...). They are also ideal if you want type safety and nice intellisense support, since the types are known at compile time. -- `FdbListTuple` wraps an array of object[] and exposes a subset of this array. Getting a substring of this cheap since it does not have to copy the items. -- `FdbJoinedTuple` is a wrapper that glues together two tuples (of any type). -- `FdbLinkedTuple` is a special case of an FdbJoinedTupel, where we are only adding one value to an existing tuple. -- `FdbSlicedTuple` is a wrapper around a half-parsed binary representation of a tuple, and which will only decode items if they are accessed. In cases where you are only interested in part of a key, you won't waste CPU cycles decoding the other items. -- `FdbMemoizedTuple` will cache its binary representation, which is usefull when you have a common tuple prefix which is used everytime to construct other tuples. -- `FdbPrefixedTuple` is some sort of hybrid tuples whose binary representation always have a constant binary prefix, which may or may not be a valid binary tuple representation itself (need to use tuples with prefixes generated from a different encoding). +- `STuple`, `STuple` (up to T5 right now) are the equivalent of the BCL's `Tuple` except that they are implemented as a struct. They are efficient when used as a temporary step to create bigger tuples, or when you have control of the actual type (in LINQ queries, inside your own private methods, ...). 
They are also ideal if you want type safety and nice intellisense support, since the types are known at compile time. +- `ListTuple` wraps an array of object[] and exposes a subset of this array. Getting a substring of this is cheap since it does not have to copy the items. +- `JoinedTuple` is a wrapper that glues together two tuples (of any type). +- `LinkedTuple` is a special case of a `JoinedTuple`, where we are only adding one value to an existing tuple. +- `SlicedTuple` is a wrapper around a half-parsed binary representation of a tuple, and which will only decode items if they are accessed. In cases where you are only interested in part of a key, you won't waste CPU cycles decoding the other items. +- `MemoizedTuple` will cache its binary representation, which is useful when you have a common tuple prefix which is used every time to construct other tuples. +- `PrefixedTuple` is some sort of hybrid tuples whose binary representation always have a constant binary prefix, which may or may not be a valid binary tuple representation itself (need to use tuples with prefixes generated from a different encoding). ### Creating a tuple The most simple way to create a tuple, is from its elements: ```CSharp -var t = FdbTuple.Create("Hello", 123, Guid.NewGuid()); +var t = STuple.Create("Hello", 123, Guid.NewGuid()); ``` -The actual type of the tuple will be `FdbTuple` which is a struct. Since we are using the `var` keyword, then as long as `t` stays inside the method, it will not be boxed. +The actual type of the tuple will be `STuple` which is a struct. Since we are using the `var` keyword, then as long as `t` stays inside the method, it will not be boxed.
We can also create a tuple by adding something to an existing tuples, even starting with the Empty tuple: ```CSharp -var t = FdbTuple.Empty.Append("Hello").Append(123).Append(Guid.NewGuid()); +var t = STuple.Empty.Append("Hello").Append(123).Append(Guid.NewGuid()); ``` -The good news here is that _t_ is still a struct of type `FdbTuple` and we did not produce any allocations: the Empty tuple is a singleton, and all the intermediate Append() returned structs of type `FdbTuple` and `FdbTuple`. There is of course a limit to the number of elements that can be added, before we have to switch to an array-based tuple variant. +The good news here is that _t_ is still a struct of type `STuple` and we did not produce any allocations: the Empty tuple is a singleton, and all the intermediate Append() returned structs of type `STuple` and `STuple`. There is of course a limit to the number of elements that can be added, before we have to switch to an array-based tuple variant. If we have a variable-size list of items, we can also create a tuple from it: ```CSharp IEnumerable xs = ....; // xs is a sequence of MyFoo objects, with an Id property (of type Guid) -var t = FdbTuple.FromSequence(xs.Select(x => x.Id)); +var t = STuple.FromSequence(xs.Select(x => x.Id)); ``` When all the elements or a tuple are of the same type, you can use specialized versions: ```CSharp var xs = new [] { "Bonjour", "le", "Monde!" 
}; -var t = FdbTuple.FromArray(xs); +var t = STuple.FromArray(xs); ``` If you were already using the BCL's Tuple, you can easily convert from one to the other, via a set of implicit and explicit cast operators: ```CSharp var bcl = Tuple.Create("Hello", 123, Guid.NewGuid()); -FdbTuple t = bcl; // implicit cast +STuple t = bcl; // implicit cast -var t = FdbTuple.Create("Hello", 123, Guid.NewGuid()); +var t = STuple.Create("Hello", 123, Guid.NewGuid()); Tuple bcl = (Tuple) t; // explicit cast ``` @@ -138,8 +138,8 @@ And for the more adventurous, you can of course create a tuple by copying the el ```CSharp var xs = new object[] { "Hello", 123, Guid.NewGuid() }; -var t1 = FdbTuple.FromObjects(xs); // => ("hello", 123, guid) -var t2 = FdbTuple.FromObjects(xs, 1, 2); // => (123, guid) +var t1 = STuple.FromObjects(xs); // => ("hello", 123, guid) +var t2 = STuple.FromObjects(xs, 1, 2); // => (123, guid) xs[1] = 456; // won't change the content of the tuples // t[1] => 123 ``` @@ -148,8 +148,8 @@ If you really want to push it, you can skip copying the items by wrapping an exi ```CSharp var xs = new object[] { "Hello", 123, Guid.NewGuid() }; -var t1 = FdbTuple.Wrap(xs); // no copy! -var t2 = FdbTuple.Wrap(xs, 1, 2); // no copy! +var t1 = STuple.Wrap(xs); // no copy! +var t2 = STuple.Wrap(xs, 1, 2); // no copy! xs[1] = 456; // will change the content of the tuples!! // t[1] => 456 ``` @@ -166,12 +166,12 @@ To help you verify that a tuple has the correct size before accessing its elemen - `t.OfSize(3)` checks that `t` is not null, and that `t.Count` is equal to 3, and then returns the tuple itself, so you can write: `t.OfSize(3).DoSomethingWichExceptsThreeElements()` - `t.OfSizeAtLeast(3)` (and `t.OfSizeAtMost(3)`) work the same, except they check that `t.Count >= 3` (or `t.Count <= 3`) -Of course, if you have one of the `FdbTuple` struct, you can skip this step, since the size if known at compile time. 
+Of course, if you have one of the `STuple` struct, you can skip this step, since the size if known at compile time. To read the content of a tuple, you can simply call `t.Get(index)`, where `index` is the offset _in the tuple_ of the element, and `T` is the type into which the value will be converted. ```CSharp -var t = FdbTuple.Create("hello", 123, Guid.NewGuid()); +var t = STuple.Create("hello", 123, Guid.NewGuid()); var x = t.Get(0); // => "hello" var y = t.Get(1); // => 123 var z = t.Get(2); // => guid @@ -180,7 +180,7 @@ var z = t.Get(2); // => guid If `index` is negative, then it is relative to the end of the tuple, where -1 is the last element, -2 is the next-to-last element, and -N is the first element. ```CSharp -var t = FdbTuple.Create("hello", 123, Guid.NewGuid()); +var t = STuple.Create("hello", 123, Guid.NewGuid()); var x = t.Get(-3); // => "hello" var y = t.Get(-2); // => 123 var z = t.Get(-1); // => guid @@ -193,13 +193,13 @@ Code that manipulate tuples can get complex pretty fast, so you need a way to di For that, every tuple overrides `ToString()` to return a nicely formatted string with a standardized format. 
```CSharp -var t1 = FdbTuple.Create("hello", 123, Guid.NewGuid()); +var t1 = STuple.Create("hello", 123, Guid.NewGuid()); Console.WriteLine("t1 = {0}", t1); // => t1 = ("hello", 123, {773166b7-de74-4fcc-845c-84080cc89533}) -var t2 = FdbTuple.Create("hello"); +var t2 = STuple.Create("hello"); Console.WriteLine("t1 = {0}", t2); // => t2 = ("hello",) -var t3 = FdbTuple.Empty; +var t3 = STuple.Empty; Console.WriteLine("t3 = {0}", t3); // => t3 = () ``` @@ -213,11 +213,11 @@ Since a tuple is just a vector of elements, you can of course put a tuple inside This works: ```CSharp -var t1 = FdbTuple.Create("hello", FdbTuple(123, 456), Guid.NewGuid()); +var t1 = STuple.Create("hello", STuple(123, 456), Guid.NewGuid()); // t1 = ("hello", (123, 456), {773166b7-de74-4fcc-845c-84080cc89533}) -var t2 = FdbTuple.Create(FdbTuple.Create("a", "b")); +var t2 = STuple.Create(STuple.Create("a", "b")); // t2 = ((a, b),) -var t3 = FdbTuple.Create("hello", FdbTuple.Empty, "world"); +var t3 = STuple.Create("hello", STuple.Empty, "world"); // t3 = ("hello", (), "world"); ``` @@ -230,7 +230,7 @@ var productId = "B00CS8QSSK"; var locationId = new [] { "Europe", "France", "Lille" }; var orderId = Guid.NewGuid(); -var t = FdbTuple.Create(productId, FdbTuple.FromArray(locationId), orderId); +var t = STuple.Create(productId, STuple.FromArray(locationId), orderId); // t.Count => 3 // t[0] => "B00CS8QSSK" // t[1] => ("Europe", "France", "Lille") @@ -248,7 +248,7 @@ You can, though, modify tuples by returning a new tuple, with or without copying The most common case is to simply add a value to a tuple via the `t.Append(T value)` method. For example you have a base tuple (cached value), and you want to add a document ID. 
```CSharp -var location = FdbTuple.Create("MyAwesomeApp", "Documents"); +var location = STuple.Create("MyAwesomeApp", "Documents"); var documentId = Guid.NewGuid(); var t = location.Append(document); @@ -260,11 +260,11 @@ Don't forget that if you Append a tuple, it will be added as a nested tuple! If you actually want to merge the elements of two tuples, when you can use the `t1.Concat(t2)` method, which return a new tuple with the elements of both t1 and t2. ```CSharp -var location = FdbTuple.Create("MyAwesomeApp", "OrdersByProduct"); +var location = STuple.Create("MyAwesomeApp", "OrdersByProduct"); var productId = "B00CS8QSSK"; var orderId = Guid.NewGuid(); -var t1 = FdbTuple.Create(productId, orderId) +var t1 = STuple.Create(productId, orderId) // t1 => ("B00CS8QSSK", {773166b7-de74-4fcc-845c-84080cc89533}) var t2 = location.Concat(t1); @@ -280,7 +280,7 @@ First, you can return a subset of a tuple via on of the `t.Substring(...)` metho The `Substring()` method works exactly the same way as for regulard strings. ```CSharp -var t = FdbTuple.Create(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); +var t = STuple.Create(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); var u = t.Substring(0, 3); // => (1, 2, 3) var v = t.Substring(5, 2); // => (6, 7) var w = t.Substring(7); // => (8, 9, 10) @@ -292,7 +292,7 @@ var w = v.Substring(-3); // => (8, 9, 10) The `t[from, to]` indexer gets some getting used to. If actual returns all the elements in the tuple with position `from <= p < to`, which means that the `to` is excluded. ```CSharp -var t = FdbTuple.Create(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); +var t = STuple.Create(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); var u = t[0, 3]; // => (1, 2, 3) var v = t[5, 7]; // => (6, 7) // rember that 'to' is excluded! @@ -307,7 +307,7 @@ var w = v[-3, null]; // => (8, 9, 10) If you are tired of writing `t.Substring(0, 3)` all the time, you can also use `t.Truncate(3)` which does the same thing. 
```CSharp -var t = FdbTuple.Create(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); +var t = STuple.Create(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); var u = t.Truncate(3); // u => (1, 2, 3); var v = t.Truncate(-3); @@ -319,7 +319,7 @@ var v = t.Truncate(-3); When decoding keys using tuple, you wil often find yourself extracting a fixed number of arguments into local variables, and then constructing an instance of a Model class from your application. ```CSharp -public MyFooBar DecodeFoobar(IFdbTuple tuple) +public MyFooBar DecodeFoobar(ITuple tuple) { var x = tuple.Get(0); var y = tuple.Get(1); @@ -335,10 +335,10 @@ The keen eye will see the problems with this method: - what if tuple.Count is only 2 ? - you probably copy/pasted `var x = tuple.Get<...>(0)` two more times, and forgot to change the index to 1 and 2! _(even Notch does it!)_ -One solution is to use the set of `t.As()` helper methods to convert a tuple of type `IFdbTuple` into a more friendly `FdbTuple` introducing tape safety and intellisence. +One solution is to use the set of `t.As()` helper methods to convert a tuple of type `ITuple` into a more friendly `STuple` introducing tape safety and intellisence. ```CSharp -public MyFooBar DecodeFoobar(IFdbTuple tuple) +public MyFooBar DecodeFoobar(ITuple tuple) { var t = tuple.As(); // this throws if tuple is null, or not of size 3 @@ -351,7 +351,7 @@ That's better, but you can still swap two arguments by mistake, if they have the To combat this, you can use on of the `t.With(Action)` or `t.With(Func)` which can give names to the elements. 
```CSharp -public MyFooBar DecodeFoobar(IFdbTuple tuple) +public MyFooBar DecodeFoobar(ITuple tuple) { return tuple.With((Guid productId, Guid categoryId, Guid orderId) => new MyFooBar(productId, categoriyId, orderId)); // all three elements are GUID, but adding name help you catch argument inversion errors diff --git a/build.bat b/build.bat index 1ebc5f2d0..921a6ba8d 100644 --- a/build.bat +++ b/build.bat @@ -9,12 +9,12 @@ if not exist .nuget\nuget.exe ( REM we need FAKE to process our build scripts if not exist build\tools\FAKE\tools\Fake.exe ( - ECHO FAKE not found.. Installing.. - ".nuget\nuget.exe" "install" "FAKE" "-OutputDirectory" "build\tools" "-ExcludeVersion" "-Prerelease" + ECHO FAKE not found... Installing... + ".nuget\nuget.exe" "install" "FAKE" "-Version" "4.64.12" "-OutputDirectory" "build\tools" "-ExcludeVersion" ) REM we need nunit-console to run our tests -if not exist build\tools\NUnit.Runners\tools\nunit-console.exe ( +if not exist build\tools\NUnit.ConsoleRunner\tools\nunit3-console.exe ( ECHO Nunit not found.. Installing ".nuget\nuget.exe" "install" "NUnit.Runners" "-OutputDirectory" "build\tools" "-ExcludeVersion" "-Prerelease" ) diff --git a/build/FoundationDB.Client.nuspec b/build/FoundationDB.Client.nuspec index 48a87cfff..63a6421f5 100644 --- a/build/FoundationDB.Client.nuspec +++ b/build/FoundationDB.Client.nuspec @@ -2,7 +2,7 @@ FoundationDB.Client - 0.9.9-pre + 5.1.0-alpha1 FoundationDB Client Doxense Doxense @@ -12,9 +12,9 @@ false .NET Binding for FoundationDB This is a pre-release of the .NET Binding, the public API is still subject to changes. 
- Copyright 2013-2015 Doxense SAS + Copyright 2013-2018 Doxense SAS en-US - foundationdb nosql + foundationdb fdb nosql diff --git a/build/FoundationDB.Layers.Common.nuspec b/build/FoundationDB.Layers.Common.nuspec index fd3f73888..699fcd55e 100644 --- a/build/FoundationDB.Layers.Common.nuspec +++ b/build/FoundationDB.Layers.Common.nuspec @@ -2,7 +2,7 @@ FoundationDB.Layers.Common - 0.9.9-pre + 5.1.0-alpha1 FoundationDB Common Layers Doxense Doxense @@ -12,11 +12,11 @@ false Common Layers for the FoundationDB .NET Binding This is a pre-release of the .NET Binding, the public API is still subject to changes. - Copyright 2013-2015 Doxense SAS + Copyright 2013-2018 Doxense SAS en-US foundationdb nosql layers - + diff --git a/build/build.fsx b/build/build.fsx index 6072ff41a..cc48d7223 100644 --- a/build/build.fsx +++ b/build/build.fsx @@ -2,6 +2,7 @@ #r "tools/FAKE/tools/FakeLib.dll" open Fake +open Fake.Testing let projectRoot () = if FileUtils.pwd().EndsWith("build") then @@ -10,7 +11,7 @@ let projectRoot () = FileUtils.pwd() // Properties -let version = "0.9.9-pre" //TODO: find a way to extract this from somewhere convenient +let version = "5.1.0-alpha1" //TODO: find a way to extract this from somewhere convenient let buildDir = projectRoot() @@ "build" @@ "output" let nugetPath = projectRoot() @@ ".nuget" @@ "NuGet.exe" let nugetOutDir = buildDir @@ "_packages" @@ -63,14 +64,14 @@ Target "Test" (fun _ -> CreateDir testDir ActivateFinalTarget "CloseTestRunner" !! 
(buildDir @@ "**" @@ "*Test*.dll") - |> NUnit( - fun p -> { p with DisableShadowCopy = true - OutputFile = "TestResults.xml" + |> NUnit3( + fun p -> { p with ShadowCopy = false + //ResultSpecs = "TestResults.xml" StopOnError = false ErrorLevel = DontFailBuild WorkingDir = testDir TimeOut = System.TimeSpan.FromMinutes 10.0 - ExcludeCategory = "LongRunning,LocalCluster" })) + Where = "cat != LongRunning && cat != LocalCluster" })) FinalTarget "CloseTestRunner" (fun _ -> ProcessHelper.killProcess "nunit-agent.exe" @@ -98,7 +99,7 @@ Target "BuildNuget" (fun _ -> let binariesDir = buildDir @@ name // Copy XML doc to binaries dir, works by default on windows but not on Mono. - let xmlDocFile = projectRoot() @@ name @@ "bin" @@ "Release" @@ (sprintf "%s.XML") name + let xmlDocFile = projectRoot() @@ name @@ "bin" @@ "Release" @@ "netstandard2.0" @@ (sprintf "%s.XML") name FileUtils.cp xmlDocFile binariesDir NuGetPack ( diff --git a/build/download-nuget.ps1 b/build/download-nuget.ps1 index 561837d59..4724efe9c 100644 --- a/build/download-nuget.ps1 +++ b/build/download-nuget.ps1 @@ -1,4 +1,4 @@ -$source = "http://nuget.org/nuget.exe" +$source = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe" $destination = ".\.nuget\nuget.exe" $wc = New-Object System.Net.WebClient