From 630b27bde3785adc604567325dc99a9d53907b21 Mon Sep 17 00:00:00 2001
From: yankejustin
Date: Thu, 21 May 2015 16:12:58 -0400
Subject: [PATCH] Added Unit Tests for SafeQuickLZ

Added unit tests that round-trip small and large sets of random data.
---
 Client.Tests/Client.Tests.csproj                |   6 +-
 .../Core/Compression/SafeQuickLZ.Tests.cs       | 131 ++++++++++++++++++
 .../Core/Compression/SafeQuickLZ.Tests.cs       | 131 ++++++++++++++++++
 Server.Tests/Server.Tests.csproj                |   1 +
 4 files changed, 267 insertions(+), 2 deletions(-)
 create mode 100644 Client.Tests/Core/Compression/SafeQuickLZ.Tests.cs
 create mode 100644 Server.Tests/Core/Compression/SafeQuickLZ.Tests.cs

diff --git a/Client.Tests/Client.Tests.csproj b/Client.Tests/Client.Tests.csproj
index 7da6b759..2659278a 100644
--- a/Client.Tests/Client.Tests.csproj
+++ b/Client.Tests/Client.Tests.csproj
@@ -55,9 +55,9 @@
+    <Compile Include="Core\Compression\SafeQuickLZ.Tests.cs" />
-
@@ -66,7 +66,9 @@
     <Name>Client</Name>
-
+
+
+
diff --git a/Client.Tests/Core/Compression/SafeQuickLZ.Tests.cs b/Client.Tests/Core/Compression/SafeQuickLZ.Tests.cs
new file mode 100644
index 00000000..324f22a7
--- /dev/null
+++ b/Client.Tests/Core/Compression/SafeQuickLZ.Tests.cs
@@ -0,0 +1,131 @@
+using System;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using xClient.Core.Compression;
+
+namespace xClient.Tests.Core.Compression
+{
+    [TestClass]
+    public class SafeQuickLZTests
+    {
+        // Tests using pseudo-randomly generated data.
+        #region Random Data
+
+        /*
+         * Purpose: Validate that a small block of data survives a compression/
+         * decompression round-trip using SafeQuickLZ at compression level 1.
+         */
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void SmallDataTestLevel1()
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] smallData = new byte[100];
+
+            // Fill the small data array with pseudo-random bytes; a fixed seed keeps the test reproducible.
+            new Random(1234).NextBytes(smallData);
+
+            // Store the compressed data.
+            byte[] smallDataCompressed = safeQuickLZ.Compress(smallData, 0, smallData.Length, 1);
+
+            // The compressed bytes must differ from the original; CollectionAssert compares contents, not references.
+            CollectionAssert.AreNotEqual(smallData, smallDataCompressed, "Original data is equal to the compressed data!");
+
+            // Store the decompressed data.
+            byte[] smallDataDecompressed = safeQuickLZ.Decompress(smallDataCompressed, 0, smallDataCompressed.Length);
+
+            // The decompressed bytes must differ from the compressed data.
+            CollectionAssert.AreNotEqual(smallDataCompressed, smallDataDecompressed, "Compressed data is equal to the decompressed data!");
+            // The decompressed bytes must match the original; the data must survive the round-trip.
+            CollectionAssert.AreEqual(smallData, smallDataDecompressed, "Original data does not match the decompressed data!");
+        }
+
+        /*
+         * Purpose: Validate that a small block of data survives a compression/
+         * decompression round-trip using SafeQuickLZ at compression level 3.
+         */
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void SmallDataTestLevel3()
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] smallData = new byte[100];
+
+            // Fill the small data array with pseudo-random bytes; a fixed seed keeps the test reproducible.
+            new Random(1234).NextBytes(smallData);
+
+            // Store the compressed data.
+            byte[] smallDataCompressed = safeQuickLZ.Compress(smallData, 0, smallData.Length, 3);
+
+            // The compressed bytes must differ from the original; CollectionAssert compares contents, not references.
+            CollectionAssert.AreNotEqual(smallData, smallDataCompressed, "Original data is equal to the compressed data!");
+
+            // Store the decompressed data.
+            byte[] smallDataDecompressed = safeQuickLZ.Decompress(smallDataCompressed, 0, smallDataCompressed.Length);
+
+            // The decompressed bytes must differ from the compressed data.
+            CollectionAssert.AreNotEqual(smallDataCompressed, smallDataDecompressed, "Compressed data is equal to the decompressed data!");
+            // The decompressed bytes must match the original; the data must survive the round-trip.
+            CollectionAssert.AreEqual(smallData, smallDataDecompressed, "Original data does not match the decompressed data!");
+        }
+
+        /*
+         * Purpose: Validate that a large block of data survives a compression/
+         * decompression round-trip using SafeQuickLZ at compression level 1.
+         */
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void BigDataTestLevel1()
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] bigData = new byte[100000];
+
+            // Fill the big data array with pseudo-random bytes; a fixed seed keeps the test reproducible.
+            new Random(1234).NextBytes(bigData);
+
+            // Store the compressed data.
+            byte[] bigDataCompressed = safeQuickLZ.Compress(bigData, 0, bigData.Length, 1);
+
+            // The compressed bytes must differ from the original; CollectionAssert compares contents, not references.
+            CollectionAssert.AreNotEqual(bigData, bigDataCompressed, "Original data is equal to the compressed data!");
+
+            // Store the decompressed data.
+            byte[] bigDataDecompressed = safeQuickLZ.Decompress(bigDataCompressed, 0, bigDataCompressed.Length);
+
+            // The decompressed bytes must differ from the compressed data.
+            CollectionAssert.AreNotEqual(bigDataCompressed, bigDataDecompressed, "Compressed data is equal to the decompressed data!");
+            // The decompressed bytes must match the original; the data must survive the round-trip.
+            CollectionAssert.AreEqual(bigData, bigDataDecompressed, "Original data does not match the decompressed data!");
+        }
+
+        /*
+         * Purpose: Validate that a large block of data survives a compression/
+         * decompression round-trip using SafeQuickLZ at compression level 3.
+         */
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void BigDataTestLevel3()
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] bigData = new byte[100000];
+
+            // Fill the big data array with pseudo-random bytes; a fixed seed keeps the test reproducible.
+            new Random(1234).NextBytes(bigData);
+
+            // Store the compressed data.
+            byte[] bigDataCompressed = safeQuickLZ.Compress(bigData, 0, bigData.Length, 3);
+
+            // The compressed bytes must differ from the original; CollectionAssert compares contents, not references.
+            CollectionAssert.AreNotEqual(bigData, bigDataCompressed, "Original data is equal to the compressed data!");
+
+            // Store the decompressed data.
+            byte[] bigDataDecompressed = safeQuickLZ.Decompress(bigDataCompressed, 0, bigDataCompressed.Length);
+
+            // The decompressed bytes must differ from the compressed data.
+            CollectionAssert.AreNotEqual(bigDataCompressed, bigDataDecompressed, "Compressed data is equal to the decompressed data!");
+            // The decompressed bytes must match the original; the data must survive the round-trip.
+            CollectionAssert.AreEqual(bigData, bigDataDecompressed, "Original data does not match the decompressed data!");
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
diff --git a/Server.Tests/Core/Compression/SafeQuickLZ.Tests.cs b/Server.Tests/Core/Compression/SafeQuickLZ.Tests.cs
new file mode 100644
index 00000000..5317ac43
--- /dev/null
+++ b/Server.Tests/Core/Compression/SafeQuickLZ.Tests.cs
@@ -0,0 +1,131 @@
+using System;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using xServer.Core.Compression;
+
+namespace xServer.Tests.Core.Compression
+{
+    [TestClass]
+    public class SafeQuickLZTests
+    {
+        // Tests using pseudo-randomly generated data.
+        #region Random Data
+
+        /*
+         * Purpose: Validate that a small block of data survives a compression/
+         * decompression round-trip using SafeQuickLZ at compression level 1.
+         */
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void SmallDataTestLevel1()
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] smallData = new byte[100];
+
+            // Fill the small data array with pseudo-random bytes; a fixed seed keeps the test reproducible.
+            new Random(1234).NextBytes(smallData);
+
+            // Store the compressed data.
+            byte[] smallDataCompressed = safeQuickLZ.Compress(smallData, 0, smallData.Length, 1);
+
+            // The compressed bytes must differ from the original; CollectionAssert compares contents, not references.
+            CollectionAssert.AreNotEqual(smallData, smallDataCompressed, "Original data is equal to the compressed data!");
+
+            // Store the decompressed data.
+            byte[] smallDataDecompressed = safeQuickLZ.Decompress(smallDataCompressed, 0, smallDataCompressed.Length);
+
+            // The decompressed bytes must differ from the compressed data.
+            CollectionAssert.AreNotEqual(smallDataCompressed, smallDataDecompressed, "Compressed data is equal to the decompressed data!");
+            // The decompressed bytes must match the original; the data must survive the round-trip.
+            CollectionAssert.AreEqual(smallData, smallDataDecompressed, "Original data does not match the decompressed data!");
+        }
+
+        /*
+         * Purpose: Validate that a small block of data survives a compression/
+         * decompression round-trip using SafeQuickLZ at compression level 3.
+         */
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void SmallDataTestLevel3()
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] smallData = new byte[100];
+
+            // Fill the small data array with pseudo-random bytes; a fixed seed keeps the test reproducible.
+            new Random(1234).NextBytes(smallData);
+
+            // Store the compressed data.
+            byte[] smallDataCompressed = safeQuickLZ.Compress(smallData, 0, smallData.Length, 3);
+
+            // The compressed bytes must differ from the original; CollectionAssert compares contents, not references.
+            CollectionAssert.AreNotEqual(smallData, smallDataCompressed, "Original data is equal to the compressed data!");
+
+            // Store the decompressed data.
+            byte[] smallDataDecompressed = safeQuickLZ.Decompress(smallDataCompressed, 0, smallDataCompressed.Length);
+
+            // The decompressed bytes must differ from the compressed data.
+            CollectionAssert.AreNotEqual(smallDataCompressed, smallDataDecompressed, "Compressed data is equal to the decompressed data!");
+            // The decompressed bytes must match the original; the data must survive the round-trip.
+            CollectionAssert.AreEqual(smallData, smallDataDecompressed, "Original data does not match the decompressed data!");
+        }
+
+        /*
+         * Purpose: Validate that a large block of data survives a compression/
+         * decompression round-trip using SafeQuickLZ at compression level 1.
+         */
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void BigDataTestLevel1()
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] bigData = new byte[100000];
+
+            // Fill the big data array with pseudo-random bytes; a fixed seed keeps the test reproducible.
+            new Random(1234).NextBytes(bigData);
+
+            // Store the compressed data.
+            byte[] bigDataCompressed = safeQuickLZ.Compress(bigData, 0, bigData.Length, 1);
+
+            // The compressed bytes must differ from the original; CollectionAssert compares contents, not references.
+            CollectionAssert.AreNotEqual(bigData, bigDataCompressed, "Original data is equal to the compressed data!");
+
+            // Store the decompressed data.
+            byte[] bigDataDecompressed = safeQuickLZ.Decompress(bigDataCompressed, 0, bigDataCompressed.Length);
+
+            // The decompressed bytes must differ from the compressed data.
+            CollectionAssert.AreNotEqual(bigDataCompressed, bigDataDecompressed, "Compressed data is equal to the decompressed data!");
+            // The decompressed bytes must match the original; the data must survive the round-trip.
+            CollectionAssert.AreEqual(bigData, bigDataDecompressed, "Original data does not match the decompressed data!");
+        }
+
+        /*
+         * Purpose: Validate that a large block of data survives a compression/
+         * decompression round-trip using SafeQuickLZ at compression level 3.
+         */
+        [TestMethod]
+        [TestCategory("Compression")]
+        public void BigDataTestLevel3()
+        {
+            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();
+            byte[] bigData = new byte[100000];
+
+            // Fill the big data array with pseudo-random bytes; a fixed seed keeps the test reproducible.
+            new Random(1234).NextBytes(bigData);
+
+            // Store the compressed data.
+            byte[] bigDataCompressed = safeQuickLZ.Compress(bigData, 0, bigData.Length, 3);
+
+            // The compressed bytes must differ from the original; CollectionAssert compares contents, not references.
+            CollectionAssert.AreNotEqual(bigData, bigDataCompressed, "Original data is equal to the compressed data!");
+
+            // Store the decompressed data.
+            byte[] bigDataDecompressed = safeQuickLZ.Decompress(bigDataCompressed, 0, bigDataCompressed.Length);
+
+            // The decompressed bytes must differ from the compressed data.
+            CollectionAssert.AreNotEqual(bigDataCompressed, bigDataDecompressed, "Compressed data is equal to the decompressed data!");
+            // The decompressed bytes must match the original; the data must survive the round-trip.
+            CollectionAssert.AreEqual(bigData, bigDataDecompressed, "Original data does not match the decompressed data!");
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
diff --git a/Server.Tests/Server.Tests.csproj b/Server.Tests/Server.Tests.csproj
index f84adae7..3e60cec7 100644
--- a/Server.Tests/Server.Tests.csproj
+++ b/Server.Tests/Server.Tests.csproj
@@ -53,6 +53,7 @@
+    <Compile Include="Core\Compression\SafeQuickLZ.Tests.cs" />
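
Note on the tests above: the four methods in each file repeat the same compress/assert/decompress/assert sequence and differ only in buffer size and compression level. A minimal sketch of a shared helper follows, assuming only the Compress(byte[], int, int, int) and Decompress(byte[], int, int) signatures used in the diff and the SafeQuickLZ type from the respective Core.Compression namespace; the helper name RoundTripAssert and the seed value are illustrative, not part of this patch.

    using System;
    using Microsoft.VisualStudio.TestTools.UnitTesting;

    internal static class SafeQuickLZTestHelper
    {
        // Compresses and decompresses a pseudo-random buffer and asserts the
        // round-trip invariants checked by the four tests above.
        public static void RoundTripAssert(int size, int level)
        {
            SafeQuickLZ safeQuickLZ = new SafeQuickLZ();   // assumed: the type under test
            byte[] original = new byte[size];
            new Random(1234).NextBytes(original);          // fixed seed so failures are reproducible

            byte[] compressed = safeQuickLZ.Compress(original, 0, original.Length, level);
            // CollectionAssert compares element-by-element; Assert.AreNotEqual on arrays
            // would only compare references and always pass.
            CollectionAssert.AreNotEqual(original, compressed, "Compression returned the input bytes unchanged.");

            byte[] decompressed = safeQuickLZ.Decompress(compressed, 0, compressed.Length);
            CollectionAssert.AreNotEqual(compressed, decompressed, "Decompression returned the compressed bytes unchanged.");
            CollectionAssert.AreEqual(original, decompressed, "Round-trip did not reproduce the original data.");
        }
    }

With such a helper, each existing [TestMethod] body reduces to a single call, e.g. SmallDataTestLevel1 becomes RoundTripAssert(100, 1) and BigDataTestLevel3 becomes RoundTripAssert(100000, 3).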