From c6dc686350f071261aac1e0cd04da2ddae822cd2 Mon Sep 17 00:00:00 2001
From: j0nimost
Date: Mon, 16 Oct 2023 07:39:52 +0300
Subject: [PATCH] changed the file separator from enum

---
 src/Kafa/KafaOptions.cs                      | 10 +++++-----
 src/Kafa/Reader/KafaReadState.cs             |  4 ++--
 src/Kafa/Reader/KafaReader.Col.cs            |  3 ++-
 src/Kafa/Reflection/KafaReflection.Reader.cs |  4 ++--
 src/KafaTests/KafaReadTests.cs               |  6 +++---
 src/KafaTests/KafaWriteTests.cs              |  2 +-
 src/KafaTests/RFC4180Tests.cs                |  2 +-
 7 files changed, 16 insertions(+), 15 deletions(-)

diff --git a/src/Kafa/KafaOptions.cs b/src/Kafa/KafaOptions.cs
index 3a72f81..5cab77d 100644
--- a/src/Kafa/KafaOptions.cs
+++ b/src/Kafa/KafaOptions.cs
@@ -7,16 +7,16 @@ namespace nyingi.Kafa
 {
-    public enum FileType
+    public class SeparatorFileType
     {
-        CSV = (byte)',',
-        TSV = (byte)';'
+        public const char CSV = ',';
+        public const char TSV = ';';
     }
 
     public sealed partial class KafaOptions
     {
         public CultureInfo? CultureInfo { get; set; }
         public Encoding? Encoding { get; set; }
-        public FileType FileType { get; set; }
+        public char Separator { get; set; } = '\0';
 
         public bool HasHeader { get; set; } = true;
 
 
@@ -24,7 +24,7 @@ internal static KafaOptions Default
         {
             get
             {
-                return new KafaOptions { FileType = FileType.CSV, CultureInfo = CultureInfo.InvariantCulture, Encoding = Encoding.UTF8 };
+                return new KafaOptions { Separator = SeparatorFileType.CSV, CultureInfo = CultureInfo.InvariantCulture, Encoding = Encoding.UTF8 };
             }
         }
 
diff --git a/src/Kafa/Reader/KafaReadState.cs b/src/Kafa/Reader/KafaReadState.cs
index e9fb64b..4518bde 100644
--- a/src/Kafa/Reader/KafaReadState.cs
+++ b/src/Kafa/Reader/KafaReadState.cs
@@ -119,7 +119,7 @@ private void ReadColMarkers()
                     i = j + 1;
                     continue;
                 }
-                else if (Buffer[i] == (int)Options.FileType || Buffer[i] == '\n' || Buffer[i + 1] == '\0')
+                else if (Buffer[i] == (int)Options.Separator || Buffer[i] == '\n' || Buffer[i + 1] == '\0')
                 {
                     ColMarker[colIndexer] = i;
                     colIndexer++;
@@ -175,7 +175,7 @@ private void ReadColCount()
                     i = j + 1;
                     continue;
                 }
-                else if (Buffer[i] == (int)Options.FileType)
+                else if (Buffer[i] == (int)Options.Separator)
                 {
                     ColCount++;
                 }
diff --git a/src/Kafa/Reader/KafaReader.Col.cs b/src/Kafa/Reader/KafaReader.Col.cs
index fa11cf4..4ae061d 100644
--- a/src/Kafa/Reader/KafaReader.Col.cs
+++ b/src/Kafa/Reader/KafaReader.Col.cs
@@ -69,7 +69,8 @@ public ReadOnlySpan ReadColSpan(int index)
             int startIndex = _colMarkerIndexes[index];
             startIndex = startIndex == 0 ? 0 : startIndex + 1; // SKIP Separator
             int lastIndex = _colMarkerIndexes[lastColMarker];
-            lastIndex = _reader.HasCRLF && lastColMarker == _colMarkerIndexes.Length - 1 && lastIndex != _reader.LastBufferIndex // figure out use case
+            // TODO: Simplify this checks
+            lastIndex = _reader.HasCRLF && lastColMarker == _colMarkerIndexes.Length - 1 && lastIndex != _reader.LastBufferIndex ?
                 lastIndex - 1 : lastIndex;
             return _reader.ReadColSpan(startIndex, lastIndex);
         }
diff --git a/src/Kafa/Reflection/KafaReflection.Reader.cs b/src/Kafa/Reflection/KafaReflection.Reader.cs
index 2bf2268..c74ef79 100644
--- a/src/Kafa/Reflection/KafaReflection.Reader.cs
+++ b/src/Kafa/Reflection/KafaReflection.Reader.cs
@@ -39,7 +39,7 @@ public async Task GetProperties(List entities, TextWriter text
 
                 if (countHeader < propertyInfos.Length - 1)
                 {
-                    await textWriter.WriteAsync((char)TypeInfo.KafaOptions.FileType);
+                    await textWriter.WriteAsync((char)TypeInfo.KafaOptions.Separator);
                 }
                 countHeader++;
 
@@ -56,7 +56,7 @@ public async Task GetProperties(List entities, TextWriter text
 
                     if (count < propertyCount - 1)
                     {
-                        await textWriter.WriteAsync((char)TypeInfo.KafaOptions.FileType);
+                        await textWriter.WriteAsync((char)TypeInfo.KafaOptions.Separator);
                     }
                     count++;
                 }
diff --git a/src/KafaTests/KafaReadTests.cs b/src/KafaTests/KafaReadTests.cs
index d7f6e04..ba2472c 100644
--- a/src/KafaTests/KafaReadTests.cs
+++ b/src/KafaTests/KafaReadTests.cs
@@ -24,7 +24,7 @@ public class KafaReadTests
             new object[]{ "date,open,high,low,close,volume,Name\n2013-02-08,15.07,15.12,14.63,14.75,8407500,\"AAL\"\n2013-02-12,14.45,14.51,14.1,14.27,8126000,AAL" },
         };
 
-        private KafaOptions ReadEverythingOption => new KafaOptions() { HasHeader = false, FileType = FileType.CSV };
+        private KafaOptions ReadEverythingOption => new KafaOptions() { HasHeader = false, Separator = SeparatorFileType.CSV};
 
         [Theory]
         [MemberData(nameof(GetDifferentRows))]
@@ -213,7 +213,7 @@ public void ReadRowWithHeader()
             {
                 CultureInfo = System.Globalization.CultureInfo.CurrentCulture,
                 HasHeader = true,
-                FileType = FileType.CSV
+                Separator = SeparatorFileType.CSV
             };
 
             using var kafaReaderState = new KafaReadState((int)ioStream.Length, kafaOptions);
@@ -239,7 +239,7 @@ public void ReadRowWithoutHeader()
            {
                 CultureInfo = System.Globalization.CultureInfo.CurrentCulture,
                 HasHeader = false,
-                FileType = FileType.CSV
+                Separator = SeparatorFileType.CSV
             };
 
 
diff --git a/src/KafaTests/KafaWriteTests.cs b/src/KafaTests/KafaWriteTests.cs
index 0dbfc10..3d6011e 100644
--- a/src/KafaTests/KafaWriteTests.cs
+++ b/src/KafaTests/KafaWriteTests.cs
@@ -12,7 +12,7 @@ public async Task WriteCSVNoHeaderAsync()
                 new CsvData{ Date = DateTime.Parse("10/10/2023 4:09:45 PM"), Open=12.45, Close=12.99, High=13.00, Low=12.1, Name="AMZN", Volume=1233435512}
             };
 
-            var rowmem = await Kafa.WriteAsync(csvs, new KafaOptions() { HasHeader = false, FileType = FileType.CSV });
+            var rowmem = await Kafa.WriteAsync(csvs, new KafaOptions() { HasHeader = false, Separator=SeparatorFileType.CSV});
 
             string expected = "";
             if (Environment.OSVersion.Platform == PlatformID.Unix)
diff --git a/src/KafaTests/RFC4180Tests.cs b/src/KafaTests/RFC4180Tests.cs
index e15e160..d2688cc 100644
--- a/src/KafaTests/RFC4180Tests.cs
+++ b/src/KafaTests/RFC4180Tests.cs
@@ -5,7 +5,7 @@
     ///
     public class RFC4180Tests
    {
-        private KafaOptions ReadEverythingOption => new KafaOptions() { HasHeader = false, FileType = FileType.CSV };
+        private KafaOptions ReadEverythingOption => new KafaOptions() { HasHeader = false, Separator= SeparatorFileType.CSV};
 
        [Fact]
        public void ReadRowsWithCRLF()
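
For reference, a minimal usage sketch of the option surface this patch introduces: KafaOptions.Separator is now a plain char, with SeparatorFileType.CSV and SeparatorFileType.TSV as named constants. The sketch reuses the Kafa.WriteAsync call and the CsvData row type that appear in the updated tests above; the surrounding scaffolding (top-level statements, the List<CsvData> declaration, the final Console.WriteLine) is assumed for illustration and is not taken from the repository.

    using System;
    using System.Collections.Generic;
    using nyingi.Kafa;

    // Pick a separator: one of the provided constants, or any custom char.
    var options = new KafaOptions
    {
        HasHeader = false,
        Separator = SeparatorFileType.CSV   // ','; SeparatorFileType.TSV would give ';'
    };

    // CsvData is the row type used by KafaWriteTests in the diff above;
    // it is assumed to be accessible from this snippet.
    var rows = new List<CsvData>
    {
        new CsvData
        {
            Date = DateTime.Parse("10/10/2023 4:09:45 PM"),
            Open = 12.45, Close = 12.99, High = 13.00, Low = 12.1,
            Name = "AMZN", Volume = 1233435512
        }
    };

    // WriteAsync emits options.Separator between columns, mirroring
    // WriteCSVNoHeaderAsync in the diff above.
    var rowmem = await Kafa.WriteAsync(rows, options);

    // Inspect the serialized output (whatever type WriteAsync returns).
    Console.WriteLine(rowmem);

Note that Separator defaults to '\0' on a hand-built KafaOptions, so callers now set it explicitly (as every updated test does), while KafaOptions.Default still preconfigures the comma separator along with InvariantCulture and UTF8.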