
Commit ad25aa0

Update jb tools version & run cleanupcode (#565)
1 parent: 504361a · commit: ad25aa0

Some content is hidden: large commits have part of their diff collapsed by default, so only a subset of the changed files appears below.

48 files changed: +448, -449 lines

.config/dotnet-tools.json

Lines changed: 7 additions & 6 deletions
@@ -2,11 +2,12 @@
 "version": 1,
 "isRoot": true,
 "tools": {
-"jetbrains.resharper.globaltools": {
-"version": "2021.1.3",
-"commands": [
-"jb"
-]
-}
+"jetbrains.resharper.globaltools": {
+"version": "2025.2.2.1",
+"commands": [
+"jb"
+],
+"rollForward": false
+}
 }
 }

csharp.benchmark/DecimalRead.cs

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ public DecimalRead()
 return sign * ((decimal) n * n * n) / 1000M;
 }).ToArray();

-using (var fileWriter = new ParquetFileWriter(Filename, new Column[] {new Column<decimal>("Value", LogicalType.Decimal(precision: 29, scale: 3))}))
+using (var fileWriter = new ParquetFileWriter(Filename, new Column[] { new Column<decimal>("Value", LogicalType.Decimal(precision: 29, scale: 3)) }))
 {
 using var rowGroupWriter = fileWriter.AppendRowGroup();
 using var valueWriter = rowGroupWriter.NextColumn().LogicalWriter<decimal>();

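For context, the hunk above (and the matching one in DecimalWrite.cs below) only reformats a line in the decimal benchmarks' setup, which writes a single decimal column through ParquetSharp's low-level writer API. A minimal sketch of that write pattern follows; the file name and sample values are hypothetical, and the WriteBatch call is an assumption based on the library's usual logical-writer usage rather than something shown in this diff.

using ParquetSharp;

internal static class DecimalWriteSketch
{
    public static void Main()
    {
        // One decimal column; precision 29 / scale 3 matches the benchmark above.
        var columns = new Column[] { new Column<decimal>("Value", LogicalType.Decimal(precision: 29, scale: 3)) };

        using var fileWriter = new ParquetFileWriter("decimal_sketch.parquet", columns);
        using var rowGroupWriter = fileWriter.AppendRowGroup();
        using var valueWriter = rowGroupWriter.NextColumn().LogicalWriter<decimal>();

        // Hypothetical sample values; the benchmark generates a large array instead.
        valueWriter.WriteBatch(new decimal[] { 1.234m, -5.678m, 9.012m });

        fileWriter.Close();
    }
}
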
csharp.benchmark/DecimalWrite.cs

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ public DecimalWrite()
 [Benchmark(Baseline = true)]
 public long ParquetSharp()
 {
-using (var fileWriter = new ParquetFileWriter("decimal_timeseries.parquet", new Column[] {new Column<decimal>("Value", LogicalType.Decimal(precision: 29, scale: 3))}))
+using (var fileWriter = new ParquetFileWriter("decimal_timeseries.parquet", new Column[] { new Column<decimal>("Value", LogicalType.Decimal(precision: 29, scale: 3)) }))
 {
 using var rowGroupWriter = fileWriter.AppendRowGroup();
 using var valueWriter = rowGroupWriter.NextColumn().LogicalWriter<decimal>();

csharp.benchmark/FloatTimeSeriesWrite.cs

Lines changed: 4 additions & 4 deletions
@@ -54,7 +54,7 @@ public FloatTimeSeriesWrite()
 .Field(new Apache.Arrow.Field("Value", new Apache.Arrow.Types.FloatType(), false))
 .Build();
 _recordBatch = new RecordBatch(
-schema, new IArrowArray[] {timestampArray, idArray, valueArray}, timestampArray.Length);
+schema, new IArrowArray[] { timestampArray, idArray, valueArray }, timestampArray.Length);

 Console.WriteLine("Generated {0:N0} rows in {1:N2} sec", numRows, timer.Elapsed.TotalSeconds);
 Console.WriteLine();
@@ -213,7 +213,7 @@ private void ParquetChunkedImpl(ParquetFileWriter fileWriter)
 [Benchmark(Description = "RowOriented")]
 public long ParquetRowOriented()
 {
-using (var rowWriter = ParquetFile.CreateRowWriter<(DateTime, int, float)>("float_timeseries.parquet.roworiented", new[] {"DateTime", "ObjectId", "Value"}))
+using (var rowWriter = ParquetFile.CreateRowWriter<(DateTime, int, float)>("float_timeseries.parquet.roworiented", new[] { "DateTime", "ObjectId", "Value" }))
 {
 ParquetRowOrientedImpl(rowWriter);
 }
@@ -227,7 +227,7 @@ public long ParquetRowOrientedStream()
 using (var stream = new FileStream("float_timeseries.parquet.roworiented.stream", FileMode.Create))
 {
 using var writer = new IO.ManagedOutputStream(stream);
-using var rowWriter = ParquetFile.CreateRowWriter<(DateTime, int, float)>(writer, new[] {"DateTime", "ObjectId", "Value"});
+using var rowWriter = ParquetFile.CreateRowWriter<(DateTime, int, float)>(writer, new[] { "DateTime", "ObjectId", "Value" });
 ParquetRowOrientedImpl(rowWriter);
 }

@@ -250,7 +250,7 @@ private void ParquetRowOrientedImpl(ParquetRowWriter<(DateTime, int, float)> row
 [Benchmark(Description = "RowOriented (Batched)")]
 public long ParquetRowOrientedBatched()
 {
-using (var rowWriter = ParquetFile.CreateRowWriter<(DateTime, int, float)>("float_timeseries.parquet.roworiented.batched", new[] {"DateTime", "ObjectId", "Value"}))
+using (var rowWriter = ParquetFile.CreateRowWriter<(DateTime, int, float)>("float_timeseries.parquet.roworiented.batched", new[] { "DateTime", "ObjectId", "Value" }))
 {
 rowWriter.WriteRowSpan(_allRows);
 rowWriter.Close();

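The four hunks above are brace-spacing fixes inside the row-oriented benchmarks. For reference, here is a minimal sketch of the row-oriented API they exercise, built only from the calls visible in this diff (CreateRowWriter, WriteRowSpan, Close); the output path and sample rows are hypothetical.

using System;
using ParquetSharp.RowOriented;

internal static class RowOrientedSketch
{
    public static void Main()
    {
        // Hypothetical rows of (timestamp, object id, value), matching the benchmark's tuple shape.
        var rows = new (DateTime, int, float)[]
        {
            (new DateTime(2025, 1, 1), 1, 1.5f),
            (new DateTime(2025, 1, 1), 2, 2.5f),
        };

        using var rowWriter = ParquetFile.CreateRowWriter<(DateTime, int, float)>(
            "float_timeseries_sketch.parquet", new[] { "DateTime", "ObjectId", "Value" });

        // Write the whole batch in one call, as the batched benchmark above does.
        rowWriter.WriteRowSpan(rows);
        rowWriter.Close();
    }
}
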
csharp.benchmark/NestedRead.cs

Lines changed: 3 additions & 3 deletions
@@ -29,9 +29,9 @@ public NestedRead()
 using var noneType = LogicalType.None();
 using var nonNullInner = new PrimitiveNode("x0", Repetition.Required, noneType, PhysicalType.Int32);
 using var nullableInner = new PrimitiveNode("x1", Repetition.Required, noneType, PhysicalType.Int32);
-using var nonNullGroup = new GroupNode("g0", Repetition.Required, new[] {nonNullInner});
-using var nullableGroup = new GroupNode("g1", Repetition.Optional, new[] {nullableInner});
-using var schema = new GroupNode("schema", Repetition.Required, new[] {nonNullGroup, nullableGroup});
+using var nonNullGroup = new GroupNode("g0", Repetition.Required, new[] { nonNullInner });
+using var nullableGroup = new GroupNode("g1", Repetition.Optional, new[] { nullableInner });
+using var schema = new GroupNode("schema", Repetition.Required, new[] { nonNullGroup, nullableGroup });

 using var propertiesBuilder = new WriterPropertiesBuilder().Compression(Compression.Snappy);
 using var properties = propertiesBuilder.Build();

csharp.benchmark/NestedWrite.cs

Lines changed: 3 additions & 3 deletions
@@ -37,9 +37,9 @@ public long ParquetSharp()
 using var noneType = LogicalType.None();
 using var nonNullInner = new PrimitiveNode("x0", Repetition.Required, noneType, PhysicalType.Int32);
 using var nullableInner = new PrimitiveNode("x1", Repetition.Required, noneType, PhysicalType.Int32);
-using var nonNullGroup = new GroupNode("g0", Repetition.Required, new[] {nonNullInner});
-using var nullableGroup = new GroupNode("g1", Repetition.Optional, new[] {nullableInner});
-using var schema = new GroupNode("schema", Repetition.Required, new[] {nonNullGroup, nullableGroup});
+using var nonNullGroup = new GroupNode("g0", Repetition.Required, new[] { nonNullInner });
+using var nullableGroup = new GroupNode("g1", Repetition.Optional, new[] { nullableInner });
+using var schema = new GroupNode("schema", Repetition.Required, new[] { nonNullGroup, nullableGroup });

 using var propertiesBuilder = new WriterPropertiesBuilder().Compression(Compression.Snappy);
 using var properties = propertiesBuilder.Build();

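Both nested benchmarks build their schema from low-level schema nodes (PrimitiveNode/GroupNode) rather than Column descriptors. A minimal sketch of that construction using only the calls shown above; the final ParquetFileWriter overload taking a schema node and writer properties is an assumption about the library's API rather than something visible in this diff, and the file name is hypothetical.

using ParquetSharp;
using ParquetSharp.Schema;

internal static class NestedSchemaSketch
{
    public static void Main()
    {
        // Group "g0" containing a required int32 field "x0", nested under the root schema node.
        using var noneType = LogicalType.None();
        using var inner = new PrimitiveNode("x0", Repetition.Required, noneType, PhysicalType.Int32);
        using var group = new GroupNode("g0", Repetition.Required, new[] { inner });
        using var schema = new GroupNode("schema", Repetition.Required, new[] { group });

        using var propertiesBuilder = new WriterPropertiesBuilder().Compression(Compression.Snappy);
        using var properties = propertiesBuilder.Build();

        // Assumed overload: a ParquetFileWriter constructed from a GroupNode schema and WriterProperties.
        using var fileWriter = new ParquetFileWriter("nested_sketch.parquet", schema, properties);
        fileWriter.Close();
    }
}
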
csharp.test/Arrow/TestEncryptionRoundTrip.cs

Lines changed: 4 additions & 4 deletions
@@ -28,8 +28,8 @@ public static async Task TestArrowColumnEncryption()
 using var encryptionConfig = new EncryptionConfiguration("Key0");
 encryptionConfig.ColumnKeys = new Dictionary<string, IReadOnlyList<string>>
 {
-{"Key1", new[] {"x"}},
-{"Key2", new[] {"y"}},
+{ "Key1", new[] { "x" } },
+{ "Key2", new[] { "y" } },
 };

 using var buffer = new ResizableBuffer();
@@ -74,8 +74,8 @@ public static void TestReadWithoutDecryptionProperties()
 using var encryptionConfig = new EncryptionConfiguration("Key0");
 encryptionConfig.ColumnKeys = new Dictionary<string, IReadOnlyList<string>>
 {
-{"Key1", new[] {"x"}},
-{"Key2", new[] {"y"}},
+{ "Key1", new[] { "x" } },
+{ "Key2", new[] { "y" } },
 };

 using var buffer = new ResizableBuffer();

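The two hunks above reformat the column-key dictionaries used for Parquet modular encryption: each master key identifier ("Key1", "Key2") maps to the list of columns it encrypts, with "Key0" used as the footer key. A minimal sketch of building such a configuration and turning it into file encryption properties; the CryptoFactory and KmsConnectionConfig arguments are assumed to be created elsewhere (as in TestCryptoFactory further down), and only calls visible in this commit are used.

using System.Collections.Generic;
using ParquetSharp;
using ParquetSharp.Encryption;

internal static class EncryptionConfigSketch
{
    // cryptoFactory and connectionConfig are assumed to be set up by the caller,
    // e.g. as in csharp.test/Encryption/TestCryptoFactory.cs below.
    public static FileEncryptionProperties GetProperties(
        CryptoFactory cryptoFactory, KmsConnectionConfig connectionConfig)
    {
        using var encryptionConfig = new EncryptionConfiguration("Key0");
        encryptionConfig.ColumnKeys = new Dictionary<string, IReadOnlyList<string>>
        {
            { "Key1", new[] { "x" } },
            { "Key2", new[] { "y" } },
        };

        return cryptoFactory.GetFileEncryptionProperties(connectionConfig, encryptionConfig);
    }
}
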
csharp.test/Arrow/TestFileReader.cs

Lines changed: 2 additions & 2 deletions
@@ -127,7 +127,7 @@ public async Task TestReadSelectedRowGroups()
 using var fileReader = new FileReader(inStream);

 using var batchReader = fileReader.GetRecordBatchReader(
-rowGroups: new[] {1, 2});
+rowGroups: new[] { 1, 2 });

 int rowsRead = 0;
 while (true)
@@ -166,7 +166,7 @@ public async Task TestReadSelectedColumns()
 using var fileReader = new FileReader(inStream);

 using var batchReader = fileReader.GetRecordBatchReader(
-columns: new[] {1, 2});
+columns: new[] { 1, 2 });

 var schema = batchReader.Schema;
 Assert.That(schema.FieldsList.Count, Is.EqualTo(2));

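The two changes above are again brace spacing only, but the surrounding tests show how the Arrow FileReader reads a subset of row groups or columns. A minimal sketch combining the two calls; the file path is hypothetical, constructing FileReader directly from a path (rather than the stream used in the tests) is an assumption, and so is passing rowGroups and columns together in one call.

using System.Threading.Tasks;
using ParquetSharp.Arrow;

internal static class SelectiveReadSketch
{
    public static async Task ReadSelectedAsync()
    {
        // Assumed: FileReader constructed from a file path.
        using var fileReader = new FileReader("data_sketch.parquet");

        // Read only row groups 1 and 2, projected down to columns 1 and 2.
        using var batchReader = fileReader.GetRecordBatchReader(
            rowGroups: new[] { 1, 2 },
            columns: new[] { 1, 2 });

        while (true)
        {
            using var batch = await batchReader.ReadNextRecordBatchAsync();
            if (batch == null)
            {
                break;
            }
            // Each batch is an Apache.Arrow.RecordBatch; process it here.
        }
    }
}
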
csharp.test/Arrow/TestFileWriter.cs

Lines changed: 16 additions & 16 deletions
@@ -15,7 +15,7 @@ public class TestFileWriter
 [Test]
 public void TestWriteFile()
 {
-var fields = new[] {new Field("x", new Apache.Arrow.Types.Int32Type(), false)};
+var fields = new[] { new Field("x", new Apache.Arrow.Types.Int32Type(), false) };
 var schema = new Apache.Arrow.Schema(fields, null);

 using var dir = new TempWorkingDirectory();
@@ -27,7 +27,7 @@ public void TestWriteFile()
 [Test]
 public void TestWriteOutputStream()
 {
-var fields = new[] {new Field("x", new Apache.Arrow.Types.Int32Type(), false)};
+var fields = new[] { new Field("x", new Apache.Arrow.Types.Int32Type(), false) };
 var schema = new Apache.Arrow.Schema(fields, null);

 using var buffer = new ResizableBuffer();
@@ -40,7 +40,7 @@ public void TestWriteOutputStream()
 [Test]
 public void TestWriteAndReadStream()
 {
-var fields = new[] {new Field("x", new Apache.Arrow.Types.Int32Type(), false)};
+var fields = new[] { new Field("x", new Apache.Arrow.Types.Int32Type(), false) };
 var schema = new Apache.Arrow.Schema(fields, null);

 using var stream = new MemoryStream();
@@ -192,12 +192,12 @@ RecordBatch GetBatch(int xVal, int numRows)
 using var fileReader = new FileReader(inStream);
 Assert.That(fileReader.NumRowGroups, Is.EqualTo(4));

-var expectedSizes = new[] {200, 200, 250, 50};
-var expectedXValues = new[] {0, 1, 2, 2};
+var expectedSizes = new[] { 200, 200, 250, 50 };
+var expectedXValues = new[] { 0, 1, 2, 2 };

 for (var rowGroupIdx = 0; rowGroupIdx < fileReader.NumRowGroups; ++rowGroupIdx)
 {
-using var batchReader = fileReader.GetRecordBatchReader(rowGroups: new[] {rowGroupIdx});
+using var batchReader = fileReader.GetRecordBatchReader(rowGroups: new[] { rowGroupIdx });
 using var batch = await batchReader.ReadNextRecordBatchAsync();
 Assert.That(batch, Is.Not.Null);
 Assert.That(batch.Length, Is.EqualTo(expectedSizes[rowGroupIdx]));
@@ -233,15 +233,15 @@ public async Task TestWriteRowGroupColumns()
 writer.NewRowGroup();

 using (var intArray = new Int32Array.Builder()
-.AppendRange(Enumerable.Range(start, rowsPerRowGroup))
-.Build())
+.AppendRange(Enumerable.Range(start, rowsPerRowGroup))
+.Build())
 {
 writer.WriteColumnChunk(intArray);
 }

 using (var floatArray = new FloatArray.Builder()
-.AppendRange(Enumerable.Range(start, rowsPerRowGroup).Select(i => i / 100.0f))
-.Build())
+.AppendRange(Enumerable.Range(start, rowsPerRowGroup).Select(i => i / 100.0f))
+.Build())
 {
 writer.WriteColumnChunk(floatArray);
 }
@@ -285,7 +285,7 @@ public async Task TestWriteRowGroupColumnsChunked()
 using var intArray1 = new Int32Array.Builder()
 .AppendRange(Enumerable.Range(start1, chunkSize))
 .Build();
-writer.WriteColumnChunk(new ChunkedArray(new Array[] {intArray0, intArray1}));
+writer.WriteColumnChunk(new ChunkedArray(new Array[] { intArray0, intArray1 }));
 }

 {
@@ -295,7 +295,7 @@ public async Task TestWriteRowGroupColumnsChunked()
 using var floatArray1 = new FloatArray.Builder()
 .AppendRange(Enumerable.Range(start1, chunkSize).Select(i => i / 100.0f))
 .Build();
-writer.WriteColumnChunk(new ChunkedArray(new Array[] {floatArray0, floatArray1}));
+writer.WriteColumnChunk(new ChunkedArray(new Array[] { floatArray0, floatArray1 }));
 }
 }

@@ -309,7 +309,7 @@ public async Task TestWriteRowGroupColumnsChunked()
 [Test]
 public void TestWriteWithProperties()
 {
-var fields = new[] {new Field("x", new Apache.Arrow.Types.Int32Type(), false)};
+var fields = new[] { new Field("x", new Apache.Arrow.Types.Int32Type(), false) };
 var schema = new Apache.Arrow.Schema(fields, null);

 using var propertiesBuilder = new WriterPropertiesBuilder();
@@ -332,11 +332,11 @@ public void TestWriteAndReadMetadata()
 // Writing key-value metadata requires using the Arrow schema
 var metadata = new Dictionary<string, string>
 {
-{"foo", "bar"},
-{"baz", "123"},
+{ "foo", "bar" },
+{ "baz", "123" },
 };

-var fields = new[] {new Field("x", new Apache.Arrow.Types.Int32Type(), false)};
+var fields = new[] { new Field("x", new Apache.Arrow.Types.Int32Type(), false) };
 var schema = new Apache.Arrow.Schema(fields, metadata);

 using var buffer = new ResizableBuffer();

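TestFileWriter exercises the Arrow-based FileWriter: building an Apache.Arrow schema, starting row groups, and writing each column as an Arrow array (or ChunkedArray). A minimal sketch of writing one row group column by column, using the calls visible above; the FileWriter constructor taking a path and schema is an assumption (the tests write to buffers, streams, and temporary directories), and the path and values are hypothetical.

using Apache.Arrow;
using ParquetSharp.Arrow;

internal static class ArrowWriteSketch
{
    public static void Main()
    {
        // Single non-nullable int32 field, as in the tests above.
        var fields = new[] { new Field("x", new Apache.Arrow.Types.Int32Type(), false) };
        var schema = new Apache.Arrow.Schema(fields, null);

        // Assumed constructor: FileWriter(path, schema).
        using var writer = new FileWriter("arrow_sketch.parquet", schema);

        writer.NewRowGroup();
        using (var intArray = new Int32Array.Builder().AppendRange(new[] { 1, 2, 3 }).Build())
        {
            // One column chunk per schema field, as in TestWriteRowGroupColumns above.
            writer.WriteColumnChunk(intArray);
        }

        writer.Close();
    }
}
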
csharp.test/Encryption/TestCryptoFactory.cs

Lines changed: 4 additions & 4 deletions
@@ -17,7 +17,7 @@ public static void TestCreateEncryptionProperties()
 using var encryptionConfig = new EncryptionConfiguration("Key0");
 encryptionConfig.ColumnKeys = new Dictionary<string, IReadOnlyList<string>>
 {
-{"Key1", new[] {"col0", "col1"}}
+{ "Key1", new[] { "col0", "col1" } }
 };
 using var fileEncryptionProperties =
 cryptoFactory.GetFileEncryptionProperties(connectionConfig, encryptionConfig);
@@ -65,7 +65,7 @@ public static void TestThrowingFactory()
 using var encryptionConfig = new EncryptionConfiguration("Key0");
 encryptionConfig.ColumnKeys = new Dictionary<string, IReadOnlyList<string>>
 {
-{"Key1", new[] {"col0", "col1"}}
+{ "Key1", new[] { "col0", "col1" } }
 };

 var exception = Assert.Throws<ParquetException>(() =>
@@ -82,7 +82,7 @@ public static void TestConnectionConfigPassThrough()
 const string updatedKeyAccessToken = "NEW_SECRET";
 var customKmsConf = new Dictionary<string, string>
 {
-{"key", "value"}
+{ "key", "value" }
 };

 using var connectionConfig = new KmsConnectionConfig();
@@ -108,7 +108,7 @@ public static void TestConnectionConfigPassThrough()
 using var encryptionConfig = new EncryptionConfiguration("Key0");
 encryptionConfig.ColumnKeys = new Dictionary<string, IReadOnlyList<string>>
 {
-{"Key1", new[] {"col0", "col1"}}
+{ "Key1", new[] { "col0", "col1" } }
 };

 connectionConfig.RefreshKeyAccessToken(updatedKeyAccessToken);

0 commit comments
