Skip to content

Commit

Permalink
SheetAppender header row bug fix #45
Browse files Browse the repository at this point in the history
  • Loading branch information
SteveWinward committed Sep 28, 2024
1 parent 3545618 commit 126919e
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 15 deletions.
2 changes: 1 addition & 1 deletion src/GoogleSheetsWrapper/GoogleSheetsWrapper.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
<PackageProjectUrl>https://github.com/SteveWinward/GoogleSheetsWrapper</PackageProjectUrl>
<PackageLicenseFile>LICENSE</PackageLicenseFile>
<RepositoryUrl>https://github.com/SteveWinward/GoogleSheetsWrapper</RepositoryUrl>
<Version>2.0.18</Version>
<Version>2.0.19</Version>
<PackageTags>Google Sheets</PackageTags>
<PackageReadmeFile>README.md</PackageReadmeFile>
<Description>A simple wrapper library that makes it easier to perform CRUD operations against Google Sheets spreadsheets.</Description>
Expand Down
32 changes: 18 additions & 14 deletions src/GoogleSheetsWrapper/SheetAppender.cs
Original file line number Diff line number Diff line change
Expand Up @@ -43,48 +43,51 @@ public SheetAppender(SheetHelper sheetHelper)
/// <summary>
/// Initializes the wrapped SheetHelper with the given JSON credentials string.
/// </summary>
/// <param name="jsonCredentials">JSON credentials used to authenticate against the Google Sheets API</param>
public void Init(string jsonCredentials) => _sheetHelper.Init(jsonCredentials);

}

/// <summary>
/// Appends a CSV file and all its rows into the current Google Sheets tab
/// </summary>
/// <param name="filePath">Path to the CSV file to read and append</param>
/// <param name="csvHasHeaderRecord">This boolean indicates whether the CSV file has a header record row or not</param>
/// <param name="batchWaitTime">See https://developers.google.com/sheets/api/limits at last check is 60 requests a minute, so 1 second delay per request should avoid limiting</param>
/// <param name="batchSize">Increasing batch size may improve throughput. Default is conservative.</param>
/// <param name="skipWritingHeaderRow">This boolean indicates if you want to actually write the header row to the Google sheet</param>
public void AppendCsv(string filePath, bool csvHasHeaderRecord, int batchWaitTime = 1000, int batchSize = 100, bool skipWritingHeaderRow = false)
{
    // Open the file read-only for the duration of the append; the using
    // block guarantees the stream is closed even if the upload throws.
    using (var stream = new FileStream(filePath, FileMode.Open))
    {
        // Delegate to the Stream-based overload so all CSV handling lives in one place
        AppendCsv(stream, csvHasHeaderRecord, batchWaitTime, batchSize, skipWritingHeaderRow);
    }
}

/// <summary>
/// Appends a CSV file and all its rows into the current Google Sheets tab
/// </summary>
/// <param name="stream">Stream containing the CSV data to append</param>
/// <param name="csvHasHeaderRecord">This boolean indicates whether the CSV file has a header record row or not</param>
/// <param name="batchWaitTime">See https://developers.google.com/sheets/api/limits at last check is 60 requests a minute, so 1 second delay per request should avoid limiting</param>
/// <param name="batchSize">Increasing batch size may improve throughput. Default is conservative.</param>
/// <param name="skipWritingHeaderRow">This boolean indicates if you want to actually write the header row to the Google sheet</param>
public void AppendCsv(Stream stream, bool csvHasHeaderRecord, int batchWaitTime = 1000, int batchSize = 100, bool skipWritingHeaderRow = false)
{
    // Tell CsvHelper whether the first record in the stream is a header row.
    // InvariantCulture keeps parsing deterministic regardless of host locale.
    var csvConfig = new CsvConfiguration(CultureInfo.InvariantCulture)
    {
        HasHeaderRecord = csvHasHeaderRecord
    };

    // Delegate to the CsvConfiguration-based overload, forwarding the
    // caller's choice about whether the header row is written to the sheet
    AppendCsv(stream, csvConfig, batchWaitTime, batchSize, skipWritingHeaderRow);
}

/// <summary>
/// Appends a CSV file and all its rows into the current Google Sheets tab
/// </summary>
/// <param name="stream"></param>
/// <param name="csvConfig"></param>
/// <param name="batchWaitTime">See https://developers.google.com/sheets/api/limits at last check is 60 requests a minute, so 1 second delay per request should avoid limiting</param>
/// <param name="batchWaitTime">See https://developers.google.com/sheets/api/limits at last check is 60 requests a minute, so 1 second delay per request should avoid limiting</param>
/// <param name="batchSize">Increasing batch size may improve throughput. Default is conservative.</param>
public void AppendCsv(Stream stream, CsvConfiguration csvConfig, int batchWaitTime = 1000, int batchSize = 100)
/// <param name="skipWritingHeaderRow">This boolean indicates if you want to actually write the header row to the Google sheet</param>
public void AppendCsv(Stream stream, CsvConfiguration csvConfig, int batchWaitTime = 1000, int batchSize = 100, bool skipWritingHeaderRow = false)
{
using var streamReader = new StreamReader(stream);
using var csv = new CsvReader(streamReader, csvConfig);
Expand All @@ -100,7 +103,8 @@ public void AppendCsv(Stream stream, CsvConfiguration csvConfig, int batchWaitTi

var currentBatchCount = 0;

if (csvConfig.HasHeaderRecord)
// Only write the header record when the CSV has one and the caller has not asked to skip writing it
if (csvConfig.HasHeaderRecord && !skipWritingHeaderRow)
{
currentBatchCount++;

Expand Down

0 comments on commit 126919e

Please sign in to comment.