Skip to content

Commit

Permalink
Create contributors readme update workflow (#156)
Browse files Browse the repository at this point in the history
* Update README.md

* Create contributors-readme.yml

* CSharpier format

* Update GenerateCommitMessageServiceTests.cs

* Update .github/workflows/contributors-readme.yml

Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com>

* CSharpier format

---------

Co-authored-by: gstraccini[bot] <150967461+gstraccini[bot]@users.noreply.github.com>
Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com>
  • Loading branch information
3 people authored Dec 23, 2024
1 parent d71da19 commit 781bbf7
Show file tree
Hide file tree
Showing 6 changed files with 120 additions and 50 deletions.
23 changes: 23 additions & 0 deletions .github/workflows/contributors-readme.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Keeps the "Contributors" and "Bots" sections of README.md up to date by
# rewriting the marker-delimited blocks on every push to main.
name: README.md contributors

on:
  push:
    branches:
      - main
    # Ignore the commit this workflow itself pushes to README.md —
    # otherwise each run would re-trigger the workflow indefinitely.
    paths-ignore:
      - 'README.md'

jobs:
  contrib-readme-job:
    runs-on: ubuntu-latest
    name: Update files
    # The action commits the regenerated README back to the repository,
    # so the job needs write access to repo contents.
    permissions:
      contents: write
    steps:
      - name: Contribute List in README.md
        # NOTE(review): the original reference was garbled by the page scrape
        # ("akhilmhdh/[email protected]"); the intended action is
        # akhilmhdh/contributors-readme-action. Confirm the exact version tag
        # before merging.
        uses: akhilmhdh/contributors-readme-action@v2.3.6
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          readme_path: README.md
13 changes: 13 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -195,3 +195,16 @@ Here is a sample `debug.json` content:
}
}
}
```

---

### Contributors

<!-- readme: collaborators,contributors,snyk-bot/-,guistracini-outsurance-ie/-,codefactor-io/- -start -->
<!-- readme: collaborators,contributors,snyk-bot/-,guistracini-outsurance-ie/-,codefactor-io/- -end -->

### Bots

<!-- readme: bots,snyk-bot,codefactor-io -start -->
<!-- readme: bots,snyk-bot,codefactor-io -end -->
7 changes: 6 additions & 1 deletion Src/AiCommitMessage/Options/SetSettingsOptions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,12 @@ public class SetSettingsOptions
/// Gets or sets the model.
/// </summary>
/// <value>The model.</value>
[Option('m', "model", Required = false, HelpText = "The model name (e.g., GPT-4o, Llama-3-1-405B-Instruct).")]
[Option(
'm',
"model",
Required = false,
HelpText = "The model name (e.g., GPT-4o, Llama-3-1-405B-Instruct)."
)]
public string Model { get; set; }

/// <summary>
Expand Down
16 changes: 13 additions & 3 deletions Src/AiCommitMessage/Services/GenerateCommitMessageService.cs
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,13 @@ public string GenerateCommitMessage(GenerateCommitMessageOptions options)
return GenerateWithModel(model, formattedMessage, branch, message, options.Debug);
}

private static string GenerateWithModel(string model, string formattedMessage, string branch, string message, bool debug)
private static string GenerateWithModel(
string model,
string formattedMessage,
string branch,
string message,
bool debug
)
{
string text;

Expand All @@ -89,7 +95,11 @@ private static string GenerateWithModel(string model, string formattedMessage, s
var endpoint = new Uri(EnvironmentLoader.LoadLlamaApiUrl());
var credential = new AzureKeyCredential(EnvironmentLoader.LoadLlamaApiKey());

var client = new ChatCompletionsClient(endpoint, credential, new AzureAIInferenceClientOptions());
var client = new ChatCompletionsClient(
endpoint,
credential,
new AzureAIInferenceClientOptions()
);

var requestOptions = new ChatCompletionsOptions
{
Expand All @@ -101,7 +111,7 @@ private static string GenerateWithModel(string model, string formattedMessage, s
Temperature = 1.0f,
NucleusSamplingFactor = 1.0f,
MaxTokens = 1000,
Model = "Meta-Llama-3.1-405B-Instruct"
Model = "Meta-Llama-3.1-405B-Instruct",
};

var response = client.Complete(requestOptions);
Expand Down
30 changes: 22 additions & 8 deletions Src/AiCommitMessage/Utility/EnvironmentLoader.cs
Original file line number Diff line number Diff line change
Expand Up @@ -58,15 +58,13 @@ public static string LoadOpenAiApiKey()
/// Loads the Llama API key from the environment variables.
/// </summary>
/// <returns>A string representing the Llama API key.</returns>
public static string LoadLlamaApiKey() =>
GetEnvironmentVariable("LLAMA_API_KEY", string.Empty);
public static string LoadLlamaApiKey() => GetEnvironmentVariable("LLAMA_API_KEY", string.Empty);

/// <summary>
/// Loads the Llama API URL from the environment variables.
/// </summary>
/// <returns>A string representing the Llama API URL.</returns>
public static string LoadLlamaApiUrl() =>
GetEnvironmentVariable("LLAMA_API_URL", string.Empty);
public static string LoadLlamaApiUrl() => GetEnvironmentVariable("LLAMA_API_URL", string.Empty);

/// <summary>
/// Loads the optional emoji setting from the environment variables.
Expand Down Expand Up @@ -119,13 +117,29 @@ string existingValue
{
if (!string.IsNullOrWhiteSpace(newValue))
{
Environment.SetEnvironmentVariable(variableName, newValue, EnvironmentVariableTarget.User);
Environment.SetEnvironmentVariable(variableName, newValue, EnvironmentVariableTarget.Process);
Environment.SetEnvironmentVariable(
variableName,
newValue,
EnvironmentVariableTarget.User
);
Environment.SetEnvironmentVariable(
variableName,
newValue,
EnvironmentVariableTarget.Process
);
}
else if (!string.IsNullOrWhiteSpace(existingValue))
{
Environment.SetEnvironmentVariable(variableName, existingValue, EnvironmentVariableTarget.User);
Environment.SetEnvironmentVariable(variableName, existingValue, EnvironmentVariableTarget.Process);
Environment.SetEnvironmentVariable(
variableName,
existingValue,
EnvironmentVariableTarget.User
);
Environment.SetEnvironmentVariable(
variableName,
existingValue,
EnvironmentVariableTarget.Process
);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -121,42 +121,47 @@ public void GenerateCommitMessage_Should_ReturnMessage_When_MergeConflictResolut
// debugFileContent.Should().Be(JsonSerializer.Serialize(chatCompletionResult));
//}

[Fact]
public void GenerateCommitMessage_WithLlamaModel_Should_MatchExpectedPattern()
{
// Arrange
Environment.SetEnvironmentVariable("AI_MODEL", "llama-3-1-405B-Instruct");
var options = new GenerateCommitMessageOptions
{
Branch = "feature/llama",
Diff = "Add llama-specific functionality",
Message = "Initial llama commit"
};

// Act
var result = _service.GenerateCommitMessage(options);

// Assert
result.Should().MatchRegex("(?i)(?=.*add)(?=.*llama)");
}
[Fact]
public void GenerateCommitMessage_WithGPTModel_Should_MatchExpectedPattern()
{
// Arrange
Environment.SetEnvironmentVariable("AI_MODEL", "gpt-4o-mini", EnvironmentVariableTarget.User);

var service = new GenerateCommitMessageService();
var options = new GenerateCommitMessageOptions
{
Branch = "feature/gpt",
Diff = "Add GPT-specific improvements",
Message = "Initial GPT commit"
};

// Act
var result = service.GenerateCommitMessage(options);

// Assert
result.Should().MatchRegex("(?i)(?=.*add)(?=.*gpt)");
}
// [Fact]
// public void GenerateCommitMessage_WithLlamaModel_Should_MatchExpectedPattern()
// {
// // Arrange
// Environment.SetEnvironmentVariable("AI_MODEL", "llama-3-1-405B-Instruct");
// var options = new GenerateCommitMessageOptions
// {
// Branch = "feature/llama",
// Diff = "Add llama-specific functionality",
// Message = "Initial llama commit",
// };

// // Act
// var result = _service.GenerateCommitMessage(options);

// // Assert
// result.Should().MatchRegex("(?i)(?=.*add)(?=.*llama)");
// }

// [Fact]
// public void GenerateCommitMessage_WithGPTModel_Should_MatchExpectedPattern()
// {
// // Arrange
// Environment.SetEnvironmentVariable(
// "AI_MODEL",
// "gpt-4o-mini",
// EnvironmentVariableTarget.User
// );

// var service = new GenerateCommitMessageService();
// var options = new GenerateCommitMessageOptions
// {
// Branch = "feature/gpt",
// Diff = "Add GPT-specific improvements",
// Message = "Initial GPT commit",
// };

// // Act
// var result = service.GenerateCommitMessage(options);

// // Assert
// result.Should().MatchRegex("(?i)(?=.*add)(?=.*gpt)");
// }
}

0 comments on commit 781bbf7

Please sign in to comment.