fix: Fixed latest warnings.
HavenDV committed Jan 4, 2024
1 parent 8d7d314 commit 7f91993
Showing 19 changed files with 328 additions and 127 deletions.
@@ -2,7 +2,8 @@

<PropertyGroup>
<TargetFrameworks>net4.6.2;netstandard2.0;net6.0;net7.0;net8.0</TargetFrameworks>
<NoWarn>$(NoWarn);CA1720;CA1510</NoWarn>
<SignAssembly>false</SignAssembly>
<NoWarn>$(NoWarn);CA1720;CA1510;CA2100</NoWarn>
</PropertyGroup>

<PropertyGroup Label="NuGet">
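
For context on the suppressions above: CA1720 is "Identifier contains type name", CA1510 asks for the ArgumentNullException.ThrowIfNull helper (which does not exist on net462 or netstandard2.0, two of the listed targets), and the newly added CA2100 is "Review SQL queries for security vulnerabilities". CA2100 fires whenever a command's text is built from non-constant strings, which is hard to avoid when a table name has to be spliced into the SQL, because identifiers cannot be passed as query parameters; that is presumably why the rule is silenced project-wide here. A minimal sketch of the kind of code that triggers it, not taken from this repository, using only the generic ADO.NET base classes:

using System.Data.Common;
using System.Threading.Tasks;

public static class SqlSketch
{
    // CA2100 is reported on the CommandText assignment because the string is not
    // constant, even though the only dynamic parts are a (trusted) identifier and
    // a properly parameterized value.
    public static async Task<int> DeleteByIdAsync(
        DbConnection connection, string tableName, string id)
    {
        using DbCommand command = connection.CreateCommand();
        command.CommandText = $"DELETE FROM {tableName} WHERE id = @id";

        DbParameter parameter = command.CreateParameter();
        parameter.ParameterName = "@id";
        parameter.Value = id;
        command.Parameters.Add(parameter);

        return await command.ExecuteNonQueryAsync().ConfigureAwait(false);
    }
}
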
@@ -158,6 +158,7 @@ ON CONFLICT (id)
/// <summary>
/// Get record by id
/// </summary>
[CLSCompliant(false)]
public async Task<EmbeddingTableRecord?> GetRecordByIdAsync(
string tableName,
string id,
@@ -209,6 +210,7 @@ ON CONFLICT (id)
/// <param name="withEmbeddings">include or not embeddings in the result</param>
/// <param name="cancellationToken"><see cref="CancellationToken"/></param>
/// <returns></returns>
[CLSCompliant(false)]
public async Task<IEnumerable<(EmbeddingTableRecord, float)>> GetWithDistanceAsync(
string tableName, float[] embedding, DistanceStrategy strategy,
int limit, double minRelevanceScore = 0, bool withEmbeddings = false,
@@ -273,6 +275,7 @@ ORDER BY score DESC
/// <param name="withEmbeddings">include or not embeddings in the result</param>
/// <param name="cancellationToken"><see cref="CancellationToken"/></param>
/// <returns></returns>
[CLSCompliant(false)]
public async Task<EmbeddingTableRecord?> ReadAsync(
string tableName, string id,
bool withEmbeddings = false,
@@ -312,10 +315,15 @@ ORDER BY score DESC
/// <param name="withEmbeddings">include or not embeddings in the result</param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
[CLSCompliant(false)]
public async IAsyncEnumerable<EmbeddingTableRecord> ReadBatchAsync(
string tableName, IReadOnlyList<string> ids, bool withEmbeddings = false,
string tableName,
IReadOnlyList<string> ids,
bool withEmbeddings = false,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ids = ids ?? throw new ArgumentNullException(nameof(ids));

if (ids.Count == 0)
{
yield break;
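
The ids guard above uses the "argument ?? throw new ArgumentNullException(...)" form rather than ArgumentNullException.ThrowIfNull, consistent with CA1510 being disabled for the older target frameworks. One general C# caveat, not something this commit changes: inside an async iterator the check only runs once enumeration begins, because the whole body is deferred until the first MoveNextAsync call. When eager validation is wanted, the usual pattern splits the method into a validating wrapper and a private iterator; a self-contained sketch with hypothetical names, not this repository's code:

using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;

public static class EagerValidationSketch
{
    // Public wrapper: the null check runs as soon as the method is called.
    public static IAsyncEnumerable<string> ReadBatchAsync(
        IReadOnlyList<string> ids,
        CancellationToken cancellationToken = default)
    {
        ids = ids ?? throw new ArgumentNullException(nameof(ids));

        return ReadBatchCoreAsync(ids, cancellationToken);
    }

    // Private iterator: its body executes lazily, on the first MoveNextAsync call.
    private static async IAsyncEnumerable<string> ReadBatchCoreAsync(
        IReadOnlyList<string> ids,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        foreach (var id in ids)
        {
            cancellationToken.ThrowIfCancellationRequested();
            await Task.Yield();
            yield return id;
        }
    }
}
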
@@ -372,9 +380,13 @@ public async Task DeleteAsync(string tableName, string id, CancellationToken can
/// <param name="tableName">embeddings table</param>
/// <param name="ids">list of record ids to delete</param>
/// <param name="cancellationToken"><see cref="CancellationToken"/></param>
public async Task DeleteBatchAsync(string tableName, IReadOnlyList<string> ids,
public async Task DeleteBatchAsync(
string tableName,
IReadOnlyList<string> ids,
CancellationToken cancellationToken = default)
{
ids = ids ?? throw new ArgumentNullException(nameof(ids));

if (ids.Count == 0)
{
return;
@@ -465,6 +477,7 @@ public enum DistanceStrategy
/// <summary>
/// Document with embedding db record
/// </summary>
[CLSCompliant(false)]
public record EmbeddingTableRecord(
string Id,
string Content,
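
About the repeated [CLSCompliant(false)] attributes in this file: once a publicly exposed type such as EmbeddingTableRecord is marked non-compliant (typically because its signature involves a type the Common Language Specification does not cover, or a type from an assembly that does not declare compliance), every public member that takes or returns it must carry the same attribute, otherwise the compiler raises CS3001/CS3002/CS3003 in an assembly marked [assembly: CLSCompliant(true)]. A minimal, self-contained illustration with hypothetical types, not this repository's code:

using System;

[assembly: CLSCompliant(true)]

// 'uint' is not CLS-compliant, so the record exposing it must opt out explicitly.
[CLSCompliant(false)]
public record VectorRecord(string Id, uint Dimensions);

public class VectorStore
{
    // ...and so must every public member whose signature mentions that record;
    // without the attribute the compiler reports CS3002 here.
    [CLSCompliant(false)]
    public VectorRecord Find(string id) => new VectorRecord(id, 3);
}
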
31 changes: 29 additions & 2 deletions src/libs/LangChain.Core/Chains/Chain.cs
@@ -133,19 +133,46 @@ public static STTChain<T> STT<T>(ISpeechToTextModel<T> model,
return new STTChain<T>(model, settings, inputKey, outputKey);
}

public static ReActAgentExecutorChain ReActAgentExecutor(IChatModel model, string reActPrompt = null,
int maxActions = 5, string inputKey = "input",
/// <summary>
///
/// </summary>
/// <param name="model"></param>
/// <param name="reActPrompt"></param>
/// <param name="maxActions"></param>
/// <param name="inputKey"></param>
/// <param name="outputKey"></param>
/// <returns></returns>
public static ReActAgentExecutorChain ReActAgentExecutor(
IChatModel model,
string? reActPrompt = null,
int maxActions = 5,
string inputKey = "input",
string outputKey = "final_answer")
{
return new ReActAgentExecutorChain(model, reActPrompt, maxActions, inputKey, outputKey);
}

/// <summary>
///
/// </summary>
/// <param name="inputKey"></param>
/// <param name="outputKey"></param>
/// <returns></returns>
public static ReActParserChain ReActParser(
string inputKey = "text", string outputKey = "answer")
{
return new ReActParserChain(inputKey, outputKey);
}

/// <summary>
///
/// </summary>
/// <param name="agents"></param>
/// <param name="stopPhrase"></param>
/// <param name="messagesLimit"></param>
/// <param name="inputKey"></param>
/// <param name="outputKey"></param>
/// <returns></returns>
public static GroupChat GroupChat(
IList<AgentExecutorChain> agents, string? stopPhrase = null, int messagesLimit = 10, string inputKey = "input", string outputKey = "output")
{
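
One substantive change in this file besides the documentation stubs: reActPrompt is now declared as string?, so its null default no longer triggers the nullable-reference-type warning CS8625 ("Cannot convert null literal to non-nullable reference type"). The same fix in isolation:

#nullable enable

public static class NullableDefaultSketch
{
    // Old shape (warns): a null default on a parameter declared as non-nullable.
    // public static string WithPrompt(string prompt = null) => prompt;   // CS8625

    // New shape (clean): the annotation admits the null default.
    public static string WithPrompt(string? prompt = null) => prompt ?? "default ReAct prompt";
}
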
@@ -1,50 +1,63 @@
using LangChain.Abstractions.Chains.Base;
using LangChain.Abstractions.Schema;
using LangChain.Abstractions.Schema;
using LangChain.Chains.HelperChains;
using LangChain.Memory;
using LangChain.Providers;
using LangChain.Schema;

namespace LangChain.Chains.StackableChains.Agents;

public class AgentExecutorChain: BaseStackableChain
/// <summary>
///
/// </summary>
public class AgentExecutorChain : BaseStackableChain
{
/// <summary>
///
/// </summary>
public string HistoryKey { get; }

private readonly BaseStackableChain _originalChain;

private BaseStackableChain _chainWithHistory;

public string Name { get; private set; }
private BaseStackableChain? _chainWithHistory;

/// <summary>
/// Messages of this agent will not be added to the history
/// </summary>
public bool IsObserver { get; set; } = false;
public bool IsObserver { get; set; }

public AgentExecutorChain(BaseStackableChain originalChain, string name, string historyKey="history",
/// <inheritdoc/>
public AgentExecutorChain(
BaseStackableChain originalChain,
string name,
string historyKey = "history",
string outputKey = "final_answer")
{
Name = name;
HistoryKey = historyKey;
_originalChain = originalChain;

InputKeys = new[] { historyKey};
InputKeys = new[] { historyKey };
OutputKeys = new[] { outputKey };

SetHistory("");
}

/// <summary>
///
/// </summary>
/// <param name="history"></param>
public void SetHistory(string history)
{

_chainWithHistory =
Chain.Set(history, HistoryKey)
|_originalChain;
_chainWithHistory =
Chain.Set(history, HistoryKey) |
_originalChain;
}

/// <inheritdoc/>
protected override async Task<IChainValues> InternalCall(IChainValues values)
{
var res=await _chainWithHistory.CallAsync(values);
return res;
if (_chainWithHistory == null)
{
throw new InvalidOperationException("History is not set");
}

return await _chainWithHistory.CallAsync(values).ConfigureAwait(false);
}
}
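
Two small behavioral notes on this file: _chainWithHistory is now nullable and guarded with an explicit InvalidOperationException instead of risking a NullReferenceException, and the await gains ConfigureAwait(false), which is what analyzer rule CA2007 asks of library code so that continuations do not capture the caller's synchronization context. The same two patterns in isolation (hypothetical class, not this repository's code):

#nullable enable
using System;
using System.Threading.Tasks;

public class LazilyConfiguredWorker
{
    private Func<Task<string>>? _inner;

    public void Configure(Func<Task<string>> inner) => _inner = inner;

    public async Task<string> RunAsync()
    {
        // Fail fast with a meaningful message when the dependency was never set.
        if (_inner == null)
        {
            throw new InvalidOperationException("Worker is not configured");
        }

        // Library code does not need to resume on the caller's context (CA2007).
        return await _inner().ConfigureAwait(false);
    }
}
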
48 changes: 33 additions & 15 deletions src/libs/LangChain.Core/Chains/StackableChains/Agents/GroupChat.cs
@@ -1,12 +1,14 @@
using LangChain.Abstractions.Schema;
using LangChain.Chains.HelperChains;
using LangChain.Chains.HelperChains.Exceptions;
using LangChain.Memory;
using LangChain.Providers;

namespace LangChain.Chains.StackableChains.Agents;

public class GroupChat:BaseStackableChain
/// <summary>
///
/// </summary>
public class GroupChat : BaseStackableChain
{
private readonly IList<AgentExecutorChain> _agents;

@@ -15,34 +17,50 @@ public class GroupChat:BaseStackableChain
private readonly string _inputKey;
private readonly string _outputKey;


int _currentAgentId=0;
int _currentAgentId;
private readonly ConversationBufferMemory _conversationBufferMemory;


public bool ThrowOnLimit { get; set; } = false;
public GroupChat(IList<AgentExecutorChain> agents, string? stopPhrase=null, int messagesLimit=10, string inputKey="input", string outputKey="output")
/// <summary>
///
/// </summary>
public bool ThrowOnLimit { get; set; }

/// <summary>
///
/// </summary>
/// <param name="agents"></param>
/// <param name="stopPhrase"></param>
/// <param name="messagesLimit"></param>
/// <param name="inputKey"></param>
/// <param name="outputKey"></param>
public GroupChat(
IList<AgentExecutorChain> agents,
string? stopPhrase = null,
int messagesLimit = 10,
string inputKey = "input",
string outputKey = "output")
{
_agents = agents;

_stopPhrase = stopPhrase;
_stopPhrase = stopPhrase ?? string.Empty;
_messagesLimit = messagesLimit;
_inputKey = inputKey;
_outputKey = outputKey;
_conversationBufferMemory = new ConversationBufferMemory(new ChatMessageHistory()) { AiPrefix = "", HumanPrefix = "", SystemPrefix = "", SaveHumanMessages = false };
InputKeys = new[] { inputKey };
OutputKeys = new[] { outputKey };

}

public IReadOnlyList<Message> GetHistory()
{
return _conversationBufferMemory.ChatHistory.Messages;
}

/// <summary>
///
/// </summary>
/// <returns></returns>
public IReadOnlyList<Message> History => _conversationBufferMemory.ChatHistory.Messages;

/// <inheritdoc />
protected override async Task<IChainValues> InternalCall(IChainValues values)
{
values = values ?? throw new ArgumentNullException(nameof(values));

await _conversationBufferMemory.Clear().ConfigureAwait(false);
foreach (var agent in _agents)
Expand Down Expand Up @@ -72,7 +90,7 @@ await _conversationBufferMemory.ChatHistory.AddMessage(new Message($"{agent.Name
}
}

var result = _conversationBufferMemory.ChatHistory.Messages.Last();
var result = _conversationBufferMemory.ChatHistory.Messages[^1];
messagesCount = _conversationBufferMemory.ChatHistory.Messages.Count;
if (ThrowOnLimit && messagesCount >= _messagesLimit)
{
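
Side notes on this file: GetHistory() becomes a read-only History property, and the transcript's last message is now taken with the index-from-end operator instead of Enumerable.Last(), which analyzers flag on indexable collections (rule CA1826). The operator in isolation:

using System;
using System.Collections.Generic;

var messages = new List<string> { "first", "second", "last" };

// messages[^1] is compiled against Count and the int indexer, so there is no
// LINQ enumerator involved; it is equivalent to messages[messages.Count - 1].
Console.WriteLine(messages[^1]);                  // last
Console.WriteLine(messages[messages.Count - 1]);  // last (same element)
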
@@ -3,8 +3,14 @@

namespace LangChain.Chains.StackableChains.Agents;

public class PromptedAgent: AgentExecutorChain
/// <summary>
///
/// </summary>
public class PromptedAgent : AgentExecutorChain
{
/// <summary>
///
/// </summary>
public const string Template =
@"{system}
{history}";
@@ -16,8 +22,19 @@ private static BaseStackableChain MakeChain(string name, string system, IChatMod
| Chain.LLM(model,outputKey: outputKey);
}


public PromptedAgent(string name, string prompt, IChatModel model, string outputKey = "final_answer") : base(MakeChain(name,prompt,model, outputKey),name, "history", outputKey)
/// <summary>
///
/// </summary>
/// <param name="name"></param>
/// <param name="prompt"></param>
/// <param name="model"></param>
/// <param name="outputKey"></param>
public PromptedAgent(
string name,
string prompt,
IChatModel model,
string outputKey = "final_answer")
: base(MakeChain(name,prompt,model, outputKey), name, "history", outputKey)
{

}
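
Putting the public surface from the last few files together, a hedged usage sketch: the constructor parameters and defaults come from the hunks above, while the IChatModel instance (model) and the way the resulting chain is ultimately executed are assumptions, not shown in this commit.

// Hypothetical wiring of two prompted agents into a group chat, based only on
// the signatures visible in this diff. `model` is assumed to be an IChatModel.
AgentExecutorChain writer = new PromptedAgent(
    name: "writer",
    prompt: "You draft a short answer to the user's question.",
    model: model);

AgentExecutorChain critic = new PromptedAgent(
    name: "critic",
    prompt: "You review the writer's answer and reply DONE when it is good enough.",
    model: model);

GroupChat chat = Chain.GroupChat(
    agents: new AgentExecutorChain[] { writer, critic },
    stopPhrase: "DONE",
    messagesLimit: 10);

chat.ThrowOnLimit = false;  // stop quietly at the message limit instead of throwing

// After a run, the full transcript is exposed through the new History property:
// IReadOnlyList<Message> transcript = chat.History;
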