Skip to content

Commit 0c2b492

Browse files
committed
chore(tests): Mock LLM in tests for PRs
This saves time when testing on CPU, which is the only sensible thing to do on GitHub CI for PRs. For releases, or once the commit is merged, we could use an external runner with a GPU, or just wait. Signed-off-by: Richard Palethorpe <[email protected]>
1 parent a8706ab commit 0c2b492

File tree

10 files changed

+390
-64
lines changed

10 files changed

+390
-64
lines changed

.github/workflows/tests.yml

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,17 +30,29 @@ jobs:
3030
$(. /etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \
3131
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
3232
sudo apt-get update
33-
sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
33+
sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin make
3434
docker version
3535
3636
docker run --rm hello-world
3737
- uses: actions/setup-go@v5
3838
with:
3939
go-version: '>=1.17.0'
40+
- name: Free up disk space
41+
run: |
42+
sudo rm -rf /usr/share/dotnet
43+
sudo rm -rf /usr/local/lib/android
44+
sudo rm -rf /opt/ghc
45+
sudo apt-get clean
46+
docker system prune -af || true
47+
df -h
48+
4049
- name: Run tests
4150
run: |
42-
sudo apt-get update && sudo apt-get install -y make
43-
make tests
51+
if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
52+
make tests-mock
53+
else
54+
make tests
55+
fi
4456
#sudo mv coverage/coverage.txt coverage.txt
4557
#sudo chmod 777 coverage.txt
4658

Makefile

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,9 @@ cleanup-tests:
1313
tests: prepare-tests
1414
LOCALAGI_MCPBOX_URL="http://localhost:9090" LOCALAGI_MODEL="gemma-3-12b-it-qat" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
1515

16+
tests-mock: prepare-tests
17+
LOCALAGI_MCPBOX_URL="http://localhost:9090" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
18+
1619
run-nokb:
1720
$(MAKE) run KBDISABLEINDEX=true
1821

core/agent/agent.go

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ type Agent struct {
2727
sync.Mutex
2828
options *options
2929
Character Character
30-
client *openai.Client
30+
client llm.LLMClient
3131
jobQueue chan *types.Job
3232
context *types.ActionContext
3333

@@ -59,7 +59,12 @@ func New(opts ...Option) (*Agent, error) {
5959
return nil, fmt.Errorf("failed to set options: %v", err)
6060
}
6161

62-
client := llm.NewClient(options.LLMAPI.APIKey, options.LLMAPI.APIURL, options.timeout)
62+
var client llm.LLMClient
63+
if options.llmClient != nil {
64+
client = options.llmClient
65+
} else {
66+
client = llm.NewClient(options.LLMAPI.APIKey, options.LLMAPI.APIURL, options.timeout)
67+
}
6368

6469
c := context.Background()
6570
if options.context != nil {
@@ -116,6 +121,11 @@ func New(opts ...Option) (*Agent, error) {
116121
return a, nil
117122
}
118123

124+
// LLMClient returns the agent's LLM client (for testing)
125+
func (a *Agent) LLMClient() llm.LLMClient {
126+
return a.client
127+
}
128+
119129
func (a *Agent) startNewConversationsConsumer() {
120130
go func() {
121131
for {

core/agent/agent_suite_test.go

Lines changed: 13 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package agent_test
22

33
import (
4+
"net/url"
45
"os"
56
"testing"
67

@@ -13,15 +14,18 @@ func TestAgent(t *testing.T) {
1314
RunSpecs(t, "Agent test suite")
1415
}
1516

16-
var testModel = os.Getenv("LOCALAGI_MODEL")
17-
var apiURL = os.Getenv("LOCALAI_API_URL")
18-
var apiKeyURL = os.Getenv("LOCALAI_API_KEY")
17+
var (
18+
testModel = os.Getenv("LOCALAGI_MODEL")
19+
apiURL = os.Getenv("LOCALAI_API_URL")
20+
apiKeyURL = os.Getenv("LOCALAI_API_KEY")
21+
useRealLocalAI bool
22+
)
23+
24+
func isValidURL(u string) bool {
25+
parsed, err := url.ParseRequestURI(u)
26+
return err == nil && parsed.Scheme != "" && parsed.Host != ""
27+
}
1928

2029
func init() {
21-
if testModel == "" {
22-
testModel = "hermes-2-pro-mistral"
23-
}
24-
if apiURL == "" {
25-
apiURL = "http://192.168.68.113:8080"
26-
}
30+
useRealLocalAI = isValidURL(apiURL) && apiURL != "" && testModel != ""
2731
}

0 commit comments

Comments
 (0)