LLM Connector for Vulnerability Analyser #1215

Open · seran wants to merge 50 commits into master from llm-connector

Commits (50)

4c66578  llm connector (seran, Apr 4, 2025)
c56e783  config update (seran, Apr 4, 2025)
7133f7a  Merge branch 'master' into llm-connector (seran, Apr 8, 2025)
e677968  Merge branch 'master' into llm-connector (seran, Apr 8, 2025)
a5f9d62  moving connector to common location (seran, Apr 9, 2025)
4ef1ddc  clean-up (seran, Apr 9, 2025)
e1d341a  more clean-up (seran, Apr 9, 2025)
6016a82  Merge branch 'master' into llm-connector (seran, Apr 29, 2025)
6ed0706  wip: workerpool to handle prompt requests (seran, Apr 29, 2025)
919d196  working in worker pool (seran, Apr 29, 2025)
5f6c045  gitignore to ignore target folders (seran, May 4, 2025)
5b2e78c  gitignore to ignore target folders (seran, May 4, 2025)
7bf59e3  wip (seran, May 6, 2025)
2b64f85  Merge branch 'master' into llm-connector (seran, May 6, 2025)
e1078dc  minor clean-up (seran, May 6, 2025)
b2f33ea  fixes and comments (seran, May 6, 2025)
2941dfd  Merge branch 'master' into llm-connector (seran, May 6, 2025)
63a77e4  clean-up (seran, May 7, 2025)
a19b567  clean-up (seran, May 7, 2025)
218d214  Merge branch 'master' into llm-connector (seran, May 9, 2025)
042cc82  Merge branch 'master' into llm-connector (seran, May 11, 2025)
74d1998  clean-up (seran, May 12, 2025)
b58271f  Merge branch 'master' into llm-connector (seran, May 12, 2025)
b8c9cb4  Merge branch 'master' into llm-connector (seran, May 13, 2025)
88caa97  clean-up (seran, May 13, 2025)
6cd7833  Merge branch 'master' into llm-connector (seran, May 14, 2025)
484de8a  Merge branch 'master' into llm-connector (seran, May 19, 2025)
0433f7c  additional methods (seran, May 19, 2025)
272d7ac  Merge branch 'master' into llm-connector (seran, May 23, 2025)
78f9ee3  Merge branch 'master' into llm-connector (seran, May 23, 2025)
b152575  addressing comments (seran, May 23, 2025)
ce78ff1  added ollama container to test (seran, May 23, 2025)
3d3526d  more updates (seran, May 23, 2025)
77d2798  Merge branch 'master' into llm-connector (seran, May 23, 2025)
6104227  replaced URL to HttpClientFactory (seran, May 23, 2025)
91fe6b3  clean-up (seran, May 23, 2025)
edcb8f9  clean-up (seran, May 23, 2025)
56945e4  more clean-up (seran, May 23, 2025)
e4c2f07  almost completed (seran, May 23, 2025)
74bdd77  testing LLM in CI (seran, May 23, 2025)
90ed8dd  Merge branch 'master' into llm-connector (seran, May 23, 2025)
72d2bb3  working commit (seran, May 23, 2025)
3d43824  Merge branch 'master' into llm-connector (seran, May 27, 2025)
0598210  Merge branch 'master' into llm-connector (seran, May 27, 2025)
402ff47  Merge branch 'master' into llm-connector (seran, May 30, 2025)
917775b  updates (seran, May 30, 2025)
52474f4  minor fix (seran, May 30, 2025)
9b9be39  minor fix (seran, May 30, 2025)
836d426  minor change (seran, May 30, 2025)
17230df  Merge branch 'master' into llm-connector (seran, Jun 2, 2025)

Files changed

core/src/main/kotlin/org/evomaster/core/EMConfig.kt (32 additions, 9 deletions)
@@ -591,6 +591,14 @@ class EMConfig {
throw ConfigProblemException("The use of 'security' requires 'minimize'")
}

if (languageModelConnector && languageModelServerURL.isNullOrEmpty()) {
throw ConfigProblemException("Language model server URL cannot be empty.")
}

if (languageModelConnector && languageModelName.isNullOrEmpty()) {
throw ConfigProblemException("Language model name cannot be empty.")
}

if(prematureStop.isNotEmpty() && stoppingCriterion != StoppingCriterion.TIME){
throw ConfigProblemException("The use of 'prematureStop' is meaningful only if the stopping criterion" +
" 'stoppingCriterion' is based on time")
@@ -2235,16 +2243,16 @@ class EMConfig {
RANDOM
}

@Cfg("Specify a method to select the first external service spoof IP address.")
@Experimental
@Cfg("Specify a method to select the first external service spoof IP address.")
var externalServiceIPSelectionStrategy = ExternalServiceIPSelectionStrategy.NONE

@Experimental
@Cfg("User provided external service IP." +
" When EvoMaster mocks external services, mock server instances will run on local addresses starting from" +
" this provided address." +
" Min value is ${defaultExternalServiceIP}." +
" Lower values like ${ExternalServiceSharedUtils.RESERVED_RESOLVED_LOCAL_IP} and ${ExternalServiceSharedUtils.DEFAULT_WM_LOCAL_IP} are reserved.")
@Experimental
@Regex(externalServiceIPRegex)
var externalServiceIP : String = defaultExternalServiceIP

@@ -2275,26 +2283,24 @@ class EMConfig {
@Probability(true)
var useExtraSqlDbConstraintsProbability = 0.9


@Cfg("a probability of harvesting actual responses from external services as seeds.")
@Experimental
@Cfg("a probability of harvesting actual responses from external services as seeds.")
@Probability(activating = true)
var probOfHarvestingResponsesFromActualExternalServices = 0.0


@Cfg("a probability of prioritizing to employ successful harvested actual responses from external services as seeds (e.g., 2xx from HTTP external service).")
@Experimental
@Cfg("a probability of prioritizing to employ successful harvested actual responses from external services as seeds (e.g., 2xx from HTTP external service).")
@Probability(activating = true)
var probOfPrioritizingSuccessfulHarvestedActualResponses = 0.0

@Cfg("a probability of mutating mocked responses based on actual responses")
@Experimental
@Cfg("a probability of mutating mocked responses based on actual responses")
@Probability(activating = true)
var probOfMutatingResponsesBasedOnActualResponse = 0.0

@Experimental
@Cfg("Number of threads for external request harvester. No more threads than numbers of processors will be used.")
@Min(1.0)
@Experimental
var externalRequestHarvesterNumberOfThreads: Int = 2


@@ -2323,8 +2329,8 @@ class EMConfig {
RANDOM
}

@Cfg("Harvested external request response selection strategy")
@Experimental
@Cfg("Harvested external request response selection strategy")
var externalRequestResponseSelectionStrategy = ExternalRequestResponseSelectionStrategy.EXACT

@Cfg("Whether to employ constraints specified in API schema (e.g., OpenAPI) in test generation")
@@ -2373,6 +2379,23 @@ class EMConfig {
@Cfg("Apply a security testing phase after functional test cases have been generated.")
var security = true

@Experimental
@Cfg("Enable language model connector")
var languageModelConnector = false

@Experimental
@Cfg("Large-language model external service URL. Default is set to Ollama local instance URL.")
var languageModelServerURL: String = "http://localhost:11434/"

@Experimental
@Cfg("Large-language model name as listed in Ollama")
var languageModelName: String = "llama3.2:latest"

@Experimental
@Cfg("Number of threads for language model connector. No more threads than numbers of processors will be used.")
@Min(1.0)
var languageModelConnectorNumberOfThreads: Int = 2


@Cfg("If there is no configuration file, create a default template at given configPath location." +
" However this is done only on the 'default' location. If you change 'configPath', no new file will be" +
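For reference, a minimal sketch of how the new languageModel* options above fit together, using the default values declared in this diff; the snippet is illustrative and is not code from this PR:

import org.evomaster.core.EMConfig

// Illustrative only: enabling the connector with the defaults shown above.
val config = EMConfig()
config.languageModelConnector = true
config.languageModelServerURL = "http://localhost:11434/"   // local Ollama instance (default)
config.languageModelName = "llama3.2:latest"                // must be a model listed in Ollama
config.languageModelConnectorNumberOfThreads = 2            // capped at the number of processors
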
@@ -0,0 +1,7 @@
package org.evomaster.core.languagemodel.data

class AnsweredPrompt (
val prompt: Prompt,
val answer: String,
) {
}
@@ -0,0 +1,10 @@
package org.evomaster.core.languagemodel.data

import java.util.UUID

class Prompt(
val id: UUID,

val prompt: String
) {
}
@@ -0,0 +1,30 @@
package org.evomaster.core.languagemodel.data.ollama

class OllamaEndpoints {

companion object {
/**
* API URL to generate a response for a given prompt with a provided model.
*/
const val GENERATE_ENDPOINT = "/api/generate"

/**
* API URL to list models that are available locally.
*/
const val TAGS_ENDPOINT = "/api/tags"

fun getGenerateEndpoint(serverURL: String): String {
return cleanURL(serverURL) + GENERATE_ENDPOINT
}

fun getTagEndpoint(serverURL: String): String {
return cleanURL(serverURL) + TAGS_ENDPOINT
}

private fun cleanURL(serverURL: String): String {
return if (serverURL.endsWith("/")) serverURL.dropLast(1) else serverURL
}
}


}
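
A quick illustration of the URLs the helpers above produce for the default server URL; cleanURL strips the trailing slash before the path is appended:

import org.evomaster.core.languagemodel.data.ollama.OllamaEndpoints

// Illustrative only: endpoint URLs for the default configuration.
val generateUrl = OllamaEndpoints.getGenerateEndpoint("http://localhost:11434/")
// == "http://localhost:11434/api/generate"
val tagsUrl = OllamaEndpoints.getTagEndpoint("http://localhost:11434/")
// == "http://localhost:11434/api/tags"
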
@@ -0,0 +1,17 @@
package org.evomaster.core.languagemodel.data.ollama

class OllamaModel {

val name: String = ""

val model: String = ""

val modified_at: String = ""

val size: Long = 0 // size in bytes; Long, since local models can exceed the Int range

val digest: String = ""

val details: OllamaModelDetail = OllamaModelDetail()

}
@@ -0,0 +1,15 @@
package org.evomaster.core.languagemodel.data.ollama

class OllamaModelDetail {
val parent_model: String = ""

val format: String = ""

val family: String = ""

val families: List<String> = listOf()

val parameter_size: String = ""

val quantization_level: String = ""
}
@@ -0,0 +1,5 @@
package org.evomaster.core.languagemodel.data.ollama

class OllamaModelResponse {
val models: List<OllamaModel> = listOf()
}
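
Since languageModelName is expected to match a model listed by Ollama, the DTOs above can back an availability check. A self-contained sketch, assuming the JDK HttpClient and a Jackson ObjectMapper bound by fields; the connector in this PR may wire this differently (e.g. through EvoMaster's HTTP client factory):

import com.fasterxml.jackson.annotation.JsonAutoDetect
import com.fasterxml.jackson.annotation.PropertyAccessor
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import org.evomaster.core.languagemodel.data.ollama.OllamaEndpoints
import org.evomaster.core.languagemodel.data.ollama.OllamaModelResponse
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

// Illustrative sketch, not the connector implementation:
// query /api/tags and check whether the configured model is available locally.
fun isModelAvailable(serverURL: String, modelName: String): Boolean {
    val mapper = ObjectMapper()
        .setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    val request = HttpRequest.newBuilder()
        .uri(URI.create(OllamaEndpoints.getTagEndpoint(serverURL)))
        .GET()
        .build()
    val body = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString())
        .body()
    return mapper.readValue(body, OllamaModelResponse::class.java)
        .models.any { it.name == modelName }
}
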
@@ -0,0 +1,21 @@
package org.evomaster.core.languagemodel.data.ollama

/**
* DTO to represent the Ollama request schema.
*/
class OllamaRequest (
val model: String,

/**
* Contains the string of the prompt for the language model.
*/
val prompt: String,

/**
* When false, the response is returned as a single object;
* when true, the response is streamed as a sequence of objects.
*/
val stream: Boolean = false
) {

}
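
For reference, serializing this DTO gives the JSON body that Ollama's /api/generate endpoint expects. The values below are illustrative (the model name is the EMConfig default, the prompt is hypothetical):

import org.evomaster.core.languagemodel.data.ollama.OllamaRequest

// Illustrative only; serializes to {"model":"llama3.2:latest","prompt":"...","stream":false}
val request = OllamaRequest(
    model = "llama3.2:latest",   // default languageModelName in EMConfig
    prompt = "List common causes of SQL injection in REST endpoints.",  // hypothetical prompt
    stream = false               // single response object, matching the non-stream DTO below
)
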
@@ -0,0 +1,6 @@
package org.evomaster.core.languagemodel.data.ollama

enum class OllamaRequestVerb {
GET,
POST;
}
@@ -0,0 +1,37 @@
package org.evomaster.core.languagemodel.data.ollama

/**
* DTO to represent the Ollama response schema.
*/
class OllamaResponse {

/**
* Name of the model that produced the response
*/
val model: String = ""

val created_at: String = ""

/**
* Contains the response string for non-stream output
*/
val response: String = ""

val done: Boolean = false

val done_reason: String = ""

val context: List<Int> = emptyList()

val total_duration: Long = 0 // durations are reported in nanoseconds; Long avoids Int overflow

val load_duration: Long = 0

val prompt_eval_count: Int = 0

val prompt_eval_duration: Long = 0

val eval_count: Int = 0

val eval_duration: Long = 0
}
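
Putting OllamaRequest and OllamaResponse together, a minimal end-to-end sketch of one blocking prompt round-trip; it assumes the JDK HttpClient and Jackson with field binding, which is not necessarily what the connector itself uses:

import com.fasterxml.jackson.annotation.JsonAutoDetect
import com.fasterxml.jackson.annotation.PropertyAccessor
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import org.evomaster.core.languagemodel.data.ollama.OllamaEndpoints
import org.evomaster.core.languagemodel.data.ollama.OllamaRequest
import org.evomaster.core.languagemodel.data.ollama.OllamaResponse
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

// Illustrative sketch, not the implementation in this PR:
// send one prompt to Ollama's /api/generate and return the answer text.
fun askOllama(serverURL: String, model: String, prompt: String): String {
    val mapper = ObjectMapper()
        .setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    val json = mapper.writeValueAsString(OllamaRequest(model, prompt, stream = false))
    val httpRequest = HttpRequest.newBuilder()
        .uri(URI.create(OllamaEndpoints.getGenerateEndpoint(serverURL)))
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(json))
        .build()
    val httpResponse = HttpClient.newHttpClient()
        .send(httpRequest, HttpResponse.BodyHandlers.ofString())
    // stream = false, so the whole answer arrives in a single OllamaResponse object
    return mapper.readValue(httpResponse.body(), OllamaResponse::class.java).response
}

With stream = false the endpoint returns exactly one JSON object, which is why OllamaResponse models a single response string rather than a sequence of chunks.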