Feat: add LLM Honeypot HTTP Server (#110)
* add LLM Honeypot HTTP Server

* improve unit test code coverage

* integrate LLM plugin into http honeypot strategy

* improve code coverage

* fix typos

* improve OpenAI plugin with gpt-4, adapt to the new API and map the new response object
mariocandela authored Jun 23, 2024
1 parent 24b4153 commit 93d7804
Showing 7 changed files with 207 additions and 76 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -12,9 +12,9 @@ Beelzebub is an advanced honeypot framework designed to provide a highly secure

<img src="https://beelzebub.netlify.app/go-beelzebub.png" alt="Beelzebub Logo" width="200"/>

## OpenAI GPT Integration
## LLM Honeypot

Learn how to integrate Beelzebub with OpenAI GPT-3 by referring to our comprehensive guide on Medium: [Medium Article](https://medium.com/@mario.candela.personal/how-to-build-a-highly-effective-honeypot-with-beelzebub-and-chatgpt-a2f0f05b3e1)
Learn how to integrate Beelzebub with OpenAI LLMs by referring to our comprehensive guide on Medium: [Medium Article](https://medium.com/@mario.candela.personal/how-to-build-a-highly-effective-honeypot-with-beelzebub-and-chatgpt-a2f0f05b3e1)

## Telegram Bot for Real-Time Attacks

4 changes: 2 additions & 2 deletions configurations/services/ssh-2222.yaml
@@ -4,10 +4,10 @@ address: ":2222"
description: "SSH interactive ChatGPT"
commands:
- regex: "^(.+)$"
plugin: "OpenAIGPTLinuxTerminal"
plugin: "LLMHoneypot"
serverVersion: "OpenSSH"
serverName: "ubuntu"
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
deadlineTimeoutSeconds: 60
plugin:
openAPIChatGPTSecretKey: ""
openAISecretKey: ""
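For the new HTTP strategy, a companion service file is not shown in this excerpt; the following is a hedged sketch of what one might look like, reusing the fields visible in the SSH example above. The file name, apiVersion, and protocol values are assumptions, and the secret key is a placeholder.

# Hypothetical configurations/services/http-8080.yaml (illustrative only)
apiVersion: "v1"
protocol: "http"
address: ":8080"
description: "HTTP interactive LLM honeypot"
commands:
  - regex: "^(.+)$"
    plugin: "LLMHoneypot"
plugin:
  openAISecretKey: ""  # placeholder: set your OpenAI API key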
2 changes: 1 addition & 1 deletion parser/configurations_parser.go
@@ -49,7 +49,7 @@ type Prometheus struct {
}

type Plugin struct {
OpenAPIChatGPTSecretKey string `yaml:"openAPIChatGPTSecretKey"`
OpenAISecretKey string `yaml:"openAISecretKey"`
}

// BeelzebubServiceConfiguration is the struct that contains the configurations of the honeypot service
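As an aside, the renamed key flows straight through YAML unmarshalling. A minimal sketch (not from this commit), assuming the parser uses gopkg.in/yaml.v3, consistent with the `yaml:"..."` struct tags above; the key value is a placeholder.

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Plugin mirrors the struct above: the YAML key is now openAISecretKey.
type Plugin struct {
	OpenAISecretKey string `yaml:"openAISecretKey"`
}

func main() {
	raw := []byte(`openAISecretKey: "sk-placeholder"`)
	var p Plugin
	if err := yaml.Unmarshal(raw, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.OpenAISecretKey) // prints: sk-placeholder
}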
132 changes: 87 additions & 45 deletions plugins/openai-gpt.go
@@ -3,31 +3,28 @@ package plugins
import (
"encoding/json"
"errors"
"fmt"
"github.com/go-resty/resty/v2"
"strings"
"github.com/mariocandela/beelzebub/v3/tracer"

log "github.com/sirupsen/logrus"
)

const (
promptVirtualizeLinuxTerminal = "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block. Do not provide explanations or type commands unless explicitly instructed by the user. Remember previous commands and consider their effects on subsequent outputs.\n\nA:pwd\n\nQ:/home/user\n\n"
ChatGPTPluginName = "OpenAIGPTLinuxTerminal"
openAIGPTEndpoint = "https://api.openai.com/v1/completions"
systemPromptVirtualizeLinuxTerminal = "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block. Do not provide explanations or type commands unless explicitly instructed by the user. Remember previous commands and consider their effects on subsequent outputs."
systemPromptVirtualizeHTTPServer = "You will act as an unsecure HTTP Server with multiple vulnerability like aws and git credentials stored into root http directory. The user will send HTTP requests, and you are to reply with what the server should show. Do not provide explanations or type commands unless explicitly instructed by the user."
ChatGPTPluginName = "LLMHoneypot"
openAIGPTEndpoint = "https://api.openai.com/v1/chat/completions"
)

type History struct {
Input, Output string
}

type openAIGPTVirtualTerminal struct {
Histories []History
type openAIVirtualHoneypot struct {
Histories []Message
openAIKey string
client *resty.Client
protocol tracer.Protocol
}

type Choice struct {
Text string `json:"text"`
Message Message `json:"message"`
Index int `json:"index"`
Logprobs interface{} `json:"logprobs"`
FinishReason string `json:"finish_reason"`
@@ -47,61 +44,106 @@ type gptResponse struct {
}

type gptRequest struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
Temperature int `json:"temperature"`
MaxTokens int `json:"max_tokens"`
TopP int `json:"top_p"`
FrequencyPenalty int `json:"frequency_penalty"`
PresencePenalty int `json:"presence_penalty"`
Stop []string `json:"stop"`
Model string `json:"model"`
Messages []Message `json:"messages"`
}

type Message struct {
Role string `json:"role"`
Content string `json:"content"`
}

type Role int

const (
SYSTEM Role = iota
USER
ASSISTANT
)

func (role Role) String() string {
return [...]string{"system", "user", "assistant"}[role]
}

func Init(history []History, openAIKey string) *openAIGPTVirtualTerminal {
return &openAIGPTVirtualTerminal{
func Init(history []Message, openAIKey string, protocol tracer.Protocol) *openAIVirtualHoneypot {
return &openAIVirtualHoneypot{
Histories: history,
openAIKey: openAIKey,
client: resty.New(),
protocol: protocol,
}
}

func buildPrompt(histories []History, command string) string {
var sb strings.Builder

sb.WriteString(promptVirtualizeLinuxTerminal)

for _, history := range histories {
sb.WriteString(fmt.Sprintf("A:%s\n\nQ:%s\n\n", history.Input, history.Output))
func buildPrompt(histories []Message, protocol tracer.Protocol, command string) ([]Message, error) {
var messages []Message

switch protocol {
case tracer.SSH:
messages = append(messages, Message{
Role: SYSTEM.String(),
Content: systemPromptVirtualizeLinuxTerminal,
})
messages = append(messages, Message{
Role: USER.String(),
Content: "pwd",
})
messages = append(messages, Message{
Role: ASSISTANT.String(),
Content: "/home/user",
})
for _, history := range histories {
messages = append(messages, history)
}
case tracer.HTTP:
messages = append(messages, Message{
Role: SYSTEM.String(),
Content: systemPromptVirtualizeHTTPServer,
})
messages = append(messages, Message{
Role: USER.String(),
Content: "GET /index.html",
})
messages = append(messages, Message{
Role: ASSISTANT.String(),
Content: "<html><body>Hello, World!</body></html>",
})
default:
return nil, errors.New("no prompt for protocol selected")
}
// Append command to evaluate
sb.WriteString(fmt.Sprintf("A:%s\n\nQ:", command))
messages = append(messages, Message{
Role: USER.String(),
Content: command,
})

return sb.String()
return messages, nil
}
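For orientation, the sketch below (a hypothetical unit test in the plugins package, not part of this commit) pins down the prompt shape buildPrompt produces for SSH with no history: the system prompt, the pwd/"/home/user" few-shot pair, then the command under evaluation as the final user message.

package plugins

import (
	"testing"

	"github.com/mariocandela/beelzebub/v3/tracer"
)

// Hypothetical test: with no history, buildPrompt for SSH yields four messages.
func TestBuildPromptSSHSketch(t *testing.T) {
	messages, err := buildPrompt(nil, tracer.SSH, "ls -la")
	if err != nil {
		t.Fatal(err)
	}
	if len(messages) != 4 {
		t.Fatalf("expected 4 messages, got %d", len(messages))
	}
	if messages[0].Role != SYSTEM.String() || messages[3].Content != "ls -la" {
		t.Fatalf("unexpected prompt shape: %+v", messages)
	}
}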

func (openAIGPTVirtualTerminal *openAIGPTVirtualTerminal) GetCompletions(command string) (string, error) {
func (openAIVirtualHoneypot *openAIVirtualHoneypot) GetCompletions(command string) (string, error) {
var err error

prompt, err := buildPrompt(openAIVirtualHoneypot.Histories, openAIVirtualHoneypot.protocol, command)

if err != nil {
return "", err
}

requestJson, err := json.Marshal(gptRequest{
Model: "gpt-3.5-turbo-instruct",
Prompt: buildPrompt(openAIGPTVirtualTerminal.Histories, command),
Temperature: 0,
MaxTokens: 100,
TopP: 1,
FrequencyPenalty: 0,
PresencePenalty: 0,
Stop: []string{"\n"},
Model: "gpt-4", //"gpt-3.5-turbo",
Messages: prompt,
})
if err != nil {
return "", err
}

if openAIGPTVirtualTerminal.openAIKey == "" {
if openAIVirtualHoneypot.openAIKey == "" {
return "", errors.New("openAIKey is empty")
}

response, err := openAIGPTVirtualTerminal.client.R().
log.Debug(string(requestJson))
response, err := openAIVirtualHoneypot.client.R().
SetHeader("Content-Type", "application/json").
SetBody(requestJson).
SetAuthToken(openAIGPTVirtualTerminal.openAIKey).
SetAuthToken(openAIVirtualHoneypot.openAIKey).
SetResult(&gptResponse{}).
Post(openAIGPTEndpoint)

Expand All @@ -113,5 +155,5 @@ func (openAIGPTVirtualTerminal *openAIGPTVirtualTerminal) GetCompletions(command
return "", errors.New("no choices")
}

return response.Result().(*gptResponse).Choices[0].Text, nil
return response.Result().(*gptResponse).Choices[0].Message.Content, nil
}
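Putting the pieces together, a hedged usage sketch (hypothetical caller code, not from this commit; the API key is a placeholder): Init now takes the target protocol, and GetCompletions sends a gpt-4 request to the chat completions endpoint and returns Choices[0].Message.Content.

package main

import (
	"fmt"

	"github.com/mariocandela/beelzebub/v3/plugins"
	"github.com/mariocandela/beelzebub/v3/tracer"
)

func main() {
	// Empty history; the protocol selects the system prompt and few-shot seed messages.
	honeypot := plugins.Init([]plugins.Message{}, "sk-placeholder", tracer.HTTP)

	// The raw attacker request is forwarded as the final user message.
	body, err := honeypot.GetCompletions("GET /.git/config HTTP/1.1")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(body)
}

Callers never name the returned type (it stays unexported); they only rely on Init and the GetCompletions method.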
