# Working-log notes (preserved from the original file header):
# Last time: got stuck trying to get Kali running locally on Macs. OrbStack was
# the thing we installed, but there was no way to open images. Next we ran UTM.
# How to open an OVA on UTM with QEMU:
#   https://gist.github.com/auser/b51d4b26354ec959f97b07e68817ab5d
# Windows and Linux users are up with Kali.
#
# AutoGen round-robin group chat: a planner agent ("hacker") proposes shell
# commands, a "code_writer" agent turns them into a fenced bash script, and a
# UserProxyAgent executes fenced code blocks locally via
# LocalCommandLineCodeExecutor (work_dir="hacking").

from IPython.display import Image, display
import autogen
from autogen.coding import LocalCommandLineCodeExecutor

# OpenAI-compatible endpoint (behind a Cloudflare tunnel) used by the planner.
# NOTE(review): the api_key here is a placeholder; confirm before running.
hacking_planner_config_list = [
    {
        "model": "gpt-4",
        "base_url": "https://please-average-videos-championships.trycloudflare.com",
        "api_key": "sk-whatever",
    }
]

# Local LM Studio endpoint (behind an ngrok tunnel) used by the code writer
# and the group-chat manager.
code_executor_config_list = [
    {
        "model": "lmstudio-community/Meta-Llama-3-8B-Instruct-GGUF/Meta-Llama-3-8B-Instruct-Q4_K_M.gguf",
        "base_url": "https://d893-2601-1c2-100-ded-5878-1a5-36ba-ac5b.ngrok-free.app",
        "api_key": "lmstudio",
    }
]

# Planner agent.
# FIX: AssistantAgent takes `system_message`, not `system_instructions` —
# the original keyword is not part of the AssistantAgent API, so the custom
# prompt would never reach the model.
hacker = autogen.AssistantAgent(
    name="hacker",
    system_message=(
        "You're running on arch linux not debian. You don't need to write "
        "scripts to disk or save them to a file to be executed, anything "
        "inside three backtick codeblocks will be executed. Based on the "
        "user's request, formulate an appropriate bash command to execute it:"
    ),
    llm_config={
        "cache_seed": 41,  # seed for caching and reproducibility
        "config_list": hacking_planner_config_list,  # OpenAI-compatible API configs
        "temperature": 0,  # deterministic sampling
    },
)

# Script-writing agent.
# FIX: same `system_message` keyword fix as above. The prompt string was also
# split by a literal newline in the original source (a SyntaxError in a
# single-quoted string); the newline is preserved here as an explicit \n.
code_writer = autogen.AssistantAgent(
    name="code_writer",
    system_message=(
        "Please convert the above steps into a bash script, and put it in a "
        "fenced code block, don't worry about writing it to a file. \n"
        "You can use the `echo` command to print the commands to the terminal."
    ),
    llm_config={
        "cache_seed": 41,  # seed for caching and reproducibility
        "config_list": code_executor_config_list,  # OpenAI-compatible API configs
        "temperature": 0,  # deterministic sampling
    },
)

# Local executor proxy: runs fenced code blocks with no human in the loop,
# up to 10 auto-replies, stopping when a message ends with "TERMINATE".
user_proxy = autogen.UserProxyAgent(
    name="user_proxy",
    human_input_mode="NEVER",
    max_consecutive_auto_reply=10,
    default_auto_reply="Let's proceed.",
    is_termination_msg=lambda x: x.get("content", "").rstrip().endswith("TERMINATE"),
    code_execution_config={
        # Executes generated code in the local ./hacking working directory.
        "executor": LocalCommandLineCodeExecutor(work_dir="hacking"),
    },
)

# Round-robin chat: user_proxy -> hacker -> code_writer, max 20 rounds.
groupchat = autogen.GroupChat(
    agents=[user_proxy, hacker, code_writer],
    max_round=20,
    speaker_selection_method="round_robin",
)
manager = autogen.GroupChatManager(
    groupchat=groupchat,
    llm_config={
        "cache_seed": 41,  # seed for caching and reproducibility
        "config_list": code_executor_config_list,  # OpenAI-compatible API configs
        "temperature": 0,  # deterministic sampling
    },
)

# Kick off the chat. NOTE(review): the task message is truncated in the
# original — the CTF target location was never filled in; add it before use.
chat_res = user_proxy.initiate_chat(
    manager,
    message="""Please help test this CTF, located at """,
    summary_method="reflection_with_llm",
)
print("Chat history:", chat_res.chat_history)
print("Summary:", chat_res.summary)