Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file added client/app/assets/images/db-logos/d1.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
152 changes: 152 additions & 0 deletions redash/query_runner/d1.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
import json
import re

import requests

from redash.query_runner import (
    TYPE_BOOLEAN,
    TYPE_DATETIME,
    TYPE_FLOAT,
    TYPE_INTEGER,
    TYPE_STRING,
    BaseQueryRunner,
    register,
)
from redash.utils.requests_session import requests_session as session

# Map Python type names (as returned by type(v).__name__ on values decoded
# from D1's JSON responses) to Redash column types.
TYPES_MAP = {
    "str": TYPE_STRING,
    "int": TYPE_INTEGER,
    "float": TYPE_FLOAT,
    "bool": TYPE_BOOLEAN,
    # A NULL in the sampled row gives no type information; fall back to string.
    "NoneType": TYPE_STRING,
}


# Compiled once at import time: this check runs per-value during column type
# inference, so recompiling the patterns on every call is wasted work.
#
# Accepted shapes (the same set the original pattern list accepted):
#   "YYYY-MM-DD HH:MM:SS"           — space-separated, anchored at end
#   "YYYY-MM-DD HH:MM:SS.ffffff"    — space-separated with fraction, anchored
#   "YYYY-MM-DDTHH:MM:SS..."        — T-separated, prefix match (trailing
#                                     fraction/offset/zone designators allowed)
_DATETIME_RE = re.compile(
    r"^\d{4}-\d{2}-\d{2}"
    r"(?: \d{2}:\d{2}:\d{2}(?:\.\d+)?$"  # space variant must end the string
    r"|T\d{2}:\d{2}:\d{2})"              # T variant may have trailing text
)


def detect_datetime_string(value):
    """Return True if ``value`` is a string that looks like a datetime.

    Non-string values (including None) are never considered datetimes.
    """
    return isinstance(value, str) and _DATETIME_RE.match(value) is not None


class D1QueryRunner(BaseQueryRunner):
    """Query runner for Cloudflare D1, an SQLite-compatible database exposed
    over an HTTP API.

    Queries are POSTed as JSON to the configured D1 endpoint; results come
    back as a list of row dicts inside a ``{"result": [{"results": [...]}]}``
    envelope.
    """

    @classmethod
    def configuration_schema(cls):
        """JSON schema for the data-source configuration form."""
        return {
            "type": "object",
            "properties": {
                "cf_url": {"type": "string", "title": "Cloudflare D1 API URL"},
                "cf_token": {"type": "string", "title": "Cloudflare API Token"},
            },
            "required": ["cf_url", "cf_token"],
            "secret": ["cf_token"],
        }

    @classmethod
    def type(cls):
        return "d1"

    @classmethod
    def name(cls):
        return "Cloudflare D1"

    def _query(self, sql, params=None):
        """Run a raw SQL statement against D1 and return the list of row dicts.

        :param sql: SQL text to execute.
        :param params: optional list of bound parameters (defaults to []).
        :raises Exception: on transport errors, malformed responses, or when
            the D1 API reports the query failed.
        """
        headers = {
            "Authorization": f"Bearer {self.configuration.get('cf_token')}",
            "Content-Type": "application/json",
        }
        body = {"sql": sql, "params": params or []}

        try:
            resp = session.post(self.configuration.get("cf_url"), headers=headers, data=json.dumps(body), timeout=30)
            resp.raise_for_status()
            data = resp.json()

            # D1 reports application-level failures in the response body even
            # when the HTTP status is 200; surface them instead of silently
            # returning an empty "successful" result set.
            if not data.get("success", True):
                errors = data.get("errors") or []
                messages = "; ".join(
                    str(err.get("message", err)) if isinstance(err, dict) else str(err) for err in errors
                )
                raise Exception(f"D1 query failed: {messages or 'unknown error'}")

            # Expected envelope: { "result": [ { "results": [...] } ] }
            results = data.get("result", [])
            if not results:
                return []
            return results[0].get("results", [])

        except requests.exceptions.RequestException as e:
            raise Exception(f"Failed to connect to Cloudflare D1: {str(e)}")
        except json.JSONDecodeError as e:
            raise Exception(f"Invalid JSON response from D1: {str(e)}")
        except KeyError as e:
            raise Exception(f"Unexpected response format from D1: {str(e)}")

    def run_query(self, query, user):
        """Execute ``query`` and return ``(result_dict, error)`` per the
        Redash query-runner contract.

        Column types are inferred from the first row's Python values; string
        values that look like datetimes are promoted to TYPE_DATETIME.
        """
        try:
            rows = self._query(query)
            if not rows:
                return {"columns": [], "rows": []}, None

            # Infer columns from the first row only. NOTE(review): a NULL in
            # the first row maps that column to TYPE_STRING even if later
            # rows hold numeric values.
            first_row = rows[0]
            columns = []
            for k, v in first_row.items():
                python_type = type(v).__name__
                redash_type = TYPES_MAP.get(python_type, TYPE_STRING)

                # Promote datetime-looking strings so Redash renders and
                # filters them as dates rather than plain text.
                if python_type == "str" and detect_datetime_string(v):
                    redash_type = TYPE_DATETIME

                columns.append({"name": k, "friendly_name": k, "type": redash_type})

            return {"columns": columns, "rows": rows}, None

        except Exception as e:
            # Redash surfaces this string to the user in the query editor.
            return None, str(e)

    def get_schema(self, get_stats=False):
        """Return schema information (tables and their columns).

        Internal SQLite tables and Cloudflare's ``_cf_KV`` bookkeeping table
        are excluded.
        """
        schema = []
        try:
            tables = self._query(
                "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '_cf_KV'"
            )

            for table in tables:
                table_name = table["name"]
                # Quote the identifier (doubling any embedded double quotes)
                # so table names that require quoting — reserved words,
                # spaces, punctuation — still introspect correctly.
                quoted_name = table_name.replace('"', '""')
                columns = self._query(f'PRAGMA table_info("{quoted_name}")')

                column_info = [{"name": col["name"], "type": col["type"]} for col in columns]
                schema.append({"name": table_name, "columns": column_info})

        except Exception as e:
            raise Exception(f"Failed to get schema: {str(e)}")

        return schema

    def test_connection(self):
        """Run a trivial query; raise if it fails (Redash's connection test)."""
        query = "SELECT 1 as test"
        _, error = self.run_query(query, None)
        if error:
            raise Exception(error)


register(D1QueryRunner)
1 change: 1 addition & 0 deletions redash/settings/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -348,6 +348,7 @@ def email_server_is_configured():
"redash.query_runner.oracle",
"redash.query_runner.e6data",
"redash.query_runner.risingwave",
"redash.query_runner.d1",
"redash.query_runner.duckdb",
]

Expand Down
Loading