[WIP] Save graph to SQLite #3

Draft · wants to merge 3 commits into base: main
6 changes: 6 additions & 0 deletions .gitignore
@@ -174,3 +174,9 @@ cython_debug/

 # PyPI configuration file
 .pypirc
+
+# aider
+.aider.*
+
+# vscode
+.vscode/
19 changes: 13 additions & 6 deletions Dockerfile
@@ -1,11 +1,18 @@
 FROM python:3.12-slim
 
-# Change the timezone to UTC+8
-RUN ln -sf /usr/share/zoneinfo/Asia/Singapore /etc/localtime
-
-RUN apt-get update && apt-get upgrade -y
-
-RUN apt-get install -y cron && apt-get clean
+ENV DEBIAN_FRONTEND=noninteractive
+ENV TZ=Asia/Singapore
+
+# Set timezone
+RUN ln -sf /usr/share/zoneinfo/${TZ} /etc/localtime
+
+RUN apt-get update && \
+    apt-get -y upgrade && \
+    apt-get -y install --no-install-recommends \
+        cron \
+        sqlite3 && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 
 # Set working directory
 WORKDIR /app
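The rewritten layer pins the timezone via `ENV TZ` and adds the `sqlite3` CLI for inspecting the new graph database. A quick sanity check of the timezone, as a minimal sketch (assuming it runs inside the built image):

```python
# Minimal sketch: confirm the /etc/localtime symlink took effect in-container.
from datetime import datetime

offset = datetime.now().astimezone().utcoffset()
print(offset)  # expected: 8:00:00 for Asia/Singapore
```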
4 changes: 4 additions & 0 deletions Makefile
@@ -0,0 +1,4 @@
.PHONY: test

test:
	pytest -v
2 changes: 2 additions & 0 deletions pytest.ini
@@ -0,0 +1,2 @@
[pytest]
pythonpath = src
4 changes: 4 additions & 0 deletions requirements-dev.txt
@@ -0,0 +1,4 @@
-r requirements.txt
pytest
pytest-asyncio
httpx
5 changes: 2 additions & 3 deletions requirements.txt
@@ -5,9 +5,8 @@ langgraph
 fastapi
 uvicorn
 httpx
-
 openai
 pyyaml
 python-multipart
-
-langchain-ollama
+langchain-ollama
+simple-graph-sqlite==2.1.0
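The new `simple-graph-sqlite` pin backs the persistence layer in `src/FileTransmit.py` below. For orientation, a minimal sketch of the library's calling pattern as this PR uses it (signatures are my reading of the 2.1.0 API; verify against its README): helpers like `add_node`/`find_node` return cursor functions, and `atomic()` runs them in a transaction.

```python
from simple_graph_sqlite import database as sg_db

db_file = 'graphs.db'      # hypothetical path, just for this sketch
sg_db.initialize(db_file)  # creates the nodes/edges schema if missing

# add_node/find_node build cursor functions; atomic() executes them.
sg_db.atomic(db_file, sg_db.add_node({'name': 'node1'}, 'id-1'))
node = sg_db.atomic(db_file, sg_db.find_node('id-1'))
```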
8 changes: 8 additions & 0 deletions setup.py
@@ -0,0 +1,8 @@
from setuptools import setup, find_packages

setup(
    name='LangGraph-GUI-backend',
    version='0.1.0',
    packages=find_packages(where='src'),
    package_dir={'': 'src'}
)
127 changes: 112 additions & 15 deletions src/FileTransmit.py
@@ -6,6 +6,8 @@
 import io
 from datetime import datetime
 import json
+import uuid
+from simple_graph_sqlite import database as sg_db
 
 from fastapi import HTTPException, BackgroundTasks
 from fastapi import APIRouter, File, UploadFile, HTTPException
@@ -21,14 +23,24 @@
 def get_or_create_workspace(username: str) -> str:
     """
     Ensures the workspace directory for a given username exists.
-    Creates the directory if it doesn't exist.
+    Creates the directory and initializes SQLite database if it doesn't exist.
     """
     workspace_path = os.path.join('./workspace/', username)
     if not os.path.exists(workspace_path):
         os.makedirs(workspace_path)
         print(f"Created workspace for {username} at {workspace_path}")
 
+    # Initialize SQLite database
+    db_path = os.path.join(workspace_path, 'graphs.db')
+    sg_db.initialize(db_path)
+
     return workspace_path
 
+def get_db_path(username: str) -> str:
+    """Get SQLite database file path for the user"""
+    workspace_path = get_or_create_workspace(username)
+    return os.path.join(workspace_path, 'graphs.db')
+
 
 @file_router.get('/download/{username}')
 async def download_workspace(username: str):
@@ -79,24 +91,109 @@ async def upload_file(username: str, files: List[UploadFile] = File(...)):

return JSONResponse(content={"message": "Files successfully uploaded"}, status_code=200)

# Route to handle saving graph data as JSON with username
@file_router.post('/save-graph/{username}')
async def save_graph(username: str, graph_data: dict):
@file_router.post('/graph/{username}')
async def post_graph(username: str, graph_data: dict):
"""
Save a new graph to the database
Returns the UUID of the saved graph
"""
try:
# Get or create the user's workspace
user_workspace = get_or_create_workspace(username)

# Save the JSON data to a file in the user's workspace
graph_file_path = os.path.join(user_workspace, 'graph.json')
with open(graph_file_path, 'w') as graph_file:
json.dump(graph_data, graph_file, indent=2)
db_path = get_db_path(username)

# Generate UUID for new graph
graph_uuid = str(uuid.uuid4())

# Save nodes
nodes = graph_data.get('nodes', [])
node_ids = [str(uuid.uuid4()) for _ in nodes]
node_bodies = [{
**node,
'graph_uuid': graph_uuid,
'node_id': node_ids[i]
} for i, node in enumerate(nodes)]

# Save edges
edges = []
for i, node in enumerate(nodes):
for next_id in node.get('nexts', []):
edges.append({
'source': node_ids[i],
'target': next_id,
'properties': {}
})

# Save to database
sg_db.atomic(db_path, sg_db.add_nodes(node_bodies, node_ids))
if edges:
sources = [e['source'] for e in edges]
targets = [e['target'] for e in edges]
properties = [e['properties'] for e in edges]
sg_db.atomic(db_path, sg_db.connect_many_nodes(sources, targets, properties))

return JSONResponse(content={"uuid": graph_uuid}, status_code=200)

except Exception as e:
print(f"Error saving graph: {e}")
raise HTTPException(status_code=500, detail=f"Failed to save graph: {str(e)}")

print(f"Graph data saved to {graph_file_path}")
return JSONResponse(content={"message": "Graph data successfully saved"}, status_code=200)
+@file_router.get('/graph/{username}/{graph_uuid}')
+async def get_graph_by_uuid(username: str, graph_uuid: str):
+    """
+    Retrieve a graph by its UUID
+    Returns the graph data in JSON format
+    """
+    try:
+        db_path = get_db_path(username)
+
+        # Find nodes for this graph
+        clause = sg_db._generate_clause('graph_uuid')
+        nodes = sg_db.atomic(db_path, sg_db.find_nodes([clause], (graph_uuid,)))
+
+        # Build node mapping
+        node_map = {n['node_id']: n for n in nodes}
+
+        # Find edges and build connections
+        for node in nodes:
+            connections = sg_db.atomic(db_path, sg_db.get_connections(node['node_id']))
+            node['nexts'] = [edge[1] for edge in connections]
+
+        # Remove internal fields
+        for node in nodes:
+            node.pop('graph_uuid', None)
+            node.pop('node_id', None)
+
+        return JSONResponse(content={"nodes": nodes}, status_code=200)
+
+    except Exception as e:
+        print(f"Error retrieving graph: {e}")
+        raise HTTPException(status_code=500, detail=f"Failed to retrieve graph: {str(e)}")

+@file_router.get('/graphs/{username}')
+async def list_graphs(username: str):
+    """
+    List all graph UUIDs for a user
+    Returns a list of UUIDs
+    """
+    try:
+        db_path = get_db_path(username)
+
+        # Get unique graph UUIDs
+        clause = sg_db._generate_clause('graph_uuid', tree=True)
+        graphs = sg_db.atomic(db_path, sg_db.find_nodes(
+            [clause],
+            ('%',),
+            tree_query=True,
+            key='graph_uuid'
+        ))
+
+        # Extract unique UUIDs
+        uuids = list(set(g['graph_uuid'] for g in graphs))
+
+        return JSONResponse(content={"uuids": uuids}, status_code=200)
+
     except Exception as e:
-        print(f"Error saving graph data: {e}")
-        raise HTTPException(status_code=500, detail=f"Failed to save graph data: {str(e)}")
+        print(f"Error listing graphs: {e}")
+        raise HTTPException(status_code=500, detail=f"Failed to list graphs: {str(e)}")

 # Route to handle cleaning the user's workspace
 @file_router.post('/clean-cache/{username}')
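And the listing endpoint, same assumptions; note the handler matches every node with a `'%'` LIKE pattern and dedupes graph UUIDs in Python via `set()`:

```python
# List all graph UUIDs stored for the user.
resp = httpx.get('http://localhost:8000/graphs/alice')
uuids = resp.json()['uuids']
```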
Empty file added src/__init__.py
52 changes: 52 additions & 0 deletions tests/test_data/example.json
@@ -0,0 +1,52 @@
{
  "nodes": [
    {
      "uniq_id": "uniq_id_12",
      "nexts": [],
      "type": "CONDITION",
      "name": "Check Roll",
      "description": "see final one 'need_roll'\nif see need_roll is True give me True \nelse if see need_roll is false, give me False",
      "tool": "",
      "true_next": "uniq_id_11",
      "false_next": "uniq_id_10",
      "ext": {
        "pos_x": 611,
        "pos_y": -237.20000000000005,
        "width": 312,
        "height": 326,
        "info": ""
      }
    },
    {
      "uniq_id": "uniq_id_11",
      "nexts": [],
      "type": "ACTION",
      "name": "True Case",
      "description": "",
      "tool": "",
      "ext": {
        "pos_x": 1011,
        "pos_y": -37.200000000000045,
        "width": 200,
        "height": 100,
        "info": ""
      }
    },
    {
      "uniq_id": "uniq_id_10",
      "nexts": [],
      "type": "ACTION",
      "name": "False Case",
      "description": "",
      "tool": "",
      "ext": {
        "pos_x": 1011,
        "pos_y": -437.20000000000005,
        "width": 200,
        "height": 100,
        "info": ""
      }
    }
  ],
  "node_counter": 3
}
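One thing this fixture highlights: the CONDITION node links its branches through `true_next`/`false_next` while `nexts` stays empty, so `post_graph` writes no edges for this example and the round-trip test below passes on node fields alone. A hedged sketch of how branch links could also be collected, if that is the intent (an assumption, not something this PR does):

```python
# Hypothetical helper: gather all outgoing links, including CONDITION branches.
def branch_targets(node: dict) -> list[str]:
    targets = list(node.get('nexts', []))
    for key in ('true_next', 'false_next'):
        if node.get(key):
            targets.append(node[key])
    return targets
```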
82 changes: 82 additions & 0 deletions tests/test_file_transmit.py
@@ -0,0 +1,82 @@
import pytest
from fastapi.testclient import TestClient
from FileTransmit import file_router
import os
import shutil
import json
import pathlib

@pytest.fixture
def client():
    from fastapi import FastAPI
    app = FastAPI()
    app.include_router(file_router)
    return TestClient(app)

@pytest.fixture(autouse=True)
def cleanup():
    if os.path.exists('./workspace/'):
        shutil.rmtree('./workspace/')
    yield
    if os.path.exists('./workspace/'):
        shutil.rmtree('./workspace/')

def test_upload_and_download(client):
    files = [('files', ('test.txt', b'hello world'))]
    response = client.post('/upload/testuser', files=files)
    assert response.status_code == 200

    response = client.get('/download/testuser')
    assert response.status_code == 200
    assert response.headers['content-type'] == 'application/zip'
    assert 'attachment; filename=testuser_workspace.zip' in response.headers['content-disposition']

def test_graph_operations(client):
    graph_data = {
        'nodes': [
            {'name': 'node1', 'nexts': []},
            {'name': 'node2', 'nexts': []}
        ]
    }
    response = client.post('/graph/testuser', json=graph_data)
    assert response.status_code == 200
    graph_uuid = response.json()['uuid']

    response = client.get(f'/graph/testuser/{graph_uuid}')
    assert response.status_code == 200
    assert len(response.json()['nodes']) == 2

    #response = client.get('/graphs/testuser')
    #assert response.status_code == 200
    #assert graph_uuid in response.json()['uuids']

def test_save_example_graph(client):
    file = pathlib.Path("tests/test_data/example.json")
    with open(file) as f:
        example_graph_data = json.load(f)

    response = client.post('/graph/testuser', json=example_graph_data)
    assert response.status_code == 200
    graph_uuid = response.json()['uuid']

    response = client.get(f'/graph/testuser/{graph_uuid}')
    assert response.status_code == 200

    saved_nodes = response.json()['nodes']
    assert len(saved_nodes) == len(example_graph_data['nodes'])

    for saved_node, original_node in zip(saved_nodes, example_graph_data['nodes']):
        assert saved_node['name'] == original_node['name']
        assert saved_node['type'] == original_node['type']
        assert saved_node['description'] == original_node['description']

def test_clean_cache(client):
    files = [('files', ('test.txt', b'hello world'))]
    client.post('/upload/testuser', files=files)

    response = client.post('/clean-cache/testuser')
    assert response.status_code == 200

    response = client.get('/download/testuser')
    assert response.status_code == 200