From ff3570dec088bd55b5fba8038d0378c2af0edf85 Mon Sep 17 00:00:00 2001 From: dextmorgn Date: Thu, 15 May 2025 17:16:27 +0200 Subject: [PATCH] feat: crazy progress --- flowsint-api/app/api/__init__.py | 0 flowsint-api/app/api/routes/__init__.py | 0 flowsint-api/app/api/routes/sketches.py | 177 ++++ flowsint-api/app/api/routes/transforms.py | 306 +++++++ flowsint-api/app/core/celery.py | 4 +- flowsint-api/app/core/logger.py | 5 +- flowsint-api/app/main.py | 231 +----- flowsint-api/app/scanners/domains/resolve.py | 30 +- .../app/scanners/domains/subdomains.py | 84 +- flowsint-api/app/scanners/domains/whois.py | 28 +- flowsint-api/app/scanners/emails/holehe.py | 30 +- flowsint-api/app/scanners/ips/geolocation.py | 30 +- .../app/scanners/ips/reverse_resolve.py | 30 +- flowsint-api/app/scanners/orchestrator.py | 240 +++++- .../app/scanners/organizations/sirene.py | 31 +- flowsint-api/app/scanners/socials/maigret.py | 30 +- flowsint-api/app/tasks/transform.py | 24 +- flowsint-api/app/types/transform.py | 29 + flowsint-api/app/utils.py | 45 +- flowsint-api/branches.json | 36 + flowsint-web/package.json | 3 + .../sketches/[sketch_id]/2d-graph.tsx | 252 +++--- .../sketches/[sketch_id]/console-panel.tsx | 78 +- .../sketches/[sketch_id]/left-panel.tsx | 2 +- .../sketches/[sketch_id]/right-panel.tsx | 8 +- .../sketches/[sketch_id]/settings-modal.tsx | 5 +- .../sketches/[sketch_id]/toolbar.tsx | 4 +- flowsint-web/src/app/dashboard/layout.tsx | 11 +- .../transforms/[transform_id]/page.tsx | 6 +- .../src/app/dashboard/transforms/page.tsx | 14 +- flowsint-web/src/app/layout.tsx | 17 +- flowsint-web/src/app/not-found.tsx | 2 +- .../src/components/dashboard/feedback.tsx | 2 +- .../src/components/dashboard/main-nav.tsx | 4 +- .../components/dashboard/secondary-nav.tsx | 29 + .../{profile-panel.tsx => details-panel.tsx} | 10 +- .../sketches/sketch/launch-transform.tsx | 39 +- .../sketches/sketch/nodes-panel.tsx | 4 +- .../src/components/transforms/controls.tsx | 210 +++-- 
.../src/components/transforms/editor.tsx | 129 +-- .../transforms/flow-computation-drawer.tsx | 591 ++++++++++++++ .../components/transforms/scanner-data.tsx | 3 +- .../components/transforms/scanner-item.tsx | 30 +- .../components/transforms/scanner-node.tsx | 61 +- .../transforms/transform-item.tsx | 16 +- .../transforms/transform-name-panel.tsx | 153 ++++ flowsint-web/src/components/ui/dialog.tsx | 4 +- .../src/hooks/use-launch-transform.ts | 2 +- .../src/store/node-display-settings.ts | 53 +- flowsint-web/styles/globals.css | 8 +- flowsint-web/yarn.lock | 761 +++++++++++++++++- package.json | 4 +- yarn.lock | 590 ++++++++++++++ 53 files changed, 3679 insertions(+), 816 deletions(-) create mode 100644 flowsint-api/app/api/__init__.py create mode 100644 flowsint-api/app/api/routes/__init__.py create mode 100644 flowsint-api/app/api/routes/sketches.py create mode 100644 flowsint-api/app/api/routes/transforms.py create mode 100644 flowsint-api/app/types/transform.py create mode 100644 flowsint-api/branches.json create mode 100644 flowsint-web/src/components/dashboard/secondary-nav.tsx rename flowsint-web/src/components/sketches/sketch/{profile-panel.tsx => details-panel.tsx} (81%) create mode 100644 flowsint-web/src/components/transforms/flow-computation-drawer.tsx rename flowsint-web/src/{app/dashboard => components}/transforms/transform-item.tsx (78%) create mode 100644 flowsint-web/src/components/transforms/transform-name-panel.tsx diff --git a/flowsint-api/app/api/__init__.py b/flowsint-api/app/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/flowsint-api/app/api/routes/__init__.py b/flowsint-api/app/api/routes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/flowsint-api/app/api/routes/sketches.py b/flowsint-api/app/api/routes/sketches.py new file mode 100644 index 0000000..8ec5f5f --- /dev/null +++ b/flowsint-api/app/api/routes/sketches.py @@ -0,0 +1,177 @@ +from fastapi import APIRouter, HTTPException +from pydantic 
import BaseModel +from typing import Dict, Any +from fastapi import HTTPException +from pydantic import BaseModel, Field +from app.utils import flatten +from typing import Dict, Any +from app.neo4j.connector import Neo4jConnection +import os +from dotenv import load_dotenv + + +load_dotenv() + +URI = os.getenv("NEO4J_URI_BOLT") +USERNAME = os.getenv("NEO4J_USERNAME") +PASSWORD = os.getenv("NEO4J_PASSWORD") + +neo4j_connection = Neo4jConnection(URI, USERNAME, PASSWORD) + + +router = APIRouter() + +@router.get("/sketch/{sketch_id}/nodes") +async def get_sketch_nodes(sketch_id: str): + import random + + nodes_query = """ + MATCH (n) + WHERE n.sketch_id = $sketch_id + RETURN elementId(n) as id, labels(n) as labels, properties(n) as data + LIMIT 500 + """ + nodes_result = neo4j_connection.query(nodes_query, parameters={"sketch_id": sketch_id}) + + node_ids = [record["id"] for record in nodes_result] + + rels_query = """ + UNWIND $node_ids AS nid + MATCH (a)-[r]->(b) + WHERE elementId(a) = nid AND elementId(b) IN $node_ids + RETURN elementId(r) as id, type(r) as type, elementId(a) as source, elementId(b) as target, properties(r) as data + """ + rels_result = neo4j_connection.query(rels_query, parameters={"node_ids": node_ids}) + + nodes = [ + { + "id": str(record["id"]), + "labels": record["labels"], + "data": record["data"], + "label": record["data"].get("label", "Node"), + "type": record["labels"][0].lower(), + "caption": record["data"].get("label", "Node"), + "size": 40, + "color": record["data"].get("color", "#FFFFFF"), + "x": random.random() * 1000, + "y": random.random() * 1000 + } + for record in nodes_result + ] + + rels = [ + { + "id": str(record["id"]), + "type": record["type"], + "from": str(record["source"]), + "to": str(record["target"]), + "data": record["data"], + "caption": record["type"], + "width": 1, + "color": "#A5ABB6" + } + for record in rels_result + ] + + return {"nds": nodes, "rls": rels} + + +class NodeInput(BaseModel): + type: str + data: 
Dict[str, Any] = Field(default_factory=dict) + +def dict_to_cypher_props(props: dict) -> str: + return ", ".join(f"{key}: ${key}" for key in props) + +# Endpoints +@router.get("/sketches") +def read_root(): + return {"message": "Sketches API is running"} + + +@router.post("/sketch/{sketch_id}/nodes/add") +def add_node(sketch_id: str, node: NodeInput): + + node_type = getattr(node, "type", "unknown") + node_data = getattr(node, "data", {}) + + properties = { + "type": node_type, + "sketch_id": sketch_id, + "caption": node_data.get("label", "Node"), + "label": node_data.get("label", "Node"), + "color": node_data.get("color", "Node"), + "size": 40, + } + + if node_data and isinstance(node_data, dict): + flattened_data = flatten(node_data) + properties.update(flattened_data) + + cypher_props = dict_to_cypher_props(properties) + + create_query = f""" + MERGE (d:`{node_type}` {{ {cypher_props} }}) + RETURN d as node + """ + + try: + create_result = neo4j_connection.query(create_query, properties) + except Exception as e: + print(f"Query execution error: {e}") + raise HTTPException(status_code=500, detail=f"Database error: {str(e)}") + + if not create_result: + raise HTTPException(status_code=400, detail="Node creation failed - no result returned") + + try: + new_node = create_result[0]["node"] + except (IndexError, KeyError) as e: + print(f"Error extracting node_id: {e}, result: {create_result}") + new_node = None + new_node["data"]=node_data + + return { + "status": "node added", + "node": new_node, + } + +class EdgeInput(BaseModel): + from_node: dict + to_node: dict + type: str + +@router.post("/sketch/{sketch_id}/edges/add") +def add_edge(sketch_id: str, edge: EdgeInput): + from_props = flatten(edge.from_node) + to_props = flatten(edge.to_node) + + from_cypher = dict_to_cypher_props(from_props) + to_cypher = dict_to_cypher_props(to_props) + + query = f""" + MATCH (a {{ {from_cypher} }}) + MATCH (b {{ {to_cypher} }}) + MERGE (a)-[r:`{edge.type}` {{sketch_id: 
$sketch_id}}]->(b) + RETURN r + """ + + params = { + **from_props, + **to_props, + "sketch_id": sketch_id, + } + + try: + result = neo4j_connection.query(query, params) + except Exception as e: + print(f"Edge creation error: {e}") + raise HTTPException(status_code=500, detail="Failed to create edge") + + if not result: + raise HTTPException(status_code=400, detail="Edge creation failed") + + return { + "status": "edge added", + "edge": result[0]["r"], + } \ No newline at end of file diff --git a/flowsint-api/app/api/routes/transforms.py b/flowsint-api/app/api/routes/transforms.py new file mode 100644 index 0000000..60b79b8 --- /dev/null +++ b/flowsint-api/app/api/routes/transforms.py @@ -0,0 +1,306 @@ +from fastapi import APIRouter, HTTPException +from typing import Dict, List, Any, Optional +from pydantic import BaseModel +import json +from app.utils import extract_input_schema, extract_transform +from app.scanners.registry import ScannerRegistry +from app.core.celery import celery_app +from app.types.domain import MinimalDomain +from app.types.ip import MinimalIp +from app.types.social import MinimalSocial +from app.types.organization import MinimalOrganization +from app.types.email import Email +from app.types.transform import Node, Edge, FlowStep, FlowBranch + +from app.core.db import get_db + +class FlowComputationRequest(BaseModel): + nodes: List[Node] + edges: List[Edge] + inputType: Optional[str] = None + +class FlowComputationResponse(BaseModel): + transformBranches: List[FlowBranch] + initialData: Any + +class StepSimulationRequest(BaseModel): + transformBranches: List[FlowBranch] + currentStepIndex: int + +class LaunchTransformPayload(BaseModel): + values: List[str] + sketch_id: str + +router = APIRouter() + +# Endpoints +@router.get("/transforms") +def read_root(): + return {"message": "Flow Computation API is running"} + +@router.get("/transforms/nodes") +async def get_scans_list(): + scanners = ScannerRegistry.list_by_category() + + flattened_scanners 
= { + category: [ + { + "class_name": scanner["class_name"], + "name": scanner["name"], + "module": scanner["module"], + "doc": scanner["doc"], + "inputs": scanner["inputs"], + "outputs": scanner["outputs"], + "type": "scanner" + } + for scanner in scanner_list + ] + for category, scanner_list in scanners.items() + } + + # Ajoute les types comme des "scanners" spéciaux de type 'type' + object_inputs = [ + extract_input_schema("MinimalDomain", MinimalDomain), + extract_input_schema("MinimalIp", MinimalIp), + extract_input_schema("MinimalSocial", MinimalSocial), + extract_input_schema("Email", Email), + extract_input_schema("MinimalOrganization", MinimalOrganization) + ] + + flattened_scanners["types"] = object_inputs + + return {"items": flattened_scanners} + +@router.post("/transforms/{transform_id}/launch") +async def launch_transform( + transform_id: str, + payload: LaunchTransformPayload, +): + db = get_db() + try: + response = db.table("transforms").select("*").eq("id", str(transform_id)).single().execute() + if response.data is None: + raise HTTPException(status_code=404, detail="Transform not found") + nodes = [Node(**node) for node in response.data["transform_schema"]["nodes"]] + edges = [Edge(**edge) for edge in response.data["transform_schema"]["edges"]] + transform_branches = compute_transform_branches( + payload.values, + nodes, + edges + ) + serializable_branches = [branch.dict() for branch in transform_branches] + task = celery_app.send_task("run_transform", args=[serializable_branches, payload.values, payload.sketch_id]) + return {"id": task.id} + + except Exception as e: + print(e) + raise HTTPException(status_code=404, detail="Transform not found") + +@router.post("/transforms/{transform_id}/compute", response_model=FlowComputationResponse) +def compute_transforms(request: FlowComputationRequest): + # Générer les données d'exemple en fonction du type d'entrée + initial_data = generate_sample_data(request.inputType or "string") + + # Calculer les 
branches de flux + transform_branches = compute_transform_branches( + initial_data, + request.nodes, + request.edges + ) + + return FlowComputationResponse( + transformBranches=transform_branches, + initialData=initial_data + ) + +# Fonctions utilitaires +def generate_sample_data(type_str: str) -> Any: + """Génère des données d'exemple en fonction du type""" + type_str = type_str.lower() if type_str else "string" + + if type_str == "string": + return "sample_text" + elif type_str == "number": + return 42 + elif type_str == "boolean": + return True + elif type_str == "array": + return [1, 2, 3] + elif type_str == "object": + return {"key": "value"} + elif type_str == "url": + return "https://example.com" + elif type_str == "email": + return "user@example.com" + elif type_str == "domain": + return "example.com" + elif type_str == "ip": + return "192.168.1.1" + else: + return f"sample_{type_str}" + +def compute_transform_branches(initial_value: Any, nodes: List[Node], edges: List[Edge]) -> List[FlowBranch]: + """Calcule les branches de flux en fonction des nœuds et des arêtes""" + # Trouver les nœuds d'entrée (points de départ) + input_nodes = [node for node in nodes if node.data.get("type") == "type"] + + if not input_nodes: + return [ + FlowBranch( + id="error", + name="Error", + steps=[ + FlowStep( + nodeId="error", + inputs={}, + type="error", + outputs={}, + status="error", + branchId="error", + depth=0, + ) + ], + ) + ] + + node_map = {node.id: node for node in nodes} + processed_nodes = set() + branches = [] + + def get_outgoing_edges(node_id: str) -> List[Edge]: + return [edge for edge in edges if edge.source == node_id] + + def traverse_graph( + node_id: str, + branch_id: str, + branch_name: str, + depth: int, + input_data: Dict[str, Any], + visited_in_branch=None + ): + branch_counter = 0 + + if visited_in_branch is None: + visited_in_branch = set() + + # Ignorer si ce nœud a déjà été visité dans cette branche + if node_id in visited_in_branch: + return + + 
# Marquer comme visité dans cette branche + visited_in_branch.add(node_id) + + node = node_map.get(node_id) + if not node: + return + + # Obtenir ou créer la branche + branch = next((b for b in branches if b.id == branch_id), None) + if not branch: + branch = FlowBranch(id=branch_id, name=branch_name, steps=[]) + branches.append(branch) + + is_input_node = node.data.get("type") == "type" + + if is_input_node: + outputs_array = node.data["outputs"].get("properties", []) + first_output_name = outputs_array[0].get("name", "output") if outputs_array else "output" + outputs = {first_output_name: initial_value} + else: + outputs = process_node_data(node, input_data) + + # Ajouter l'étape à la branche + branch.steps.append( + FlowStep( + nodeId=node_id, + inputs={} if is_input_node else input_data, + outputs=outputs, + type= "type" if is_input_node else "scanner", + status="pending", + branchId=branch_id, + depth=depth, + ) + ) + processed_nodes.add(node_id) + out_edges = get_outgoing_edges(node_id) + if not out_edges: + return + if len(out_edges) == 1: + edge = out_edges[0] + target_node = node_map.get(edge.target) + if target_node: + # Passer la sortie comme entrée au nœud suivant + output_key = edge.sourceHandle or list(outputs.keys())[0] if outputs else "output" + output_value = outputs.get(output_key, None) + next_input = {edge.targetHandle or "input": output_value} + + traverse_graph(edge.target, branch_id, branch_name, depth + 1, next_input, visited_in_branch) + # Si plusieurs arêtes sortantes, créer de nouvelles branches + else: + for index, edge in enumerate(out_edges): + target_node = node_map.get(edge.target) + if target_node: + # Créer un nouvel ID de branche pour toutes les arêtes sauf la première + new_branch_id = branch_id if index == 0 else f"{branch_id}-{branch_counter}" + if index > 0: + branch_counter += 1 + new_branch_name = branch_name if index == 0 else f"{branch_name} (Branch {index + 1})" + + # Passer la sortie comme entrée au nœud suivant + 
output_key = edge.sourceHandle or list(outputs.keys())[0] if outputs else "output" + output_value = outputs.get(output_key, None) + next_input = {edge.targetHandle or "input": output_value} + + # Pour la première arête, continuer dans la même branche + # Pour les autres arêtes, créer de nouvelles branches mais ne pas revisiter les nœuds déjà dans ce chemin + new_visited = visited_in_branch if index == 0 else visited_in_branch.copy() + + traverse_graph(edge.target, new_branch_id, new_branch_name, depth + 1, next_input, new_visited) + + # Démarrer DFS à partir de chaque nœud d'entrée + for index, input_node in enumerate(input_nodes): + branch_id = f"branch-{index}" + branch_name = f"Flow {index + 1}" if len(input_nodes) > 1 else "Main Flow" + traverse_graph(input_node.id, branch_id, branch_name, 0, {}) + + # Trier les branches par la profondeur de leur premier nœud + branches.sort(key=lambda branch: branch.steps[0].depth if branch.steps else 0) + + return branches + +def process_node_data(node: Node, inputs: Dict[str, Any]) -> Dict[str, Any]: + """Traite les données de nœud en fonction du type de nœud et des entrées""" + outputs = {} + output_types = node.data["outputs"].get("properties", []) + for output in output_types: + output_name = output.get("name", "output") + # Simuler la transformation basée sur la classe/type du nœud + class_name = node.data.get("class_name", "") + if class_name == "StringToLower": + outputs[output_name] = inputs.get("input").lower() if isinstance(inputs.get("input"), str) else inputs.get("input") + elif class_name == "StringToUpper": + outputs[output_name] = inputs.get("input").upper() if isinstance(inputs.get("input"), str) else inputs.get("input") + elif class_name == "Concatenate": + outputs[output_name] = f"{inputs.get('input1', '')}{inputs.get('input2', '')}" + elif class_name == "Add": + outputs[output_name] = (float(inputs.get("input1", 0)) or 0) + (float(inputs.get("input2", 0)) or 0) + elif class_name == "Multiply": + 
outputs[output_name] = (float(inputs.get("input1", 0)) or 0) * (float(inputs.get("input2", 0)) or 0) + elif class_name == "ParseJSON": + try: + outputs[output_name] = json.loads(inputs.get("input")) if isinstance(inputs.get("input"), str) else inputs.get("input") + except: + outputs[output_name] = None + elif class_name == "ExtractDomain": + if isinstance(inputs.get("input"), str) and "." in inputs.get("input", ""): + # Simple extraction de domaine avec regex (implémentation simplifiée) + parts = inputs.get("input").split("/") + domain_part = next((part for part in parts if "." in part), "") + outputs[output_name] = domain_part or inputs.get("input") + else: + outputs[output_name] = inputs.get("input") + else: + # Pour les transformations inconnues, simplement passer l'entrée + outputs[output_name] = inputs.get("input") or f"transformed_{output_name}" + + return outputs \ No newline at end of file diff --git a/flowsint-api/app/core/celery.py b/flowsint-api/app/core/celery.py index 3c584f5..430000b 100644 --- a/flowsint-api/app/core/celery.py +++ b/flowsint-api/app/core/celery.py @@ -2,7 +2,7 @@ from celery import Celery celery_app = Celery( "flowsint", - broker="redis://redis:6379/0", - backend="redis://redis:6379/0", + broker="redis://127.0.0.1:6379/0", + backend="redis://127.0.0.1:6379/0", include=["app.tasks.transform"] ) \ No newline at end of file diff --git a/flowsint-api/app/core/logger.py b/flowsint-api/app/core/logger.py index 53f46e5..d5ecfe2 100644 --- a/flowsint-api/app/core/logger.py +++ b/flowsint-api/app/core/logger.py @@ -2,7 +2,7 @@ from app.core.db import get_db # Supabase client from typing import Literal from uuid import UUID -LogLevel = Literal["info", "warn", "error", "success"] +LogLevel = Literal["info", "warn", "error", "success", "debug"] class Logger: def __init__(self, db): @@ -31,5 +31,8 @@ class Logger: def success(self, scan_id: UUID, sketch_id: UUID, content: str): self.emit(scan_id, sketch_id, content, level="success") + + def 
debug(self, scan_id: UUID, sketch_id: UUID, content: str): + self.emit(scan_id, sketch_id, content, level="debug") logger = Logger(get_db()) diff --git a/flowsint-api/app/main.py b/flowsint-api/app/main.py index 181c9e3..d20e028 100644 --- a/flowsint-api/app/main.py +++ b/flowsint-api/app/main.py @@ -1,25 +1,11 @@ -import json -import random -from uuid import UUID -from fastapi import FastAPI, HTTPException, Depends -from pydantic import BaseModel, Field -from typing import List -from app.core.db import get_db +from fastapi import FastAPI from app.scanners.registry import ScannerRegistry -from app.core.auth import get_current_user -from app.utils import extract_input_schema, flatten -from app.core.celery import celery_app -from app.types.domain import MinimalDomain -from app.types.ip import MinimalIp -from app.types.social import MinimalSocial -from app.types.organization import MinimalOrganization -from app.types.email import Email -from typing import List, Dict, Any from app.neo4j.connector import Neo4jConnection import os from dotenv import load_dotenv -from typing import List from fastapi.middleware.cors import CORSMiddleware +from app.api.routes import transforms +from app.api.routes import sketches load_dotenv() @@ -39,7 +25,6 @@ origins = [ ] - app = FastAPI() neo4j_connection = Neo4jConnection(URI, USERNAME, PASSWORD) @@ -51,213 +36,9 @@ app.add_middleware( allow_headers=["*"], ) +app.include_router(transforms.router, prefix="/api", tags=["branches"]) +app.include_router(sketches.router, prefix="/api", tags=["branches"]) + @app.get("/scanners") async def get_scans_list(): return {"scanners": ScannerRegistry.list()} - -@app.get("/transforms/nodes") -async def get_scans_list(): - scanners = ScannerRegistry.list_by_category() - # Flatten scanner nodes - flattened_scanners = { - category: [ - { - "class_name": scanner["class_name"], - "name": scanner["name"], - "module": scanner["module"], - "doc": scanner["doc"], - "inputs": scanner["inputs"], - "outputs": 
scanner["outputs"], - "type": "scanner" - } - for scanner in scanner_list - ] - for category, scanner_list in scanners.items() - } - - # Add your object types under a dedicated category (e.g., "types") - object_inputs = [ - extract_input_schema("MinimalDomain", MinimalDomain), - extract_input_schema("MinimalIp", MinimalIp), - extract_input_schema("MinimalSocial", MinimalSocial), - extract_input_schema("Email", Email), - extract_input_schema("MinimalOrganization", MinimalOrganization) - ] - - flattened_scanners["inputs"] = object_inputs - - return {"items": flattened_scanners} - - -class LaunchTransformPayload(BaseModel): - values: List[str] - sketch_id: str - -@app.post("/transforms/{transform_id}/launch") -async def launch_transform( - transform_id: str, - payload: LaunchTransformPayload, - user=Depends(get_current_user) -): - db = get_db() - try: - response = db.table("transforms").select("*").eq("id", str(transform_id)).single().execute() - if response.data is None: - raise HTTPException(status_code=404, detail="Transform not found") - - task = celery_app.send_task("run_transform", args=[response.data["transform_schema"], payload.values, payload.sketch_id]) - return {"id": task.id} - - except Exception as e: - print(e) - raise HTTPException(status_code=404, detail="Transform not found") - - -@app.get("/sketch/{sketch_id}/nodes") -async def get_sketch_nodes(sketch_id: str): - import random - - nodes_query = """ - MATCH (n) - WHERE n.sketch_id = $sketch_id - RETURN elementId(n) as id, labels(n) as labels, properties(n) as data - LIMIT 500 - """ - nodes_result = neo4j_connection.query(nodes_query, parameters={"sketch_id": sketch_id}) - - node_ids = [record["id"] for record in nodes_result] - - rels_query = """ - UNWIND $node_ids AS nid - MATCH (a)-[r]->(b) - WHERE elementId(a) = nid AND elementId(b) IN $node_ids - RETURN elementId(r) as id, type(r) as type, elementId(a) as source, elementId(b) as target, properties(r) as data - """ - rels_result = 
neo4j_connection.query(rels_query, parameters={"node_ids": node_ids}) - - nodes = [ - { - "id": str(record["id"]), - "labels": record["labels"], - "data": record["data"], - "label": record["data"].get("label", "Node"), - "type": record["labels"][0].lower(), - "caption": record["data"].get("label", "Node"), - "size": 40, - "color": record["data"].get("color", "#FFFFFF"), - "x": random.random() * 1000, - "y": random.random() * 1000 - } - for record in nodes_result - ] - - rels = [ - { - "id": str(record["id"]), - "type": record["type"], - "from": str(record["source"]), - "to": str(record["target"]), - "data": record["data"], - "caption": record["type"], - "width": 1, - "color": "#A5ABB6" - } - for record in rels_result - ] - - return {"nds": nodes, "rls": rels} - - -class NodeInput(BaseModel): - type: str - data: Dict[str, Any] = Field(default_factory=dict) - -def dict_to_cypher_props(props: dict) -> str: - return ", ".join(f"{key}: ${key}" for key in props) - -@app.post("/sketch/{sketch_id}/nodes/add") -def add_node(sketch_id: str, node: NodeInput): - - node_type = getattr(node, "type", "unknown") - node_data = getattr(node, "data", {}) - - properties = { - "type": node_type, - "sketch_id": sketch_id, - "caption": node_data.get("label", "Node"), - "label": node_data.get("label", "Node"), - "color": node_data.get("color", "Node"), - "size": 40, - } - - if node_data and isinstance(node_data, dict): - flattened_data = flatten(node_data) - properties.update(flattened_data) - - cypher_props = dict_to_cypher_props(properties) - - create_query = f""" - MERGE (d:`{node_type}` {{ {cypher_props} }}) - RETURN d as node - """ - - try: - create_result = neo4j_connection.query(create_query, properties) - except Exception as e: - print(f"Query execution error: {e}") - raise HTTPException(status_code=500, detail=f"Database error: {str(e)}") - - if not create_result: - raise HTTPException(status_code=400, detail="Node creation failed - no result returned") - - try: - new_node = 
create_result[0]["node"] - except (IndexError, KeyError) as e: - print(f"Error extracting node_id: {e}, result: {create_result}") - new_node = None - new_node["data"]=node_data - - return { - "status": "node added", - "node": new_node, - } - -class EdgeInput(BaseModel): - from_node: dict - to_node: dict - type: str - -@app.post("/sketch/{sketch_id}/edges/add") -def add_edge(sketch_id: str, edge: EdgeInput): - from_props = flatten(edge.from_node) - to_props = flatten(edge.to_node) - - from_cypher = dict_to_cypher_props(from_props) - to_cypher = dict_to_cypher_props(to_props) - - query = f""" - MATCH (a {{ {from_cypher} }}) - MATCH (b {{ {to_cypher} }}) - MERGE (a)-[r:`{edge.type}` {{sketch_id: $sketch_id}}]->(b) - RETURN r - """ - - params = { - **from_props, - **to_props, - "sketch_id": sketch_id, - } - - try: - result = neo4j_connection.query(query, params) - except Exception as e: - print(f"Edge creation error: {e}") - raise HTTPException(status_code=500, detail="Failed to create edge") - - if not result: - raise HTTPException(status_code=400, detail="Edge creation failed") - - return { - "status": "edge added", - "edge": result[0]["r"], - } \ No newline at end of file diff --git a/flowsint-api/app/scanners/domains/resolve.py b/flowsint-api/app/scanners/domains/resolve.py index a906acd..1e09b82 100644 --- a/flowsint-api/app/scanners/domains/resolve.py +++ b/flowsint-api/app/scanners/domains/resolve.py @@ -20,22 +20,36 @@ class ResolveScanner(Scanner): @classmethod def category(cls) -> str: return "domains" + + @classmethod + def key(cls) -> str: + return "domain" @classmethod def input_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(InputType) - return [ - {"name": prop, "type": resolve_type(details) } - for prop, details in adapter.json_schema()["$defs"]["MinimalDomain"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": 
resolve_type(info)} + for prop, info in details["properties"].items() + ] + } @classmethod def output_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(OutputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["MinimalIp"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } def preprocess(self, data: Union[List[str], List[dict], InputType]) -> InputType: cleaned: InputType = [] diff --git a/flowsint-api/app/scanners/domains/subdomains.py b/flowsint-api/app/scanners/domains/subdomains.py index 487f7d9..bdaeb24 100644 --- a/flowsint-api/app/scanners/domains/subdomains.py +++ b/flowsint-api/app/scanners/domains/subdomains.py @@ -1,3 +1,4 @@ +import shutil import requests import subprocess from typing import List, Dict, Any, TypeAlias, Union @@ -8,10 +9,10 @@ from pydantic import TypeAdapter from app.core.logger import logger InputType: TypeAlias = List[MinimalDomain] -OutputType: TypeAlias = List[Domain] +OutputType: TypeAlias = List[MinimalDomain] class SubdomainScanner(Scanner): - """Scanner to find subdomains associated to a domain.""" + """Scanner to find subdomains associated with a domain.""" @classmethod def name(cls) -> str: @@ -23,26 +24,38 @@ class SubdomainScanner(Scanner): @classmethod def key(cls) -> str: - return "subdomains" - + return "domain" + @classmethod def input_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(InputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["MinimalDomain"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for 
prop, info in details["properties"].items() + ] + } @classmethod def output_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(OutputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["Domain"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } + def preprocess(self, data: Union[List[str], List[dict], InputType]) -> InputType: cleaned: InputType = [] + logger.debug(self.scan_id, self.sketch_id,f"[SUBDOMAIN_SCANNER]: preprocessed: {str(data)}") for item in data: domain_obj = None if isinstance(item, str): @@ -53,21 +66,34 @@ class SubdomainScanner(Scanner): domain_obj = item if domain_obj and is_valid_domain(domain_obj.domain) != "invalid": cleaned.append(domain_obj) + logger.debug(self.scan_id, self.sketch_id,f"[SUBDOMAIN_SCANNER]: postprocessed: {str(cleaned)}") return cleaned def scan(self, data: InputType) -> OutputType: - """Find subdomains using crt.sh and subfinder.""" + """Find subdomains using subfinder (if available) or fallback to crt.sh.""" domains: OutputType = [] + use_subfinder = self.__is_subfinder_installed() + logger.debug(self.scan_id, self.sketch_id,f"[SUBDOMAIN_SCANNER]: input data: {str(data)}") for md in data: d = Domain(domain=md.domain) - subdomains = self.__get_subdomains_from_subfinder(d.domain) - # subdomains.update(self.__get_subdomains_from_crtsh(d.domain)) + if use_subfinder: + subdomains = self.__get_subdomains_from_subfinder(d.domain) + if not subdomains: + logger.warning(self.scan_id, self.sketch_id, f"subfinder failed for {d.domain}, falling back to crt.sh") + subdomains = self.__get_subdomains_from_crtsh(d.domain) + else: + logger.info(self.scan_id, self.sketch_id, "subfinder not found, using crt.sh only") + subdomains = 
self.__get_subdomains_from_crtsh(d.domain) + d.subdomains = sorted(subdomains) domains.append(d) return domains + def __is_subfinder_installed(self) -> bool: + return shutil.which("subfinder") is not None + def __get_subdomains_from_crtsh(self, domain: str) -> set[str]: subdomains: set[str] = set() try: @@ -84,9 +110,9 @@ class SubdomainScanner(Scanner): if "*" not in sub and is_valid_domain(sub) and sub.endswith(domain) and sub != domain: subdomains.add(sub) elif "*" in sub: - print(f"[IGNORED] Wildcard subdomain: {repr(sub)}") + logger.debug(self.scan_id, self.sketch_id, f"Ignored wildcard subdomain: {sub}") except Exception as e: - print(f"[ERROR] crt.sh failed for {domain}: {e}") + logger.error(self.scan_id, self.sketch_id, f"crt.sh failed for {domain}: {e}") return subdomains def __get_subdomains_from_subfinder(self, domain: str) -> set[str]: @@ -99,27 +125,28 @@ class SubdomainScanner(Scanner): if result.returncode == 0: for sub in result.stdout.strip().splitlines(): sub = sub.strip().lower() - if is_valid_domain(sub) and sub.endswith(domain) and sub != domain: + if is_valid_domain(sub) and sub.endswith(domain) and sub != domain and not sub.startswith("."): subdomains.add(sub) else: - print(f"[ERROR] subfinder failed for {domain}: {result.stderr.strip()}") + logger.error(self.scan_id, self.sketch_id, f"subfinder failed for {domain}: {result.stderr.strip()}") except Exception as e: - print(f"[ERROR] subfinder exception for {domain}: {e}") + logger.error(self.scan_id, self.sketch_id, f"subfinder exception for {domain}: {e}") return subdomains def postprocess(self, results: OutputType, original_input: InputType) -> OutputType: + output: OutputType = [] for domain_obj in results: if not self.neo4j_conn: continue for subdomain in domain_obj.subdomains: + output.append(MinimalDomain(domain=subdomain)) self.neo4j_conn.query(""" - MERGE (sub:subdomain {domain: $subdomain}) - SET sub.sketch_id = $sketch_id - SET sub.sketch_id = $sketch_id - SET sub.label = $label - 
SET sub.color = $color - SET sub.caption = $caption - SET sub.type = $type + MERGE (sub:domain {domain: $subdomain}) + SET sub.sketch_id = $sketch_id, + sub.label = $label, + sub.color = $color, + sub.caption = $caption, + sub.type = $type MERGE (d:domain {domain: $domain}) MERGE (d)-[:HAS_SUBDOMAIN {sketch_id: $sketch_id}]->(sub) """, { @@ -133,5 +160,4 @@ class SubdomainScanner(Scanner): }) logger.info(self.scan_id, self.sketch_id, f"{domain_obj.domain} -> {len(domain_obj.subdomains)} subdomain(s) found.") - - return results + return output diff --git a/flowsint-api/app/scanners/domains/whois.py b/flowsint-api/app/scanners/domains/whois.py index 705faeb..fb59f3b 100644 --- a/flowsint-api/app/scanners/domains/whois.py +++ b/flowsint-api/app/scanners/domains/whois.py @@ -22,24 +22,36 @@ class WhoisScanner(Scanner): @classmethod def category(cls) -> str: return "domains" + + @classmethod + def key(cls) -> str: + return "domain" @classmethod def input_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(InputType) - schema = [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["MinimalDomain"]["properties"].items() + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() ] - return schema + } @classmethod def output_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(OutputType) - schema = [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["Whois"]["properties"].items() + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() ] - return schema + } def preprocess(self, data: Union[List[str], List[dict], InputType]) -> 
InputType: cleaned: InputType = [] diff --git a/flowsint-api/app/scanners/emails/holehe.py b/flowsint-api/app/scanners/emails/holehe.py index 78356e6..230acc9 100644 --- a/flowsint-api/app/scanners/emails/holehe.py +++ b/flowsint-api/app/scanners/emails/holehe.py @@ -25,20 +25,30 @@ class HoleheScanner(Scanner): return "email" @classmethod - def input_schema(cls) -> List[Dict[str, Any]]: + def input_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(InputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["Email"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } @classmethod - def output_schema(cls) -> List[Dict[str, Any]]: + def output_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(OutputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["Social"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } def preprocess(self, data: Union[List[str], List[dict], InputType]) -> InputType: cleaned: InputType = [] diff --git a/flowsint-api/app/scanners/ips/geolocation.py b/flowsint-api/app/scanners/ips/geolocation.py index 139a771..0cee956 100644 --- a/flowsint-api/app/scanners/ips/geolocation.py +++ b/flowsint-api/app/scanners/ips/geolocation.py @@ -21,20 +21,30 @@ class GeolocationScanner(Scanner): return "ips" @classmethod - def input_schema(cls) -> List[Dict[str, Any]]: + def input_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(InputType) - return [ - {"name": prop, "type": resolve_type(details)} - 
for prop, details in adapter.json_schema()["$defs"]["MinimalIp"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } @classmethod - def output_schema(cls) -> List[Dict[str, Any]]: + def output_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(OutputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["Ip"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } def preprocess(self, data: Union[List[str], List[dict], InputType]) -> InputType: cleaned: InputType = [] diff --git a/flowsint-api/app/scanners/ips/reverse_resolve.py b/flowsint-api/app/scanners/ips/reverse_resolve.py index c6f471b..fdbd6d0 100644 --- a/flowsint-api/app/scanners/ips/reverse_resolve.py +++ b/flowsint-api/app/scanners/ips/reverse_resolve.py @@ -27,20 +27,30 @@ class ReverseResolveScanner(Scanner): return "ips" @classmethod - def input_schema(cls) -> List[Dict[str, Any]]: + def input_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(InputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["MinimalIp"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } @classmethod - def output_schema(cls) -> List[Dict[str, Any]]: + def output_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(OutputType) - return [ - {"name": prop, "type": 
resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["MinimalDomain"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } def preprocess(self, data: Union[List[str], List[dict], InputType]) -> InputType: cleaned: InputType = [] diff --git a/flowsint-api/app/scanners/orchestrator.py b/flowsint-api/app/scanners/orchestrator.py index 82a0077..5b87dbf 100644 --- a/flowsint-api/app/scanners/orchestrator.py +++ b/flowsint-api/app/scanners/orchestrator.py @@ -1,28 +1,130 @@ -from typing import List, Dict, Any +from typing import List, Dict, Any, Tuple, Set from uuid import UUID from datetime import datetime -from pydantic import BaseModel +from pydantic import BaseModel, ValidationError from app.scanners.base import Scanner from app.scanners.registry import ScannerRegistry -from pydantic import ValidationError +from app.core.logger import logger +from app.types.transform import FlowBranch, FlowStep class TransformOrchestrator(Scanner): - def __init__(self, sketch_id: str, scan_id: str, scanner_names: List[str], neo4j_conn=None): + def __init__(self, sketch_id: str, scan_id: str, transform_branches: List[FlowBranch], neo4j_conn=None): super().__init__(sketch_id, scan_id, neo4j_conn=neo4j_conn) - self.scanner_names = scanner_names + self.transform_branches = transform_branches self.neo4j_conn = neo4j_conn - self.scanners = [] + self.scanners = {} # Map of nodeId -> scanner instance self._load_scanners() def _load_scanners(self) -> None: - if not self.scanner_names: - raise ValueError("No scanners provided") + if not self.transform_branches: + raise ValueError("No transform branches provided") - for name in self.scanner_names: - if not ScannerRegistry.scanner_exists(name): - raise ValueError(f"Scanner '{name}' not found in registry") - 
scanner = ScannerRegistry.get_scanner(name, self.sketch_id, self.scan_id, neo4j_conn=self.neo4j_conn) - self.scanners.append(scanner) + # Collect all scanner nodes across all branches + scanner_nodes = [] + for branch in self.transform_branches: + for step in branch.steps: + if step.type == "type": + continue + scanner_nodes.append(step) + + if not scanner_nodes: + raise ValueError("No scanner nodes found in transform branches") + + # Create scanner instances for each node + for node in scanner_nodes: + node_id = node.nodeId + + # Extract scanner name from nodeId (assuming format like "scanner_name-1234567890") + scanner_name = node_id.split('-')[0] + + if not ScannerRegistry.scanner_exists(scanner_name): + raise ValueError(f"Scanner '{scanner_name}' not found in registry") + + scanner = ScannerRegistry.get_scanner(scanner_name, self.sketch_id, self.scan_id, neo4j_conn=self.neo4j_conn) + self.scanners[node_id] = scanner + + # Log the execution plan for debugging + self.log_execution_plan() + + def log_execution_plan(self): + """Log the execution plan for debugging purposes""" + logger.info(self.scan_id, self.sketch_id, "Workflow execution plan:") + + for branch_idx, branch in enumerate(self.transform_branches): + branch_id = branch.id + branch_name = branch.name + + logger.info(self.scan_id, self.sketch_id, f"Branch: {branch_name} (ID: {branch_id})") + + steps = branch.steps + for step_idx, step in enumerate(steps): + node_id = step.nodeId + scanner_name = node_id.split('-')[0] + depth = step.depth + + # Log the step information + inputs_str = ', '.join([f"{k}: {v}" for k, v in step.inputs.items()]) + outputs_str = ', '.join([f"{k}: {v}" for k, v in step.outputs.items()]) + + logger.info(self.scan_id, self.sketch_id, + f" Step {step_idx+1}: {scanner_name} (Depth: {depth})") + logger.info(self.scan_id, self.sketch_id, f" Inputs: {inputs_str}") + logger.info(self.scan_id, self.sketch_id, f" Outputs: {outputs_str}") + + def resolve_reference(self, ref_value: str, 
results_mapping: Dict[str, Any]) -> Any: + """ + Resolve a reference value from the results mapping. + References could be just the key name like "transformed_domain". + """ + if ref_value in results_mapping: + return results_mapping[ref_value] + return None + + def prepare_scanner_inputs(self, step: FlowStep, results_mapping: Dict[str, Any], initial_values: List[str]) -> Dict[str, Any]: + """ + Prépare les inputs d'un scanner à partir des références et des résultats précédents. + Gère les références simples, les listes, et les valeurs directes. + """ + inputs = {} + + for input_key, input_ref in step.inputs.items(): + # Cas 1 : une seule référence (string) + if isinstance(input_ref, str): + resolved = self.resolve_reference(input_ref, results_mapping) + if resolved is not None: + inputs[input_key] = resolved + + # Cas 2 : liste de références ou valeurs + elif isinstance(input_ref, list): + resolved_items = [] + for item in input_ref: + if isinstance(item, str) and item in results_mapping: + resolved_items.append(results_mapping[item]) + else: + resolved_items.append(item) # valeur directe + inputs[input_key] = resolved_items + + else: + # Cas inattendu (valeur directe ?) + inputs[input_key] = input_ref + + # Si aucun input n'a été résolu, utiliser les valeurs initiales + if not inputs: + scanner = self.scanners.get(step.nodeId) + if scanner: + primary_key = scanner.key() + return {primary_key: initial_values} + + return inputs + + + def update_results_mapping(self, outputs: Dict[str, Any], step_outputs: Dict[str, str], results_mapping: Dict[str, Any]) -> None: + """ + Update the results mapping with new outputs from a scanner. 
+ """ + for output_key, output_ref in step_outputs.items(): + if output_key in outputs: + results_mapping[output_ref] = outputs[output_key] @classmethod def name(cls) -> str: @@ -43,40 +145,101 @@ class TransformOrchestrator(Scanner): @classmethod def output_schema(cls) -> Dict[str, str]: return { - "scanners": "array", + "branches": "array", "results": "dict" } def scan(self, values: List[str]) -> Dict[str, Any]: results = { "initial_values": values, - "scanners": [], + "branches": [], "results": {} } - current_values = values - - for scanner in self.scanners: - print("currentValues" + str(current_values)) - try: - res = scanner.execute(current_values) - if not isinstance(res, (dict, list)): - raise ValueError(f"Scanner '{scanner.name()}' returned unsupported output format") + + # Global mapping of output references to actual values + results_mapping = {} + + # Process each branch + for branch in self.reorder_branches(): + branch_id = branch.id + branch_name = branch.name + branch_results = { + "id": branch_id, + "name": branch_name, + "steps": [] + } + + # Process each step in the branch + scanner_inputs = values + for step in branch.steps: + if step.type == "type": + continue + node_id = step.nodeId + scanner = self.scanners.get(node_id) - results["scanners"].append(scanner.name()) - results["results"][scanner.name()] = res - - if isinstance(res, list): - current_values = res - elif isinstance(res, dict) and "values" in res: - current_values = res["values"] - else: - current_values = [] - - except (ValueError, ValidationError) as e: - results["results"][scanner.name()] = {"error": f"Validation error: {str(e)}"} - except Exception as e: - results["results"][scanner.name()] = {"error": f"Error during scan: {str(e)}"} + if not scanner: + logger.error(self.scan_id, self.sketch_id, f"Scanner not found for node {node_id}") + continue + + scanner_name = scanner.name() + step_result = { + "nodeId": node_id, + "scanner": scanner_name, + "status": "error" # Default to 
error, will update on success + } + + try: + # Prepare inputs for this scanner + # scanner_inputs = self.prepare_scanner_inputs(step, results_mapping, values) + logger.debug(self.scan_id, self.sketch_id,f"Current values to be used: {str(scanner_inputs)}") + if not scanner_inputs: + logger.warning(self.scan_id, self.sketch_id, + f"No inputs available for scanner {scanner_name}, skipping") + step_result["error"] = "No inputs available" + branch_results["steps"].append(step_result) + continue + + logger.info(self.scan_id, self.sketch_id, + f"Running scanner {scanner_name} with inputs: {str(scanner_inputs)}") + + # Execute the scanner + outputs = scanner.execute(scanner_inputs) + if not isinstance(outputs, (dict, list)): + raise ValueError(f"Scanner '{scanner_name}' returned unsupported output format") + logger.success(self.scan_id, self.sketch_id, f"{str(outputs)}") + # Convert outputs to JSON-serializable format + # outputs = self.results_to_json(outputs) + + # Store the outputs in the step result + step_result["outputs"] = outputs + step_result["status"] = "completed" + + # Update the global results mapping with the outputs + self.update_results_mapping(outputs, step.outputs, results_mapping) + + # Also store the raw outputs in the main results + results["results"][node_id] = outputs + scanner_inputs = outputs + except ValidationError as e: + error_msg = f"Validation error: {str(e)}" + logger.error(self.scan_id, self.sketch_id, f"Validation error in {scanner_name}: {str(e)}") + step_result["error"] = error_msg + results["results"][node_id] = {"error": error_msg} + + except Exception as e: + error_msg = f"Error during scan: {str(e)}" + logger.error(self.scan_id, self.sketch_id, f"Error during scan {scanner_name}: {str(e)}") + step_result["error"] = error_msg + results["results"][node_id] = {"error": error_msg} + + branch_results["steps"].append(step_result) + + results["branches"].append(branch_results) + + # Include the final reference mapping for debugging + 
results["reference_mapping"] = results_mapping + return results def results_to_json(self, results: Any) -> Any: @@ -90,5 +253,4 @@ class TransformOrchestrator(Scanner): return [self.results_to_json(item) for item in results] if isinstance(results, dict): return {key: self.results_to_json(value) for key, value in results.items()} - return results - + return results \ No newline at end of file diff --git a/flowsint-api/app/scanners/organizations/sirene.py b/flowsint-api/app/scanners/organizations/sirene.py index c1b25d0..5d81ff7 100644 --- a/flowsint-api/app/scanners/organizations/sirene.py +++ b/flowsint-api/app/scanners/organizations/sirene.py @@ -20,20 +20,30 @@ class SireneScanner(Scanner): return "organizations" @classmethod - def input_schema(cls) -> List[Dict[str, Any]]: + def input_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(InputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["MinimalOrganization"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } @classmethod - def output_schema(cls) -> List[Dict[str, Any]]: + def output_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(OutputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["Organization"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } def preprocess(self, data: Union[List[str], List[dict], InputType]) -> InputType: cleaned: InputType = [] @@ -117,6 +127,7 @@ class SireneScanner(Scanner): SET i.country = $country, i.issued_by 
= $issued_by, i.sketch_id = $sketch_id, + i.type = $type, i.label = $label, i.caption = $label WITH i diff --git a/flowsint-api/app/scanners/socials/maigret.py b/flowsint-api/app/scanners/socials/maigret.py index d148fec..9d55ff1 100644 --- a/flowsint-api/app/scanners/socials/maigret.py +++ b/flowsint-api/app/scanners/socials/maigret.py @@ -30,20 +30,30 @@ class MaigretScanner(Scanner): return "username" @classmethod - def input_schema(cls) -> List[Dict[str, Any]]: + def input_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(InputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["MinimalSocial"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } @classmethod - def output_schema(cls) -> List[Dict[str, Any]]: + def output_schema(cls) -> Dict[str, Any]: adapter = TypeAdapter(OutputType) - return [ - {"name": prop, "type": resolve_type(details)} - for prop, details in adapter.json_schema()["$defs"]["Social"]["properties"].items() - ] + schema = adapter.json_schema() + type_name, details = list(schema["$defs"].items())[0] + return { + "type": type_name, + "properties": [ + {"name": prop, "type": resolve_type(info)} + for prop, info in details["properties"].items() + ] + } def preprocess(self, data: Union[List[str], List[dict], InputType]) -> InputType: cleaned: InputType = [] diff --git a/flowsint-api/app/tasks/transform.py b/flowsint-api/app/tasks/transform.py index d6ebf59..6d85765 100644 --- a/flowsint-api/app/tasks/transform.py +++ b/flowsint-api/app/tasks/transform.py @@ -1,19 +1,16 @@ -import json from app.core.celery import celery_app from app.scanners.orchestrator import TransformOrchestrator from celery import states from app.core.db import get_db -from app.utils import 
extract_transform from typing import List import os from dotenv import load_dotenv from typing import List from app.neo4j.connector import Neo4jConnection +from app.types.transform import FlowBranch load_dotenv() - - URI = os.getenv("NEO4J_URI_BOLT") USERNAME = os.getenv("NEO4J_USERNAME") PASSWORD = os.getenv("NEO4J_PASSWORD") @@ -22,32 +19,33 @@ neo4j_connection = Neo4jConnection(URI, USERNAME, PASSWORD) @celery_app.task(name="run_transform", bind=True) -def run_scan(self, transform_schema, values: List[str], sketch_id: str | None): - db=get_db() +def run_scan(self, transform_branches, values: List[str], sketch_id: str | None): + db = get_db() try: - extracted = extract_transform(transform_schema) - scanner_names = extracted["scanner_names"] + if not transform_branches: + raise ValueError("transform_branches not provided in the input transform") + res = db.table("scans").insert({ "id": self.request.id, "status": "pending", - "scanner_names": scanner_names, "values": values, "sketch_id": sketch_id, "results": [] }).execute() scan_id = res.data[0]["id"] - print(scan_id) - scanner = TransformOrchestrator(sketch_id, scan_id, scanner_names = scanner_names, neo4j_conn=neo4j_connection) + + transform_branches = [FlowBranch(**branch) for branch in transform_branches] + scanner = TransformOrchestrator(sketch_id, scan_id, transform_branches=transform_branches, neo4j_conn=neo4j_connection) results = scanner.execute(values=values) status = "finished" if "error" not in results else "error" db.table("scans").update({ "status": status, - "results":scanner.results_to_json(results=results) + "results": scanner.results_to_json(results=results) }).eq("id", self.request.id).execute() return {"result": scanner.results_to_json(results=results)} except Exception as ex: - error_logs= "an error occured" + error_logs = f"An error occurred: {str(ex)}" print(f"Error in task: {error_logs}") db.table("scans").update({ "status": "error", diff --git a/flowsint-api/app/types/transform.py 
b/flowsint-api/app/types/transform.py new file mode 100644 index 0000000..c32255b --- /dev/null +++ b/flowsint-api/app/types/transform.py @@ -0,0 +1,29 @@ +from pydantic import BaseModel +from typing import Dict, List, Any, Optional, Literal + +class Node(BaseModel): + id: str + data: Dict[str, Any] + position: Optional[Dict[str, float]] = None + type: Optional[str] = None + +class Edge(BaseModel): + id: str + source: str + target: str + sourceHandle: Optional[str] = None + targetHandle: Optional[str] = None + +class FlowStep(BaseModel): + nodeId: str + type: Literal["type", "scanner"] + inputs: Dict[str, Any] + outputs: Dict[str, Any] + status: Literal["pending", "processing", "completed", "error"] + branchId: str + depth: int + +class FlowBranch(BaseModel): + id: str + name: str + steps: List[FlowStep] \ No newline at end of file diff --git a/flowsint-api/app/utils.py b/flowsint-api/app/utils.py index b081fcf..853f036 100644 --- a/flowsint-api/app/utils.py +++ b/flowsint-api/app/utils.py @@ -7,8 +7,10 @@ from urllib.parse import urlparse import re import ssl import socket -from typing import Dict, Any +from typing import Dict, Any, Type +from typing import Any, Dict +from pydantic import BaseModel def is_valid_ip(address: str) -> bool: try: @@ -73,28 +75,41 @@ def resolve_type(details: dict) -> str: return "any" -def extract_input_schema(name: str, model: BaseModel) -> dict: +def extract_input_schema(name: str, model: Type[BaseModel]) -> Dict[str, Any]: adapter = TypeAdapter(model) schema = adapter.json_schema() - properties = schema.get("properties", {}) + + # Vérifie si le schéma utilise $defs (références internes) + if "$defs" in schema: + type_name, details = list(schema["$defs"].items())[0] + else: + type_name = name + details = schema return { "class_name": name, "name": name, "module": model.__module__, - "doc": model.__doc__, - "outputs": [ - { - "name": prop, - "type": resolve_type(val) - } - for prop, val in properties.items() - ], - "inputs": [], - 
"type": "input" + "doc": model.__doc__ or "", + "outputs": { + "type": type_name, + "properties": [ + { + "name": prop, + "type": resolve_type(info) + } + for prop, info in details.get("properties", {}).items() + ] + }, + "inputs": { + "type": "", + "properties": [] + }, + "type": "type" } + def get_domain_from_ssl(ip: str, port: int = 443) -> str | None: try: context = ssl.create_default_context() @@ -119,7 +134,7 @@ def extract_transform(transform: Dict[str, Any]) -> Dict[str, Any]: nodes = transform["nodes"] edges = transform["edges"] - input_node = next((node for node in nodes if node["data"]["type"] == "input"), None) + input_node = next((node for node in nodes if node["data"]["type"] == "type"), None) if not input_node: raise ValueError("No input node found.") input_output = input_node["data"]["outputs"] @@ -178,4 +193,4 @@ def flatten(data_dict): isinstance(value, list) and all(isinstance(item, (str, int, float, bool)) for item in value) ): flattened[key] = value - return flattened \ No newline at end of file + return flattened diff --git a/flowsint-api/branches.json b/flowsint-api/branches.json new file mode 100644 index 0000000..f1c5cfa --- /dev/null +++ b/flowsint-api/branches.json @@ -0,0 +1,36 @@ +[ + { + "id": "branch-0", + "name": "Main Flow", + "steps": [ + { + "nodeId": "MinimalDomain-1747312962241", + "type": "type", + "inputs": {}, + "outputs": { + "domain": [ + "adaltas.com" + ] + }, + "status": "pending", + "branchId": "branch-0", + "depth": 0 + }, + { + "nodeId": "domain_resolve_scanner-1747312965951", + "type": "scanner", + "inputs": { + "domain": [ + "adaltas.com" + ] + }, + "outputs": { + "address": "transformed_address" + }, + "status": "pending", + "branchId": "branch-0", + "depth": 1 + } + ] + } +] \ No newline at end of file diff --git a/flowsint-web/package.json b/flowsint-web/package.json index 2397b7b..3090475 100644 --- a/flowsint-web/package.json +++ b/flowsint-web/package.json @@ -55,6 +55,7 @@ "@tanstack/react-query": "^5.66.8", 
"@tanstack/react-table": "^8.21.2", "@tanstack/react-virtual": "^3.13.8", + "@testing-library/react": "^16.3.0", "@tiptap/pm": "^2.11.5", "@tiptap/react": "^2.11.5", "@tiptap/starter-kit": "^2.11.5", @@ -63,6 +64,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "1.0.0", + "d3": "^7.9.0", "date-fns": "^4.1.0", "embla-carousel-react": "^8.5.2", "framer-motion": "^12.3.1", @@ -98,6 +100,7 @@ "usehooks-ts": "^3.1.1", "uuid": "^11.1.0", "vaul": "^1.1.2", + "vitest": "^3.1.3", "zod": "^3.24.2" }, "devDependencies": { diff --git a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/2d-graph.tsx b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/2d-graph.tsx index b20be11..35387f5 100644 --- a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/2d-graph.tsx +++ b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/2d-graph.tsx @@ -1,6 +1,6 @@ "use client" -import { memo, useCallback, useEffect, useMemo, useRef } from "react" +import { memo, useCallback, useEffect, useMemo } from "react" import dynamic from "next/dynamic" import { Button } from "@/components/ui/button" import Loader from "@/components/loader" @@ -8,14 +8,16 @@ import NewActions from "@/components/sketches/new-actions" import { PlusIcon } from "lucide-react" import { useSketchStore } from "@/store/sketch-store" import { shallow } from "zustand/shallow" -import { useNodesDisplaySettings } from "@/store/node-display-settings" +import { ItemType, useNodesDisplaySettings } from "@/store/node-display-settings" import type { NodeData, EdgeData } from "@/types" import { useGraphControls } from "@/store/graph-controls-store" +// @ts-ignore +import { forceCollide } from 'd3' const ARROW_COLOR = "rgba(136, 136, 136, 0.21)"; const LINE_COLOR = "rgba(136, 136, 136, 0.21)"; -const LINE_WIDTH = .3; -const ARROW_HEAD_LENGTH = 3; +const LINE_WIDTH = 0.4; +const 
ARROW_HEAD_LENGTH = 1; const ForceGraph2D = dynamic(() => import("react-force-graph-2d").then((mod) => mod), { ssr: false, @@ -59,10 +61,10 @@ const stateSelector = (state: { }) const Graph = ({ data, isLoading, width, height }: GraphProps) => { - const fgRef = useRef(null) const colors = useNodesDisplaySettings(c => c.colors) const getIcon = useNodesDisplaySettings(c => c.getIcon) const getSize = useNodesDisplaySettings(c => c.getSize) + const setActions = useGraphControls((s) => s.setActions) const { currentNode, @@ -81,7 +83,8 @@ const Graph = ({ data, isLoading, width, height }: GraphProps) => { shallow, ) - const linkCanvaObject = useMemo(() => (link: any, ctx: any) => { + // Link canvas object rendering function + const linkCanvaObject = useCallback((link: any, ctx: any) => { const start = link.target as any; const end = link.source as any; if ( @@ -121,66 +124,144 @@ const Graph = ({ data, isLoading, width, height }: GraphProps) => { ctx.translate(midX, midY); ctx.rotate(angle); ctx.fillStyle = "#333"; - ctx.font = "1.5px Sans-Serif"; + ctx.font = "1px Sans-Serif"; ctx.textAlign = "center"; ctx.textBaseline = "middle"; ctx.fillText(link.caption, 0, -1); ctx.restore(); } - }, []) + }, [getSize]) - const setActions = useGraphControls((s) => s.setActions); + // Setup graph instance when it's available - using callback approach + const handleGraphRef = useCallback((graphInstance: any) => { + if (!graphInstance) return; - useEffect(() => { - if (!fgRef.current) return; + // Configure the D3 forces + graphInstance.d3Force('charge').distanceMax(25); + graphInstance.d3Force('charge').strength(-15); + graphInstance.d3Force('link').distance(30); + graphInstance.d3Force('collision', forceCollide((node: any) => 2.5 * node.radius)); + graphInstance.d3Force('collide', forceCollide(1)); + + // Set up the actions API setActions({ - zoomToFit: () => fgRef.current?.zoomToFit(400), + zoomToFit: () => graphInstance.zoomToFit(400), zoomIn: () => { - const zoom = 
fgRef.current?.zoom() ?? 1; - fgRef.current?.zoom(zoom * 1.2); + const zoom = graphInstance.zoom() ?? 1; + graphInstance.zoom(zoom * 1.2); }, zoomOut: () => { - const zoom = fgRef.current?.zoom() ?? 1; - fgRef.current?.zoom(zoom / 1.2); + const zoom = graphInstance.zoom() ?? 1; + graphInstance.zoom(zoom / 1.2); }, }); - }, [!!fgRef.current]); + }, [setActions]); + // Center on current node when it changes + const handleGraphInstance = useCallback((graphInstance: any) => { + if (!graphInstance || !currentNode) return; + graphInstance.centerAt(currentNode.x, currentNode.y, 500); + graphInstance.zoom(8, 500); + }, [currentNode]); + + // Update nodes and edges when data changes useEffect(() => { - if (isLoading) return - if (data?.nds) setNodes(data.nds) - if (data?.rls) setEdges(data.rls) - }, [data?.nds, data?.rls, isLoading, setNodes, setEdges]) + if (isLoading) return; + if (data?.nds) setNodes(data.nds); + if (data?.rls) setEdges(data.rls); + }, [data?.nds, data?.rls, isLoading, setNodes, setEdges]); - useEffect(() => { - if (currentNode && fgRef.current) { - fgRef.current.centerAt(currentNode.x, currentNode.y, 500) - fgRef.current.zoom(8, 500) - } - }, [currentNode]) - - const onNodeClick = useCallback((node: any, event: any) => { - const multiSelect = event.ctrlKey || event.shiftKey || event.altKey - toggleNodeSelection(node, multiSelect) - }, [toggleNodeSelection]) + // Node click handler + const onNodeClick = useCallback((node: NodeData, event: React.MouseEvent) => { + const multiSelect = event.ctrlKey || event.shiftKey || event.altKey; + toggleNodeSelection(node, multiSelect); + }, [toggleNodeSelection]); + // Calculate node color based on selection state const nodeColor = useCallback( - (node: any) => { - const typedNode = node as NodeData; - return selectedNodes.some(n => n.id === typedNode.id) ? '#FFCC00' : '#888888'; + (node: NodeData) => { + return selectedNodes.some(n => n.id === node.id) ? 
'#FFCC00' : '#888888'; }, [selectedNodes] - ) + ); + // Handle background click to clear selection const handleBackgroundClick = useCallback(() => { - clearSelectedNodes() - }, [clearSelectedNodes]) + clearSelectedNodes(); + }, [clearSelectedNodes]); + // Node canvas object rendering function + const nodeCanvasObject = useCallback((node: NodeData, ctx: CanvasRenderingContext2D, globalScale: number) => { + const isNodeSelected = isSelected(node.id); + const isNodeCurrent = isCurrent(node.id); + const color = colors[node.type as keyof typeof colors] || "#9FAAB8"; + const nodeSize = getSize(node?.type as ItemType) || 25; + const radius = nodeSize / 10 + (isNodeSelected ? 0.5 : 0); + const fontSize = globalScale * 0.2462; + + // Draw node circle + ctx.font = `${fontSize}px Sans-Serif`; + ctx.fillStyle = color; + ctx.beginPath(); + ctx.arc(node.x!, node.y!, radius, 0, 2 * Math.PI, false); + ctx.fill(); + + // Draw node outline + ctx.beginPath(); + ctx.arc(node.x!, node.y!, radius, 0, 2 * Math.PI, false); + ctx.strokeStyle = color || "black"; + ctx.lineWidth = isNodeSelected ? 0.5 : 0.2; + ctx.stroke(); + ctx.shadowColor = 'rgba(0, 0, 0, 0.2)'; + ctx.shadowBlur = 1; + ctx.shadowOffsetX = 1; + ctx.shadowOffsetY = 2; + + ctx.fillStyle = color; + ctx.beginPath(); + ctx.arc(node.x!, node.y!, radius, 0, 2 * Math.PI, false); + ctx.fill(); + // Draw node icon + const size = nodeSize / 10; + ctx.drawImage(getIcon(node.type as ItemType), node.x! - size / 2, node.y! - size / 2, size, size); + + // Draw selection highlight + if (isNodeSelected) { + ctx.beginPath(); + ctx.arc(node.x!, node.y!, radius + 0.8, 0, 2 * Math.PI, false); + ctx.strokeStyle = '#FFCC00'; + ctx.lineWidth = 0.3; + ctx.stroke(); + + // Draw node label when selected + if (node.label) { + ctx.fillStyle = '#FFFFFF'; + ctx.font = `${fontSize}px Sans-Serif`; + ctx.textAlign = 'center'; + ctx.fillText(node.label, node.x!, node.y! 
+ radius + 4.5); + } + } + + // Draw current node indicator + if (isNodeCurrent) { + ctx.beginPath(); + ctx.arc(node.x!, node.y!, radius + 1.5, 0, 2 * Math.PI, false); + ctx.strokeStyle = 'white'; + ctx.lineWidth = 0.2; + ctx.setLineDash([0.5, 0.5]); + ctx.stroke(); + ctx.setLineDash([]); + } + ctx.save(); // Sauvegarde l'état du contexte + }, [colors, getIcon, getSize, isSelected, isCurrent]); + + // Render loading state if (isLoading) { - return + return ; } + // Render empty state if (!nodes.length) { return (
@@ -191,16 +272,21 @@ const Graph = ({ data, isLoading, width, height }: GraphProps) => {
- ) + ); } + // Render graph return (
Selected: {selectedNodes.length}
{ + handleGraphRef(instance); + if (currentNode) handleGraphInstance(instance); + }} graphData={{ nodes, links: edges }} nodeId="id" linkSource="from" @@ -209,87 +295,23 @@ const Graph = ({ data, isLoading, width, height }: GraphProps) => { nodeAutoColorBy="label" width={width} height={height} + // @ts-ignore nodeColor={nodeColor} linkCanvasObject={linkCanvaObject} - // onRenderFramePre={(ctx, globalScale) => { - // const step = 50; - // const dotSize = 1.5 * globalScale; - // const canvas = ctx.canvas; - // const width = canvas.width; - // const height = canvas.height; - // const graphCoords = fgRef.current?.screen2GraphCoords; - - // if (!graphCoords) return; - - // const topLeft = graphCoords(0, 0); - // const bottomRight = graphCoords(width, height); - - // ctx.save(); - // ctx.clearRect(0, 0, width, height); - // ctx.fillStyle = LINE_COLOR; - // for (let x = Math.floor(topLeft.x / step) * step; x < bottomRight.x; x += step) { - // for (let y = Math.floor(topLeft.y / step) * step; y < bottomRight.y; y += step) { - // const screen = fgRef.current.graph2ScreenCoords(x, y); - // ctx.beginPath(); - // ctx.arc(screen.x, screen.y, dotSize, 0, 2 * Math.PI); - // ctx.fill(); - // } - // } - // ctx.restore(); - // }} linkWidth={link => link ? 2 : 1} + // @ts-ignore onNodeClick={onNodeClick} onBackgroundClick={handleBackgroundClick} onNodeDragEnd={(node) => { - node.fx = node.x - node.fy = node.y - node.fz = node.z - }} - nodeCanvasObject={(node: any, ctx, globalScale) => { - const isNodeSelected = isSelected(node.id) - const isNodeCurrent = isCurrent(node.id) - const color = colors[node.type as keyof typeof colors] - const nodeSize = getSize(node?.type) - const radius = nodeSize / 10 + (isNodeSelected ? 
0.5 : 0) - const fontSize = globalScale * 0.2462 - ctx.font = `${fontSize}px Sans-Serif` - ctx.fillStyle = color - ctx.beginPath() - ctx.arc(node.x!, node.y!, radius, 0, 2 * Math.PI, false) - ctx.fill() - ctx.beginPath() - ctx.arc(node.x!, node.y!, radius, 0, 2 * Math.PI, false) - ctx.strokeStyle = color || "black" - ctx.lineWidth = isNodeSelected ? 0.5 : 0.2 - ctx.stroke() - const size = nodeSize / 10 - ctx.drawImage(getIcon(node.type), node.x - size / 2, node.y - size / 2, size, size); - if (isNodeSelected) { - ctx.beginPath() - ctx.arc(node.x!, node.y!, radius + 0.8, 0, 2 * Math.PI, false) - ctx.strokeStyle = '#FFCC00' - ctx.lineWidth = 0.3 - ctx.stroke() - if (node.label) { - ctx.fillStyle = '#FFFFFF' - ctx.font = `${fontSize} Sans-Serif` - ctx.textAlign = 'center' - ctx.fillText(node.label, node.x!, node.y! + radius + 4.5) - } - } - if (isNodeCurrent) { - ctx.beginPath() - ctx.arc(node.x!, node.y!, radius + 1.5, 0, 2 * Math.PI, false) - ctx.strokeStyle = 'white' - ctx.lineWidth = 0.2 - ctx.setLineDash([0.5, 0.5]) - ctx.stroke() - ctx.setLineDash([]) - } + node.fx = node.x; + node.fy = node.y; + node.fz = node.z; }} + // @ts-ignore + nodeCanvasObject={nodeCanvasObject} />
- ) -} + ); +}; -export default memo(Graph) \ No newline at end of file +export default memo(Graph); \ No newline at end of file diff --git a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/console-panel.tsx b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/console-panel.tsx index 3fcfd3c..38ea380 100644 --- a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/console-panel.tsx +++ b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/console-panel.tsx @@ -4,7 +4,7 @@ import { CopyButton } from "@/components/copy" import { Button } from "@/components/ui/button" import { useConfirm } from "@/components/use-confirm-dialog" import { supabase } from "@/lib/supabase/client" -import { TrashIcon } from "lucide-react" +import { Loader2, TrashIcon } from "lucide-react" import { useParams } from "next/navigation" import { memo, useCallback, useEffect, useRef, useState } from "react" import { toast } from "sonner" @@ -26,30 +26,41 @@ const typeToClass: Record = { export const ConsolePanel = memo(function ConsolePanel() { const [logEntries, setLogEntries] = useState([]) + const [isLoading, setIsLoading] = useState(true) const { sketch_id } = useParams() const bottomRef = useRef(null) const { confirm } = useConfirm() const refetch = useCallback(async () => { - const { data, error } = await supabase - .from("logs") - .select("*") - .eq("sketch_id", sketch_id) - .order("created_at", { ascending: true }) - .limit(50) - if (error) { - console.error("Error fetching logs:", error) - return - } - if (data) { - const formattedLogs = data.map((log: any) => ({ - id: log.id, - type: log.type || "INFO", - timestamp: new Date(log.created_at).toLocaleTimeString(), - content: log.content, - created_at: log.created_at, - })) - setLogEntries(formattedLogs) + try { + setIsLoading(true) + const { data, error } = await supabase + .from("logs") + 
.select("*") + .eq("sketch_id", sketch_id) + .order("created_at", { ascending: true }) + .limit(50) + + if (error) { + toast.error("Failed to load logs") + return + } + + if (data) { + const formattedLogs = data.map((log: any) => ({ + id: log.id, + type: log.type || "INFO", + timestamp: new Date(log.created_at).toLocaleTimeString(), + content: log.content, + created_at: log.created_at, + })) + setLogEntries(formattedLogs) + } + } catch (error) { + console.error("Error in refetch:", error) + toast.error("An unexpected error occurred while loading logs") + } finally { + setIsLoading(false) } }, [sketch_id]) @@ -62,6 +73,7 @@ export const ConsolePanel = memo(function ConsolePanel() { if (!confirmed) return + setIsLoading(true) const { error } = await supabase .from("logs") .delete() @@ -73,9 +85,11 @@ export const ConsolePanel = memo(function ConsolePanel() { } toast.success("Logs cleared.") - refetch() + await refetch() } catch (e) { toast.error("An error occurred deleting the logs.") + } finally { + setIsLoading(false) } }, [confirm, sketch_id, refetch]) @@ -118,11 +132,16 @@ export const ConsolePanel = memo(function ConsolePanel() { return (
-

Console

+
+

Console

+ {isLoading && ( + + )} +
-
- {logEntries.length > 0 ? ( +
+ {isLoading && logEntries.length === 0 ? ( +
+ + Loading logs... +
+ ) : logEntries.length > 0 ? ( <> {logEntries.map((entry, index) => (
@@ -146,9 +170,9 @@ export const ConsolePanel = memo(function ConsolePanel() {
) : ( -
No logs available
+
No logs available for now.
)}
) -}) +}) \ No newline at end of file diff --git a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/left-panel.tsx b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/left-panel.tsx index 0db39fc..3663459 100644 --- a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/left-panel.tsx +++ b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/left-panel.tsx @@ -74,7 +74,7 @@ export const LeftPanel = memo(function LeftPanel({ isCollapsed, setIsCollapsed } {actionItems.map((item: ActionItem) => { if (item.children && item.children.length > 0) { return ( - +
diff --git a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/right-panel.tsx b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/right-panel.tsx index 28b4dd9..33dd47e 100644 --- a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/right-panel.tsx +++ b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/right-panel.tsx @@ -2,7 +2,7 @@ import { Button } from '@/components/ui/button' import { ResizablePanel, ResizablePanelGroup, ResizableHandle } from '@/components/ui/resizable' import { ChevronDown, ChevronLeft, InfoIcon } from 'lucide-react' import NodesPanel from '@/components/sketches/sketch/nodes-panel' -import ProfilePanel from '@/components/sketches/sketch/profile-panel' +import DetailsPanel from '@/components/sketches/sketch/details-panel' import { memo, useMemo, useRef } from 'react' import { useSketchStore } from '@/store/sketch-store' import { shallow } from 'zustand/shallow' @@ -47,7 +47,7 @@ export const RightPanel = memo(function RightPanel({ isCollapsed, isLoading, set > {!isCollapsed ? ( - {selectedNodes.length > 0 && + {/* {selectedNodes.length > 0 && <>
@@ -71,11 +71,11 @@ export const RightPanel = memo(function RightPanel({ isCollapsed, isLoading, set - } + } */} {currentNode && ( <> - + diff --git a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/settings-modal.tsx b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/settings-modal.tsx index e14de4d..538b0c5 100644 --- a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/settings-modal.tsx +++ b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/settings-modal.tsx @@ -12,7 +12,10 @@ import { useModalStore } from "@/store/store-settings" export default function SettingsModal() { const { isSettingsOpen, closeSettings } = useModalStore() - const { colors, setColor, resetAll } = useNodesDisplaySettings() + const colors = useNodesDisplaySettings(s => s.colors) + const setColor = useNodesDisplaySettings(s => s.setColor) + const resetAll = useNodesDisplaySettings(s => s.resetAll) + const [localColors, setLocalColors] = useState>({ ...colors }) const handleColorChange = (itemType: ItemType, color: string) => { diff --git a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/toolbar.tsx b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/toolbar.tsx index a8148d9..9208c2d 100644 --- a/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/toolbar.tsx +++ b/flowsint-web/src/app/dashboard/investigations/[investigation_id]/sketches/[sketch_id]/toolbar.tsx @@ -171,10 +171,10 @@ export const Toolbar = memo(function Toolbar({ users={sketchMembers} />
- + /> */}
diff --git a/flowsint-web/src/app/dashboard/layout.tsx b/flowsint-web/src/app/dashboard/layout.tsx index b377220..a495687 100644 --- a/flowsint-web/src/app/dashboard/layout.tsx +++ b/flowsint-web/src/app/dashboard/layout.tsx @@ -4,12 +4,12 @@ import { SidebarProvider, } from "@/components/ui/sidebar" import { redirect } from "next/navigation"; -import { FingerprintIcon } from "lucide-react"; import { Button } from "@/components/ui/button"; import { MainNav } from "@/components/dashboard/main-nav"; import { NavUser } from "@/components/nav-user"; import Feedback from "@/components/dashboard/feedback"; import Link from "next/link"; +import SecondaryNav from "@/components/dashboard/secondary-nav"; const DashboardLayout = async ({ children, @@ -44,14 +44,7 @@ const DashboardLayout = async ({ {/* */} -
-
- - - -
- -
+
{children} diff --git a/flowsint-web/src/app/dashboard/transforms/[transform_id]/page.tsx b/flowsint-web/src/app/dashboard/transforms/[transform_id]/page.tsx index 96e43b4..12134c8 100644 --- a/flowsint-web/src/app/dashboard/transforms/[transform_id]/page.tsx +++ b/flowsint-web/src/app/dashboard/transforms/[transform_id]/page.tsx @@ -17,6 +17,8 @@ export default async function EditorCustom({ const { transform_id } = await (params) const supabase = await createClient() const nodesData = await fetchNodes() - const { data: transform } = await supabase.from("transforms").select("*").eq("id", transform_id as string).single() - return + const { data: transform, error } = await supabase.from("transforms").select("*").eq("id", transform_id as string).single() + if (error || !transform) + return notFound() + return } \ No newline at end of file diff --git a/flowsint-web/src/app/dashboard/transforms/page.tsx b/flowsint-web/src/app/dashboard/transforms/page.tsx index 867de79..78ebbf8 100644 --- a/flowsint-web/src/app/dashboard/transforms/page.tsx +++ b/flowsint-web/src/app/dashboard/transforms/page.tsx @@ -1,20 +1,16 @@ -import { Badge } from "@/components/ui/badge" import { Button } from "@/components/ui/button" -import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from "@/components/ui/card" +import { Card, CardContent } from "@/components/ui/card" import { createClient } from "@/lib/supabase/server" -import { hexToRgba } from "@/lib/utils" -import { useNodesDisplaySettings } from "@/store/node-display-settings" -import { PlusCircle, FileCode2, ArrowRight, Calendar } from "lucide-react" +import { PlusCircle, FileCode2 } from "lucide-react" import Link from "next/link" -import { useMemo } from "react" -import { TransformItem } from "./transform-item" +import { TransformItem } from "@/components/transforms/transform-item" const TransformsPage = async () => { const supabase = await createClient() const { data: transforms } = await 
supabase.from("transforms").select("*") return ( -
+

Transforms {transforms?.length ? `(${transforms.length})` : ""}

@@ -32,7 +28,7 @@ const TransformsPage = async () => { !transforms?.length ? ( ) : ( -
+
{transforms.map((transform) => ( ))} diff --git a/flowsint-web/src/app/layout.tsx b/flowsint-web/src/app/layout.tsx index 6f0720b..0d922f0 100644 --- a/flowsint-web/src/app/layout.tsx +++ b/flowsint-web/src/app/layout.tsx @@ -9,6 +9,13 @@ import NextTopLoader from 'nextjs-toploader'; import { TooltipProvider } from "@/components/ui/tooltip"; import { NuqsAdapter } from 'nuqs/adapters/next/app' import { Toaster } from "@/components/ui/sonner" +import { Inter } from 'next/font/google' + +const inter = Inter({ + subsets: ['latin'], + display: 'swap', + variable: '--font-inter', +}) export const metadata: Metadata = { title: { @@ -34,15 +41,15 @@ export default function RootLayout({ children: React.ReactNode; }) { return ( - - - {/* {process.env.NODE_ENV === "development" && ( */} + + {/* + {process.env.NODE_ENV === "development" && (