The requirements file listed `sqlite3`, which is in the Python standard library and should not be pip installed.
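Concretely, only third-party packages belong in the requirements files the script writes; `sqlite3` (and the also-listed `logging`) ship with CPython, so `pip install` fails on them. A corrected write for the central service's requirements, sketched against the generator code below:

```python
# Only third-party packages; sqlite3 and logging are part of the standard library
write_file(f"{repo_name}/sai_central/requirements.txt", "fastapi\nuvicorn\npydantic\naiohttp\n")
```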
```python
import os
import subprocess
# -------------------------------
# CONFIG
# -------------------------------
repo_name = "Gadraid-AI-FullSystem-Final"
github_user = os.environ.get("GITHUB_USER")
github_token = os.environ.get("GITHUB_TOKEN")
num_nodes = int(os.environ.get("NUM_NODES", 50)) # default 50
node_prefix = "GADRAID-TH"
central_port = 8000
# -------------------------------
# UTILITIES
# -------------------------------
def create_folder(path):
    if not os.path.exists(path):
        os.makedirs(path)
        print(f"[Folder Created] {path}")

def write_file(path, content):
    with open(path, "w", encoding="utf-8") as f:
        f.write(content)
    print(f"[File Created] {path}")
# -------------------------------
# CREATE FOLDERS
# -------------------------------
folders = [
    repo_name,
    f"{repo_name}/sai_central",
    f"{repo_name}/nodes",
    f"{repo_name}/dashboard",
    f"{repo_name}/logs",
    f"{repo_name}/certs",
]
for folder in folders:
    create_folder(folder)
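# --- Hedged addition: the CMD below expects certs/key.pem and certs/cert.pem,
# but the script never creates them. A minimal sketch for generating a
# self-signed pair, assuming the openssl binary is available on PATH:
subprocess.run([
    "openssl", "req", "-x509", "-newkey", "rsa:2048", "-nodes",
    "-keyout", f"{repo_name}/certs/key.pem",
    "-out", f"{repo_name}/certs/cert.pem",
    "-days", "365", "-subj", "/CN=sai-central",
], check=False)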
# -------------------------------
# CENTRAL CODE (HTTPS enabled)
# -------------------------------
central_code = f"""
from fastapi import FastAPI, WebSocket
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
import asyncio, sqlite3, json, logging, os
from datetime import datetime

os.makedirs('logs', exist_ok=True)  # logging.basicConfig fails if the directory is missing
logging.basicConfig(filename='logs/central.log', level=logging.INFO)

app = FastAPI(title="S.A.I. Central Advanced")
# Allow the file:// dashboard page to fetch /api/events cross-origin
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"])

conn = sqlite3.connect('events.db', check_same_thread=False)
c = conn.cursor()
c.execute('CREATE TABLE IF NOT EXISTS events(timestamp TEXT, node_id TEXT, type TEXT, severity INTEGER, message TEXT)')
conn.commit()
class Event(BaseModel):
    node_id: str
    type: str
    severity: int
    message: str

events = []
clients = []

async def notify_clients(event):
    # Iterate over a copy so dead sockets can be removed while looping
    for ws in list(clients):
        try:
            await ws.send_json(event)
        except Exception:
            clients.remove(ws)
async def auto_backup():
    while True:
        with open('backup_events.json', 'w') as f:
            json.dump(events, f)
        await asyncio.sleep(60)

@app.on_event("startup")
async def startup():
    asyncio.create_task(auto_backup())
@app.post("/api/event")
async def post_event(event: Event):
    event_dict = event.dict()
    event_dict["timestamp"] = datetime.utcnow().isoformat()
    events.append(event_dict)
    c.execute("INSERT INTO events VALUES(?,?,?,?,?)",
              (event_dict["timestamp"], event_dict["node_id"], event_dict["type"],
               event_dict["severity"], event_dict["message"]))
    conn.commit()
    logging.info(f"Received Event: {{event_dict}}")
    await notify_clients(event_dict)
    return {{"status": "ok"}}

@app.get("/api/events")
async def get_events():
    return events
@app.websocket("/ws/events")
async def ws_events(ws: WebSocket):
    await ws.accept()
    clients.append(ws)
    try:
        while True:
            await ws.receive_text()
    except Exception:
        if ws in clients:
            clients.remove(ws)
"""
write_file(f"{repo_name}/sai_central/sai_central.py", central_code)
# Requirements + Dockerfile
write_file(f"{repo_name}/sai_central/requirements.txt","fastapi\nuvicorn\npydantic\naiohttp\nlogging\nsqlite3\n")
write_file(f"{repo_name}/sai_central/Dockerfile", f"""FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY sai_central.py .
COPY ../certs certs
EXPOSE {central_port}
CMD ["uvicorn","sai_central:app","--host","0.0.0.0","--port","{central_port}","--ssl-keyfile","certs/key.pem","--ssl-certfile","certs/cert.pem"]
""")
# -------------------------------
# NODE CODE FUNCTION
# -------------------------------
def generate_node_code(node_id):
    return f"""
from fastapi import FastAPI
from pydantic import BaseModel
import asyncio, aiohttp, logging, random, os

os.makedirs('logs', exist_ok=True)
logging.basicConfig(filename='logs/{node_id}.log', level=logging.INFO)

app = FastAPI(title="{node_id}")

class Event(BaseModel):
    node_id: str
    type: str
    severity: int
    message: str

SAI_URL = os.environ.get("SAI_URL", "https://sai-central:{central_port}/api/event")
NODE_ID = "{node_id}"

async def send_event():
    while True:
        event = Event(node_id=NODE_ID,
                      type=random.choice(["cyber_threat", "drone_alert", "ai_analysis", "system_error", "heartbeat"]),
                      severity=random.randint(1, 5),
                      message=random.choice(["Detected anomaly", "Simulated threat", "Node heartbeat", "System warning", "Resource spike detected"]))
        async with aiohttp.ClientSession() as session:
            try:
                # ssl=False: the central service uses a self-signed certificate
                await session.post(SAI_URL, json=event.dict(), ssl=False)
                logging.info(f"Sent Event: {{event.dict()}}")
            except Exception as e:
                logging.error(f"Send failed: {{e}}")
        await asyncio.sleep(random.randint(5, 15))

@app.on_event("startup")
async def startup():
    asyncio.create_task(send_event())
"""
# -------------------------------
# CREATE NODES + DOCKERFILES
# -------------------------------
node_docker_template = """FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY {node_file}.py .
EXPOSE 8001
CMD ["uvicorn","{node_file}:app","--host","0.0.0.0","--port","8001"]
"""
node_files = []
for i in range(num_nodes):
    node_id = f"{node_prefix}-{str(i+1).zfill(3)}"
    node_file_path = f"{repo_name}/nodes/{node_id}.py"
    write_file(node_file_path, generate_node_code(node_id))
    node_files.append(node_id)
    # Dockerfile per node
    write_file(f"{repo_name}/nodes/Dockerfile_{node_id}",
               node_docker_template.replace("{node_file}", node_id))

write_file(f"{repo_name}/nodes/requirements.txt", "fastapi\nuvicorn\naiohttp\npydantic\n")
# -------------------------------
# DASHBOARD HTTPS (simple)
# -------------------------------
dashboard_html = """<!DOCTYPE html>
<html>
<head>
<title>S.A.I. Dashboard</title>
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
</head>
<body>
<h1>S.A.I. Central Dashboard</h1>
<ul id="events"></ul>
<canvas id="severityChart"></canvas>
<script>
const ul = document.getElementById("events");
const ctx = document.getElementById("severityChart").getContext('2d');
const severityData = [0,0,0,0,0];
const chart = new Chart(ctx, {type:'bar',
  data:{labels:['S1','S2','S3','S4','S5'],
        datasets:[{label:'#Events', data:severityData,
                   backgroundColor:['green','lime','orange','red','darkred']}]}});
setInterval(async () => {
  const res = await fetch("https://localhost:8000/api/events");
  const data = await res.json();
  ul.innerHTML = "";
  severityData.fill(0);
  data.forEach(e => {
    ul.innerHTML += `<li>[${e.timestamp}] ${e.node_id} (S${e.severity}, ${e.type}): ${e.message}</li>`;
    severityData[e.severity - 1]++;
  });
  chart.update();
}, 5000);
</script>
</body>
</html>
"""
# NOTE: the browser must first accept the self-signed certificate at https://localhost:8000
write_file(f"{repo_name}/dashboard/dashboard.html", dashboard_html)
# -------------------------------
# DOCKER COMPOSE
# -------------------------------
compose_lines = ["version: '3.9'", "services:"]
compose_lines += [
    "  sai-central:",
    "    build: ./sai_central",
    "    container_name: sai_central",
    "    volumes:",
    "      - ./certs:/app/certs",
    "    ports:",
    f"      - '{central_port}:{central_port}'",
]
for node_id in node_files:
    compose_lines += [
        f"  {node_id}:",
        "    build:",  # dockerfile must be nested under build, not at service level
        "      context: ./nodes",
        f"      dockerfile: Dockerfile_{node_id}",
        f"    container_name: {node_id}",
        "    environment:",
        f"      - SAI_URL=https://sai-central:{central_port}/api/event",
        "    ports:",
        "      - '8001'",  # host port auto-assigned by Docker
    ]
compose_lines += ["networks:", "  default:", "    driver: bridge"]
write_file(f"{repo_name}/docker-compose.yml", "\n".join(compose_lines))
# -------------------------------
# README
# -------------------------------
readme_md = f"""# {repo_name}

Full Auto Advanced Gadraid System

- Nodes: {num_nodes}
- Central Advanced with Backup & Live Dashboard
- Dashboard: dashboard/dashboard.html

Run:

    docker-compose build && docker-compose up -d
"""
write_file(f"{repo_name}/README.md", readme_md)
# -------------------------------
# GIT PUSH
# -------------------------------
os.chdir(repo_name)
subprocess.run(["git", "init"])
subprocess.run(["git", "add", "."])
subprocess.run(["git", "commit", "-m", "Initial commit FullAuto Final with all Node Dockerfiles"])
subprocess.run(["git", "branch", "-M", "main"])
# Note: embedding the token in the remote URL leaves it readable in .git/config
repo_url = f"https://{github_user}:{github_token}@github.com/{github_user}/{repo_name}.git"
subprocess.run(["git", "remote", "add", "origin", repo_url])
subprocess.run(["git", "push", "-u", "origin", "main"])
print("✅ Full Generator Complete! All Nodes with individual Dockerfiles, Docker Compose runs everything, HTTPS Dashboard enabled.")