first setup
This commit is contained in:
commit
b2a5f85e4e
7 changed files with 745 additions and 0 deletions
4
README
Normal file
4
README
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
# geolog
|
||||||
|
Tool to monitor the internet background radiation.
|
||||||
|
|
||||||
|
This readme is still to be done.
|
45
access.log
Normal file
45
access.log
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
2025-06-01T08:15:23Z 192.0.2.1 GET /api/service1
|
||||||
|
2025-06-01T08:17:45Z 203.0.113.5 POST /api/service2
|
||||||
|
2025-06-01T08:18:10Z 198.51.100.42 GET /api/service3
|
||||||
|
2025-06-01T09:02:13Z 8.8.8.8 GET /api/service1
|
||||||
|
2025-06-01T09:15:55Z 1.1.1.1 POST /api/service2
|
||||||
|
2025-06-01T09:20:37Z 45.33.32.156 GET /api/service3
|
||||||
|
2025-06-02T11:05:11Z 91.198.174.192 GET /api/service1
|
||||||
|
2025-06-02T11:07:28Z 151.101.65.69 GET /api/service2
|
||||||
|
2025-06-02T11:09:50Z 104.244.42.1 GET /api/service3
|
||||||
|
2025-06-02T11:22:17Z 185.199.108.153 GET /api/service1
|
||||||
|
2025-06-03T14:01:01Z 203.0.113.25 POST /api/service1
|
||||||
|
2025-06-03T14:05:33Z 192.0.2.50 GET /api/service2
|
||||||
|
2025-06-03T14:15:10Z 203.0.113.77 GET /api/service3
|
||||||
|
2025-06-04T16:45:20Z 8.8.4.4 GET /api/service1
|
||||||
|
2025-06-04T16:50:55Z 1.0.0.1 POST /api/service2
|
||||||
|
2025-06-04T16:55:40Z 208.67.222.222 GET /api/service3
|
||||||
|
2025-06-05T10:10:10Z 208.67.220.220 GET /api/service1
|
||||||
|
2025-06-05T10:20:45Z 64.233.160.0 GET /api/service2
|
||||||
|
2025-06-05T10:30:30Z 17.172.224.47 GET /api/service3
|
||||||
|
2025-06-06T12:00:00Z 52.95.110.1 GET /api/service1
|
||||||
|
2025-06-06T12:05:05Z 13.107.246.45 GET /api/service2
|
||||||
|
2025-06-06T12:15:15Z 40.90.22.1 GET /api/service3
|
||||||
|
2025-06-07T18:30:25Z 23.45.67.89 GET /api/service1
|
||||||
|
2025-06-07T18:35:40Z 34.56.78.90 GET /api/service2
|
||||||
|
2025-06-07T18:40:55Z 56.78.90.12 GET /api/service3
|
||||||
|
2025-06-08T21:10:05Z 66.249.64.1 GET /api/service1
|
||||||
|
2025-06-08T21:15:30Z 172.217.10.78 GET /api/service2
|
||||||
|
2025-06-08T21:20:45Z 74.125.224.72 GET /api/service3
|
||||||
|
2025-06-09T06:25:12Z 157.240.20.35 GET /api/service1
|
||||||
|
2025-06-09T06:30:33Z 31.13.71.36 GET /api/service2
|
||||||
|
2025-06-09T06:40:40Z 69.63.176.13 GET /api/service3
|
||||||
|
2025-06-10T13:00:00Z 203.0.113.111 GET /api/service1
|
||||||
|
2025-06-10T13:05:20Z 192.0.2.200 GET /api/service2
|
||||||
|
2025-06-10T13:10:30Z 203.0.113.222 GET /api/service3
|
||||||
|
2025-06-11T09:45:50Z 91.121.0.1 GET /api/service1
|
||||||
|
2025-06-11T09:55:15Z 185.60.216.35 GET /api/service2
|
||||||
|
2025-06-11T10:05:30Z 216.58.214.14 GET /api/service3
|
||||||
|
2025-06-12T15:15:15Z 13.107.21.200 GET /api/service1
|
||||||
|
2025-06-12T15:20:20Z 40.112.72.205 GET /api/service2
|
||||||
|
2025-06-12T15:30:30Z 52.95.245.1 GET /api/service3
|
||||||
|
2025-06-12T15:30:30Z 52.95.245.1 GET /api/service3
|
||||||
|
2025-06-12T15:30:30Z 52.95.245.1 GET /api/service3
|
||||||
|
2025-06-12T15:30:30Z 52.95.245.1 GET /api/service3
|
||||||
|
2025-08-30T20:39:54Z 93.70.82.245 POST /api/v4/users/status/ids
|
||||||
|
2025-08-30T20:40:01Z 82.60.174.5 POST /api/v4/users/status/ids
|
152
backend.py
Normal file
152
backend.py
Normal file
|
@ -0,0 +1,152 @@
|
||||||
|
import os
|
||||||
|
import pandas as pd
|
||||||
|
import geoip2.database
|
||||||
|
from fastapi import FastAPI, Query
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
# ----------------------------
# FastAPI Setup
# ----------------------------
# Module-level application object.  CORS is wide open (all origins,
# methods, headers) — acceptable for a local tool; review before
# exposing this service publicly.
app = FastAPI(title="Reverse Proxy Connections Map API")

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"]
)

# ----------------------------
# GeoIP Setup
# ----------------------------
# Opened once at import time; assumes GeoLite2-City.mmdb sits in the
# working directory — TODO confirm the CWD at deploy time.
reader = geoip2.database.Reader("GeoLite2-City.mmdb")
geo_cache = {}  # cache IP lookups to save CPU
|
||||||
|
|
||||||
|
def ip_to_geo(ip):
    """Resolve *ip* to a ``(lat, lon)`` tuple, memoising in ``geo_cache``.

    Failed lookups are cached as ``(None, None)`` so the same bad address
    is never queried against the GeoIP database twice.
    """
    cached = geo_cache.get(ip)
    if cached is not None:
        return cached
    try:
        location = reader.city(ip).location
        result = (location.latitude, location.longitude)
    except Exception:
        result = (None, None)
    geo_cache[ip] = result
    return result
|
||||||
|
|
||||||
|
# ----------------------------
# Helper: Parse timestamp from line
# ----------------------------
def line_timestamp(line: str):
    """Parse the leading timestamp of a log line.

    The timestamp is everything before the first space.  Returns a pandas
    Timestamp, or ``None`` when the prefix is not a parseable datetime.
    """
    head, _, _ = line.partition(" ")
    try:
        return pd.to_datetime(head)
    except Exception:
        return None
|
||||||
|
|
||||||
|
# ----------------------------
# Binary search on lines
# ----------------------------
def find_line_index(lines, target_time, seek_start=True):
    """Binary-search *lines* (assumed timestamp-sorted) around *target_time*.

    seek_start=True  -> index of the first line with timestamp >= target.
    seek_start=False -> index of the last line with timestamp <= target.

    Falls back to 0 (start) or len(lines)-1 (end) when no line qualifies;
    callers re-filter per line, so an approximate index is acceptable.
    NOTE(review): the malformed-line branch discards half the remaining
    range per bad line, so results can drift if unparsable lines are
    common — fine for mostly-clean logs; confirm for your data.
    """
    lo, hi = 0, len(lines) - 1
    best_idx = None

    while lo <= hi:
        mid = (lo + hi) // 2
        ts = line_timestamp(lines[mid])
        if ts is None:
            # skip malformed line: move lo forward for start, hi backward for end
            if seek_start:
                lo = mid + 1
            else:
                hi = mid - 1
            continue

        if seek_start:
            if ts >= target_time:
                best_idx = mid
                hi = mid - 1  # search earlier
            else:
                lo = mid + 1  # search later
        else:
            if ts <= target_time:
                best_idx = mid
                lo = mid + 1  # search later
            else:
                hi = mid - 1  # search earlier

    # For end search, make sure we return the **last index ≤ target**
    if best_idx is None:
        return len(lines) - 1 if not seek_start else 0
    return best_idx
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# ----------------------------
# Load logs using binary search on lines
# ----------------------------
def load_logs_binary(service: Optional[str], start: Optional[str], end: Optional[str]):
    """Read access.log and return connection records within [start, end].

    Parameters are ISO-8601 strings or None for an open bound.  Each record
    carries timestamp/ip/path plus GeoIP lat/lon; rows whose IP cannot be
    geolocated are dropped.  NOTE(review): the whole file is read into
    memory before the binary search narrows the range — fine for small
    logs, revisit for large ones.
    """
    start_dt = pd.to_datetime(start) if start else None
    end_dt = pd.to_datetime(end) if end else None
    records = []

    with open("access.log", "r", errors="ignore") as f:
        lines = f.readlines()

    start_idx = find_line_index(lines, start_dt, seek_start=True) if start_dt else 0
    end_idx = find_line_index(lines, end_dt, seek_start=False) if end_dt else len(lines) - 1

    for line in lines[start_idx:end_idx+1]:
        try:
            parts = line.strip().split(" ", 3)
            if len(parts) != 4:
                continue
            timestamp, ip, method, path = parts
            ts = pd.to_datetime(timestamp)
            # Re-check bounds per row: the binary search is approximate
            # around malformed lines.
            if start_dt and ts < start_dt:
                continue
            if end_dt and ts > end_dt:
                break
            # Substring match, so e.g. "service1" matches "/api/service1".
            if service and service not in path:
                continue
            lat, lon = ip_to_geo(ip)
            if lat is None or lon is None:
                continue
            records.append({
                "timestamp": ts.isoformat(),
                "ip": ip,
                "path": path,
                "lat": lat,
                "lon": lon
            })
        except Exception:
            # Malformed row: skip it rather than abort the whole response.
            continue

    return records
|
||||||
|
|
||||||
|
# ----------------------------
# API Endpoint
# ----------------------------
@app.get("/connections")
def get_connections(
    service: Optional[str] = Query(None, description="Filter by service path"),
    start: Optional[str] = Query(None, description="Start datetime in ISO format"),
    end: Optional[str] = Query(None, description="End datetime in ISO format")
):
    """Return geolocated connection records, optionally filtered by
    service substring and/or an ISO time range."""
    return load_logs_binary(service, start, end)
|
||||||
|
|
||||||
|
# ----------------------------
# Healthcheck
# ----------------------------
@app.get("/health")
def health():
    """Liveness probe: reports the log size and GeoIP cache population."""
    size = os.path.getsize("access.log")
    return {"status": "ok", "log_size_bytes": size, "cached_ips": len(geo_cache)}
|
||||||
|
|
||||||
|
# ----------------------------
# Run with Uvicorn
# ----------------------------
if __name__ == "__main__":
    import uvicorn
    # reload=True is a development convenience; disable in production.
    uvicorn.run("backend:app", host="0.0.0.0", port=8000, reload=True)
|
88
backend.py.bacca
Normal file
88
backend.py.bacca
Normal file
|
@ -0,0 +1,88 @@
|
||||||
|
# backend.py
# NOTE(review): this is a superseded backup ("*.bacca") of backend.py —
# it loads and geolocates the entire log eagerly at import time.
from fastapi import FastAPI, Query
from fastapi.middleware.cors import CORSMiddleware
from typing import Optional
import pandas as pd
import geoip2.database
from datetime import datetime

# ----------------------------
# 1. Load Access Logs
# ----------------------------
logs = []
with open("access.log") as f:
    for line in f:
        # Example log format: "2025-08-28T12:34:56Z 192.0.2.1 GET /api/service1"
        parts = line.strip().split(" ", 3)
        if len(parts) != 4:
            continue  # skip malformed lines
        timestamp, ip, method, path = parts
        logs.append({
            "timestamp": timestamp,
            "ip": ip,
            "method": method,
            "path": path
        })
df = pd.DataFrame(logs)
df["timestamp"] = pd.to_datetime(df["timestamp"])
|
||||||
|
# ----------------------------
# 2. GeoIP Lookup
# ----------------------------
reader = geoip2.database.Reader("GeoLite2-City.mmdb")

def ip_to_geo(ip):
    """Return (lat, lon) for *ip*, or (None, None) when the lookup fails."""
    try:
        response = reader.city(ip)
        return response.location.latitude, response.location.longitude
    except Exception as e:
        print(f"GeoIP lookup failed for {ip}: {e}")
        return None, None

# Geolocate every row once at startup; unresolvable rows are dropped below.
df["lat"], df["lon"] = zip(*df["ip"].apply(ip_to_geo))
print(df)
df = df.dropna(subset=["lat", "lon"])
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# ----------------------------
# 3. FastAPI Setup
# ----------------------------
app = FastAPI(title="Reverse Proxy Connections Map API")

# Allow frontend to query API from any origin
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"]
)
|
||||||
|
|
||||||
|
# ----------------------------
# 4. API Endpoint
# ----------------------------
@app.get("/connections")
def get_connections(
    service: Optional[str] = Query(None, description="Filter by service path"),
    start: Optional[str] = Query(None, description="Start datetime in ISO format"),
    end: Optional[str] = Query(None, description="End datetime in ISO format")
):
    """Filter the preloaded dataframe by service substring and time range."""
    data = df.copy()

    if service:
        # Substring match on the request path.
        data = data[data["path"].str.contains(service)]

    if start:
        data = data[data["timestamp"] >= pd.to_datetime(start)]
    if end:
        data = data[data["timestamp"] <= pd.to_datetime(end)]
    return data[["timestamp", "path", "lat", "lon"]].to_dict(orient="records")
|
||||||
|
|
||||||
|
# ----------------------------
# 5. Healthcheck Endpoint
# ----------------------------
@app.get("/health")
def health():
    """Liveness probe: reports the number of preloaded connections."""
    return {"status": "ok", "total_connections": len(df)}
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    import uvicorn
    # Development entry point; reload=True watches for source changes.
    uvicorn.run("backend:app", host="0.0.0.0", port=8000, reload=True)
|
276
index.html
Normal file
276
index.html
Normal file
|
@ -0,0 +1,276 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>Connections Animation Map</title>
|
||||||
|
<link rel="stylesheet" href="https://unpkg.com/leaflet/dist/leaflet.css"/>
|
||||||
|
<style>
|
||||||
|
body { margin: 0; font-family: Arial, sans-serif; }
|
||||||
|
#controls {
|
||||||
|
position: absolute;
|
||||||
|
top: 10px;
|
||||||
|
left: 48px;
|
||||||
|
z-index: 1000;
|
||||||
|
background: rgba(255,255,255,0.95);
|
||||||
|
padding: 8px;
|
||||||
|
border-radius: 8px;
|
||||||
|
box-shadow: 0 2px 6px rgba(0,0,0,0.3);
|
||||||
|
max-width: 320px;
|
||||||
|
font-size: 13px;
|
||||||
|
pointer-events: auto;
|
||||||
|
}
|
||||||
|
#controls label { display: block; margin: 6px 0; }
|
||||||
|
#map { height: 100vh; width: 100vw; }
|
||||||
|
.fade-marker { transition: opacity 1s linear; opacity: 1; }
|
||||||
|
.fade-out { opacity: 0 !important; }
|
||||||
|
.arc-path { transition: opacity 1s linear; opacity: 1; stroke-width: 2; }
|
||||||
|
.arc-fade { opacity: 0 !important; }
|
||||||
|
#timeDisplay { font-weight: 600; margin-left: 6px; }
|
||||||
|
.small { font-size: 12px; color: #333; }
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div id="controls">
|
||||||
|
<label>Service: <input type="text" id="service" placeholder="/api/service1"></label>
|
||||||
|
<label>Start: <input type="datetime-local" id="start"></label>
|
||||||
|
<label>End: <input type="datetime-local" id="end"></label>
|
||||||
|
<label>Duration (seconds): <input type="number" id="duration" value="10" min="1"></label>
|
||||||
|
|
||||||
|
<label class="small">Time:
|
||||||
|
<input type="range" id="timeSlider" min="0" max="100" value="0" style="width: 220px;">
|
||||||
|
<span id="timeDisplay">—</span>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<div style="margin-top:6px;">
|
||||||
|
<button id="startBtn">Start Simulation</button>
|
||||||
|
<button id="stopBtn">Stop</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div id="map"></div>
|
||||||
|
|
||||||
|
<script src="https://unpkg.com/leaflet/dist/leaflet.js"></script>
|
||||||
|
<script>
|
||||||
|
// Fixed location used as the server-side endpoint of every drawn arc.
const OUR_COORDS = [37.7749, -122.4194];
const MAP_CENTER = [20, 0];
const MAP_ZOOM = 2;

var map = L.map('map').setView(MAP_CENTER, MAP_ZOOM);
L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
    // NOTE(review): "©" looks like a mis-encoded "©" — confirm the
    // file's encoding before changing the runtime string.
    attribution: "© OpenStreetMap contributors"
}).addTo(map);

// Permanent marker + label for the server itself.
const serverMarker = L.circleMarker(OUR_COORDS, {
    radius: 7,
    color: 'red',
    fillColor: 'red',
    fillOpacity: 1
}).addTo(map);

serverMarker.bindTooltip("SRV", {
    permanent: true,
    direction: "right",
    offset: [8, 0],
    className: "srv-label"
}).openTooltip();

// Separate layers so markers and arcs can be cleared independently.
var markersLayer = L.layerGroup().addTo(map);
var arcsLayer = L.layerGroup().addTo(map);

const startBtn = document.getElementById('startBtn');
const stopBtn = document.getElementById('stopBtn');
const timeSlider = document.getElementById('timeSlider');
const timeDisplay = document.getElementById('timeDisplay');

// Animation state: fetched events, pending timers, and the simulated
// start/end of the current run (epoch milliseconds).
let allData = [];
let animTimers = [];
let isPlaying = false;
let simStartMs = 0, simEndMs = 0;
|
||||||
|
|
||||||
|
// Query the backend /connections endpoint; every filter is optional.
async function fetchConnections(service, start, end) {
    let url = `http://localhost:8000/connections?`;
    if (service) url += `service=${encodeURIComponent(service)}&`;
    if (start) url += `start=${new Date(start).toISOString()}&`;
    if (end) url += `end=${new Date(end).toISOString()}&`;
    let response = await fetch(url);
    return await response.json();
}
|
||||||
|
|
||||||
|
// Convert degrees latitude/longitude to a unit vector [x, y, z] on the sphere.
function latLonToVec(lat, lon) {
    const phi = lat * Math.PI / 180;
    const lambda = lon * Math.PI / 180;
    const cosPhi = Math.cos(phi);
    return [cosPhi * Math.cos(lambda), cosPhi * Math.sin(lambda), Math.sin(phi)];
}
|
||||||
|
// Convert a 3-D vector back to [lat, lon] in degrees.
function vecToLatLon(v) {
    const [x, y, z] = v;
    const lonRad = Math.atan2(y, x);
    const latRad = Math.atan2(z, Math.sqrt(x * x + y * y));
    return [latRad * 180 / Math.PI, lonRad * 180 / Math.PI];
}
|
||||||
|
// Spherical linear interpolation between unit vectors a and b at fraction t.
function slerp(a, b, t) {
    let dot = a[0]*b[0] + a[1]*b[1] + a[2]*b[2];
    dot = Math.min(1, Math.max(-1, dot));  // clamp into acos's domain
    const omega = Math.acos(dot);
    // Nearly-identical vectors: return an endpoint to avoid sin(omega)≈0.
    if (Math.abs(omega) < 1e-6) return a;
    const so = Math.sin(omega);
    const c1 = Math.sin((1 - t) * omega) / so;
    const c2 = Math.sin(t * omega) / so;
    return [c1 * a[0] + c2 * b[0], c1 * a[1] + c2 * b[1], c1 * a[2] + c2 * b[2]];
}
|
||||||
|
// Sample a great-circle arc between two points, bulging it outward by
// heightFactor so overlapping arcs stay visually distinct on the map.
function computeArcPoints(lat1, lon1, lat2, lon2, segments = 60, heightFactor = 0.2) {
    const v1 = latLonToVec(lat1, lon1);
    const v2 = latLonToVec(lat2, lon2);
    const pts = [];
    for (let i = 0; i <= segments; i++) {
        const t = i / segments;
        let vi = slerp(v1, v2, t);
        // Bulge peaks mid-arc; sin is zero at both endpoints.
        const bulge = Math.sin(Math.PI * t) * heightFactor;
        const len = Math.sqrt(vi[0]*vi[0] + vi[1]*vi[1] + vi[2]*vi[2]);
        vi = [vi[0]/len * (1 + bulge), vi[1]/len * (1 + bulge), vi[2]/len * (1 + bulge)];
        pts.push(vecToLatLon(vi));
    }
    return pts;
}
|
||||||
|
|
||||||
|
// Draw a marker + arc for one connection, then fade and remove both after
// visibleTimeMs.  Returns the outer timeout id so the caller can cancel it.
function createMarkerAndArc(conn, visibleTimeMs = 2500) {
    const lat = conn.lat, lon = conn.lon;
    const marker = L.circleMarker([lat, lon], { radius: 5, fillOpacity: 0.9 })
        .bindPopup(`<b>IP:</b> ${conn.ip}<br><b>Service:</b> ${conn.path}<br><b>Time:</b> ${conn.timestamp}`)
        .addTo(markersLayer);

    const arcPoints = computeArcPoints(lat, lon, OUR_COORDS[0], OUR_COORDS[1], 48, 0.18);
    const arc = L.polyline(arcPoints, { color: '#0077ff', weight: 2, opacity: 1, className: 'arc-path' })
        .addTo(arcsLayer);

    // _path is Leaflet's private SVG element; it may be undefined until the
    // layer has rendered, so every access is guarded.
    const markerElem = marker._path;
    if (markerElem) markerElem.classList.add('fade-marker');
    const arcElem = arc._path;
    if (arcElem) arcElem.classList.add('arc-path');

    const t1 = setTimeout(() => {
        if (markerElem) markerElem.classList.add('fade-out');
        if (arcElem) arcElem.classList.add('arc-fade');
        // Remove the layers once the 1s CSS fade has completed.
        setTimeout(() => {
            try { markersLayer.removeLayer(marker); } catch(e){}
            try { arcsLayer.removeLayer(arc); } catch(e){}
        }, 1000);
    }, visibleTimeMs);

    return t1;
}
|
||||||
|
|
||||||
|
// Cancel every pending animation timer and wipe both overlay layers.
function clearAll() {
    for (const timer of animTimers) {
        clearTimeout(timer);
    }
    animTimers = [];
    markersLayer.clearLayers();
    arcsLayer.clearLayers();
    isPlaying = false;
}
|
||||||
|
|
||||||
|
// === NEW: 24H format display ===
// Render *ms* (epoch milliseconds) as "YYYY-MM-DD HH:MM:SS" in local time,
// or an em-dash placeholder when no valid time is available.
function updateTimeDisplay(ms) {
    if (!ms || isNaN(ms)) {
        timeDisplay.textContent = '—';
        return;
    }
    const d = new Date(ms);
    const pad = n => n.toString().padStart(2,'0');
    timeDisplay.textContent = `${d.getFullYear()}-${pad(d.getMonth()+1)}-${pad(d.getDate())} ` +
        `${pad(d.getHours())}:${pad(d.getMinutes())}:${pad(d.getSeconds())}`;
}
|
||||||
|
|
||||||
|
// Scrub view: redraw every event with timestamp <= ms as static markers
// and arcs (no fade animation), then update the clock readout.
function drawUpTo(ms) {
    markersLayer.clearLayers();
    arcsLayer.clearLayers();
    const subset = allData.filter(ev => new Date(ev.timestamp).getTime() <= ms);
    subset.forEach(ev => {
        const m = L.circleMarker([ev.lat, ev.lon], { radius: 5, fillOpacity: 0.9 })
            .bindPopup(`<b>IP:</b> ${ev.ip}<br><b>Service:</b> ${ev.path}<br><b>Time:</b> ${ev.timestamp}`)
            .addTo(markersLayer);
        const arcP = computeArcPoints(ev.lat, ev.lon, OUR_COORDS[0], OUR_COORDS[1], 36, 0.12);
        L.polyline(arcP, { color: '#0077ff', weight: 2, opacity: 0.6 })
            .addTo(arcsLayer);
    });
    updateTimeDisplay(ms);
}
|
||||||
|
|
||||||
|
// Format a Date for an <input type="datetime-local"> value (minute precision).
function formatDatetimeLocal(d) {
    const two = (n) => String(n).padStart(2, '0');
    const datePart = `${d.getFullYear()}-${two(d.getMonth() + 1)}-${two(d.getDate())}`;
    const timePart = `${two(d.getHours())}:${two(d.getMinutes())}`;
    return `${datePart}T${timePart}`;
}
|
||||||
|
|
||||||
|
// Fetch events for the chosen range and schedule the fade-in/out animation
// evenly across the requested wall-clock duration.
async function startSimulation() {
    clearAll();
    const service = document.getElementById("service").value;
    let startVal = document.getElementById("start").value;
    let endVal = document.getElementById("end").value;
    const durationSec = parseFloat(document.getElementById("duration").value) || 10;

    // No range given: default to the last two weeks and reflect it in the UI.
    if (!startVal && !endVal) {
        const now = new Date();
        const twoWeeksAgo = new Date(now.getTime() - 1000*60*60*24*14);
        startVal = formatDatetimeLocal(twoWeeksAgo);
        endVal = formatDatetimeLocal(now);
        document.getElementById("start").value = startVal;
        document.getElementById("end").value = endVal;
    } else if (!startVal || !endVal) {
        alert('Please set both start & end times');
        return;
    }

    const startMs = new Date(startVal).getTime();
    const endMs = new Date(endVal).getTime();
    if (endMs <= startMs) { alert('End must be after start'); return; }
    if ((endMs - startMs) > 1000*60*60*24*90) { alert('Max 3 months'); return; }

    const data = await fetchConnections(service, startVal, endVal);
    if (!data.length) { alert('No data'); return; }

    // Chronological order so markers appear in event order.
    allData = data.slice().sort((a,b) => new Date(a.timestamp)-new Date(b.timestamp));
    simStartMs = startMs; simEndMs = endMs;

    timeSlider.min = simStartMs;
    timeSlider.max = simEndMs;
    timeSlider.value = simStartMs;
    updateTimeDisplay(simStartMs);

    isPlaying = true;
    const totalEvents = allData.length;
    const totalMs = durationSec * 1000;
    // Events are spread evenly over the duration rather than proportionally
    // to their real timestamps — NOTE(review): confirm this is intended.
    const baseInterval = totalMs / totalEvents;

    allData.forEach((ev, idx) => {
        const scheduledAt = Math.round(idx * baseInterval);
        const visibleTime = Math.max(600, Math.min(4000, totalMs/8));
        const tshow = setTimeout(() => {
            const timer = createMarkerAndArc(ev, visibleTime);
            animTimers.push(timer);
            // Map animation progress back onto simulated time for the slider.
            const simTime = simStartMs + (scheduledAt/totalMs)*(simEndMs - simStartMs);
            timeSlider.value = simTime;
            updateTimeDisplay(simTime);
        }, scheduledAt);
        animTimers.push(tshow);
    });

    const finishTimer = setTimeout(() => { isPlaying = false; }, totalMs+2000);
    animTimers.push(finishTimer);
}
|
||||||
|
|
||||||
|
// Halt the animation and rewind the slider to the simulation start.
function stopSimulation() {
    clearAll();
    if (simStartMs) {
        timeSlider.value = simStartMs;
        updateTimeDisplay(simStartMs);
    }
}
|
||||||
|
|
||||||
|
// Dragging the slider cancels any running animation and scrubs statically.
timeSlider.addEventListener('input', e => {
    if (isPlaying) clearAll();
    drawUpTo(parseInt(e.target.value,10));
});

startBtn.addEventListener('click', startSimulation);
stopBtn.addEventListener('click', stopSimulation);
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
114
index.html.bacca
Normal file
114
index.html.bacca
Normal file
|
@ -0,0 +1,114 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>Connections Animation Map</title>
|
||||||
|
<link rel="stylesheet" href="https://unpkg.com/leaflet/dist/leaflet.css"/>
|
||||||
|
<style>
|
||||||
|
body { margin: 0; font-family: Arial, sans-serif; }
|
||||||
|
#controls {
|
||||||
|
position: absolute; top: 10px; left: 10px; z-index: 1000;
|
||||||
|
background: white; padding: 10px; border-radius: 8px;
|
||||||
|
box-shadow: 0 2px 6px rgba(0,0,0,0.3);
|
||||||
|
}
|
||||||
|
#map { height: 100vh; width: 100vw; }
|
||||||
|
label { font-size: 14px; display: block; margin-top: 5px; }
|
||||||
|
.fade-marker {
|
||||||
|
transition: opacity 1s linear;
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
.fade-out {
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div id="controls">
|
||||||
|
<label>Service: <input type="text" id="service" placeholder="/api/service1"></label>
|
||||||
|
<label>Start: <input type="datetime-local" id="start"></label>
|
||||||
|
<label>End: <input type="datetime-local" id="end"></label>
|
||||||
|
<label>Duration (seconds): <input type="number" id="duration" value="10"></label>
|
||||||
|
<button onclick="startSimulation()">Start Simulation</button>
|
||||||
|
</div>
|
||||||
|
<div id="map"></div>
|
||||||
|
|
||||||
|
<script src="https://unpkg.com/leaflet/dist/leaflet.js"></script>
|
||||||
|
<script>
|
||||||
|
// NOTE(review): superseded backup ("*.bacca") of index.html's script.
// Basic world map centred on the equator.
var map = L.map('map').setView([20, 0], 2);
L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
    attribution: "© OpenStreetMap contributors"
}).addTo(map);

var markersLayer = L.layerGroup().addTo(map);
|
||||||
|
|
||||||
|
// Query the backend /connections endpoint; every filter is optional.
async function fetchConnections(service, start, end) {
    let url = `http://localhost:8000/connections?`;
    if (service) url += `service=${encodeURIComponent(service)}&`;
    if (start) url += `start=${new Date(start).toISOString()}&`;
    if (end) url += `end=${new Date(end).toISOString()}&`;

    let response = await fetch(url);
    return await response.json();
}
|
||||||
|
|
||||||
|
// Add a red marker that stays visible ~2s, fades for 1s, then is removed.
function createFadingMarker(lat, lon, popupContent) {
    var marker = L.circleMarker([lat, lon], {
        radius: 6,
        color: 'red',
        fillOpacity: 0.8
    }).bindPopup(popupContent);

    marker.addTo(markersLayer);

    // apply fade effect after a delay
    setTimeout(() => {
        let elem = marker._path; // SVG circle element
        if (elem) {
            elem.classList.add("fade-marker");
            elem.classList.add("fade-out");
        }
        // remove after fade
        setTimeout(() => markersLayer.removeLayer(marker), 1000);
    }, 2000); // marker stays for 2s before fading
}
|
||||||
|
|
||||||
|
// Fetch connections and replay them evenly spaced over *duration* seconds.
async function startSimulation() {
    markersLayer.clearLayers();

    let service = document.getElementById("service").value;
    let start = document.getElementById("start").value;
    let end = document.getElementById("end").value;
    let duration = parseInt(document.getElementById("duration").value) || 10;

    let data = await fetchConnections(service, start, end);

    if (!data.length) {
        alert("No data found in this range.");
        return;
    }

    // Sort by timestamp
    data.sort((a, b) => new Date(a.timestamp) - new Date(b.timestamp));

    // NOTE(review): with empty start/end inputs this is NaN, and NaN > limit
    // is false, so the range check is silently skipped — confirm intent.
    let totalTime = (new Date(end) - new Date(start)) / 1000; // total seconds
    if (totalTime > 60 * 60 * 24 * 90) {
        alert("Range too long (max 3 months).");
        return;
    }

    let stepInterval = duration * 1000 / data.length; // ms between markers

    data.forEach((conn, i) => {
        setTimeout(() => {
            createFadingMarker(
                conn.lat, conn.lon,
                `<b>IP:</b> ${conn.ip}<br>` +
                `<b>Service:</b> ${conn.path}<br>` +
                `<b>Time:</b> ${conn.timestamp}`
            );
        }, i * stepInterval);
    });
}
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
66
tail_service.py
Normal file
66
tail_service.py
Normal file
|
@ -0,0 +1,66 @@
|
||||||
|
import re
import ipaddress
from datetime import datetime, timezone
import time

# Source nginx log and the simplified output consumed by the backend.
ACCESS_LOG = "/var/log/nginx/access.log"
OUTPUT_LOG = "./file.log"

# RFC 1918 private ranges; requests from these addresses are dropped.
INTERNAL_NETWORKS = [
    ipaddress.ip_network("10.0.0.0/8"),
    ipaddress.ip_network("192.168.0.0/16"),
    ipaddress.ip_network("172.16.0.0/12"),
]

# Matches the nginx combined-log prefix up to and including the request line.
log_line_re = re.compile(
    r'(?P<ip>\S+) - - \[(?P<time>[^\]]+)\] "(?P<method>\S+) (?P<path>\S+) \S+"'
)
|
||||||
|
|
||||||
|
def is_external(ip):
    """Return True when *ip* lies outside every configured internal network."""
    addr = ipaddress.ip_address(ip)
    return all(addr not in net for net in INTERNAL_NETWORKS)
|
||||||
|
|
||||||
|
def parse_nginx_line(line):
    """Convert one nginx access-log line into the simplified format
    ``"<ISO-UTC time> <ip> <method> <path>"``.

    Returns None for lines that do not match the expected log format or
    that originate from an internal (RFC 1918) address.
    """
    match = log_line_re.match(line)
    if not match:
        return None
    data = match.groupdict()
    if not is_external(data["ip"]):
        return None
    # nginx stamp, e.g. "30/Aug/2025:20:39:54 +0000" -> normalised UTC ISO.
    dt = datetime.strptime(data["time"], "%d/%b/%Y:%H:%M:%S %z")
    dt_utc = dt.astimezone(timezone.utc)
    iso_time = dt_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
    return f'{iso_time} {data["ip"]} {data["method"]} {data["path"]}'
|
||||||
|
|
||||||
|
def tail(f):
    """Yield lines appended to open file *f* forever (like ``tail -f``).

    Busy-polls with a short sleep when no new data is available; never
    returns.
    """
    f.seek(0, 2)  # Go to the end of the file
    while True:
        line = f.readline()
        if not line:
            time.sleep(0.01)  # Sleep very briefly
            continue
        yield line
|
||||||
|
|
||||||
|
def main():
    """Follow ACCESS_LOG, convert external requests to the simplified
    format, and append them to OUTPUT_LOG in batches.

    Batching (by count or by elapsed time) limits write syscalls under
    high request rates.  NOTE(review): lines buffered but not yet flushed
    are lost if the process dies — confirm that is acceptable.
    """
    buffer = []
    buffer_size = 10  # adjust for your throughput
    flush_interval = 0.5  # seconds

    with open(ACCESS_LOG, "r") as f:
        tail_lines = tail(f)
        last_flush = time.time()
        for line in tail_lines:
            parsed = parse_nginx_line(line)
            if parsed:
                buffer.append(parsed)
            # Flush buffer if size reached or interval passed
            if len(buffer) >= buffer_size or (time.time() - last_flush) > flush_interval:
                if buffer:
                    with open(OUTPUT_LOG, "a") as out:
                        out.write("\n".join(buffer) + "\n")
                    buffer.clear()
                last_flush = time.time()

if __name__ == "__main__":
    main()
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue