aboutsummaryrefslogtreecommitdiffstats
path: root/api/app
diff options
context:
space:
mode:
Diffstat (limited to 'api/app')
-rw-r--r--api/app/__init__.py0
-rw-r--r--api/app/alembic/README1
-rw-r--r--api/app/alembic/env.py67
-rw-r--r--api/app/alembic/script.py.mako25
-rw-r--r--api/app/alembic/versions/.keep0
-rw-r--r--api/app/alembic/versions/106dece647f0_add_oplog.py36
-rw-r--r--api/app/api/__init__.py0
-rw-r--r--api/app/api/deps.py18
-rw-r--r--api/app/api/main.py11
-rw-r--r--api/app/api/routes/__init__.py0
-rw-r--r--api/app/api/routes/devices.py19
-rw-r--r--api/app/api/routes/oplog.py32
-rw-r--r--api/app/api/routes/prometheus.py63
-rw-r--r--api/app/api/routes/public.py110
-rw-r--r--api/app/api/routes/read.py99
-rw-r--r--api/app/api/routes/utils.py7
-rw-r--r--api/app/core/__init__.py0
-rw-r--r--api/app/core/cache.py13
-rw-r--r--api/app/core/config.py65
-rw-r--r--api/app/core/db.py5
-rw-r--r--api/app/core/netbox.py13
-rw-r--r--api/app/core/prometheus.py0
-rw-r--r--api/app/main.py35
-rw-r--r--api/app/models/__init__.py0
-rw-r--r--api/app/models/config.py14
-rw-r--r--api/app/models/device.py47
-rw-r--r--api/app/models/network.py19
-rw-r--r--api/app/models/oplog.py21
-rw-r--r--api/app/models/ping.py14
-rw-r--r--api/app/models/snmp.py12
30 files changed, 746 insertions, 0 deletions
diff --git a/api/app/__init__.py b/api/app/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/api/app/__init__.py
diff --git a/api/app/alembic/README b/api/app/alembic/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/api/app/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration. \ No newline at end of file
diff --git a/api/app/alembic/env.py b/api/app/alembic/env.py
new file mode 100644
index 0000000..224c4ff
--- /dev/null
+++ b/api/app/alembic/env.py
@@ -0,0 +1,67 @@
+from logging.config import fileConfig
+
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+
+config = context.config
+fileConfig(config.config_file_name)
+
+from app.models.oplog import SQLModel # noqa
+from app.core.config import settings # noqa
+
+target_metadata = SQLModel.metadata
+
+
def get_url():
    """Return the SQLAlchemy database URL derived from application settings."""
    uri = settings.SQLALCHEMY_DATABASE_URI
    return str(uri)
+
+
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just a URL and no Engine, so a
    DBAPI does not even need to be installed; context.execute() calls
    emit the generated SQL to the script output instead of a database.
    """
    context.configure(
        url=get_url(),
        target_metadata=target_metadata,
        literal_binds=True,
        compare_type=True,
    )

    with context.begin_transaction():
        context.run_migrations()
+
+
def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine from the alembic.ini section (with the URL taken
    from application settings) and binds a live connection to the
    Alembic context before running the migrations.
    """
    section = config.get_section(config.config_ini_section)
    section["sqlalchemy.url"] = get_url()
    engine = engine_from_config(
        section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,
        )

        with context.begin_transaction():
            context.run_migrations()
+
+
# Alembic executes this module directly; pick the mode it asked for.
run = run_migrations_offline if context.is_offline_mode() else run_migrations_online
run()
diff --git a/api/app/alembic/script.py.mako b/api/app/alembic/script.py.mako
new file mode 100644
index 0000000..ff0b528
--- /dev/null
+++ b/api/app/alembic/script.py.mako
@@ -0,0 +1,25 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"} \ No newline at end of file
diff --git a/api/app/alembic/versions/.keep b/api/app/alembic/versions/.keep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/api/app/alembic/versions/.keep
diff --git a/api/app/alembic/versions/106dece647f0_add_oplog.py b/api/app/alembic/versions/106dece647f0_add_oplog.py
new file mode 100644
index 0000000..ad397ae
--- /dev/null
+++ b/api/app/alembic/versions/106dece647f0_add_oplog.py
@@ -0,0 +1,36 @@
+"""Add oplog
+
+Revision ID: 106dece647f0
+Revises:
+Create Date: 2025-04-11 17:16:18.611889
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel.sql.sqltypes
+
+
+# revision identifiers, used by Alembic.
+revision = '106dece647f0'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
def upgrade():
    """Forward migration: create the ``oplog`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('oplog',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('time', sa.Integer(), nullable=False),
    sa.Column('username', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('systems', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('message', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
+
+
def downgrade():
    """Reverse migration: drop the ``oplog`` table (data is lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('oplog')
    # ### end Alembic commands ###
diff --git a/api/app/api/__init__.py b/api/app/api/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/api/app/api/__init__.py
diff --git a/api/app/api/deps.py b/api/app/api/deps.py
new file mode 100644
index 0000000..473f9b4
--- /dev/null
+++ b/api/app/api/deps.py
@@ -0,0 +1,18 @@
+from collections.abc import Generator
+from sqlmodel import Session
+import redis
+
+from app.core.db import engine
+from app.core.cache import pool
+from app.core.netbox import nb
+
+
def get_db() -> Generator[Session, None, None]:
    """FastAPI dependency: yield a SQLModel session bound to the shared engine.

    The ``with`` block guarantees the session is closed once the request
    that depended on it has been handled.
    """
    with Session(engine) as session:
        yield session
+
def get_cache():
    """FastAPI dependency: a Redis client backed by the shared connection pool."""
    client = redis.Redis(connection_pool=pool)
    return client
+
def get_netbox():
    """FastAPI dependency: the module-level pynetbox API client."""
    return nb
diff --git a/api/app/api/main.py b/api/app/api/main.py
new file mode 100644
index 0000000..2c2571c
--- /dev/null
+++ b/api/app/api/main.py
@@ -0,0 +1,11 @@
+from fastapi import APIRouter
+
+from app.api.routes import utils, public, prometheus, oplog, read, devices
+
api_router = APIRouter()

# Mount every feature router on the shared API router, in the same
# order the routes were originally registered.
for feature_router in (
    utils.router,
    public.router,
    prometheus.router,
    oplog.router,
    read.router,
    devices.router,
):
    api_router.include_router(feature_router)
diff --git a/api/app/api/routes/__init__.py b/api/app/api/routes/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/api/app/api/routes/__init__.py
diff --git a/api/app/api/routes/devices.py b/api/app/api/routes/devices.py
new file mode 100644
index 0000000..d50b771
--- /dev/null
+++ b/api/app/api/routes/devices.py
@@ -0,0 +1,19 @@
+from fastapi import APIRouter, HTTPException, Depends
+
+from app.models.device import Placement
+from app.api.deps import get_netbox
+
+router = APIRouter(prefix="/v2/devices", tags=["devices"])
+
# set device placement
@router.post("/{device_name}/placement")
async def set_device_placement(
    device_name, placement: Placement, nb=Depends(get_netbox)
) -> Placement:
    """Store a placement rectangle on a device's NetBox custom field.

    Looks the device up by name, writes the placement into the
    ``gondul_placement`` custom field and pushes the change to NetBox.

    Raises:
        HTTPException: 404 if no device with that name exists.
    """
    device = nb.dcim.devices.get(name=device_name)
    if device is None:
        raise HTTPException(status_code=404, detail="Device not found")
    # model_dump() is the pydantic v2 API; .dict() is deprecated there
    # (the project already uses v2-style model_validate elsewhere).
    device["custom_fields"]["gondul_placement"] = placement.model_dump()
    nb.dcim.devices.update([device])

    return device["custom_fields"]["gondul_placement"]
diff --git a/api/app/api/routes/oplog.py b/api/app/api/routes/oplog.py
new file mode 100644
index 0000000..de1bee6
--- /dev/null
+++ b/api/app/api/routes/oplog.py
@@ -0,0 +1,32 @@
+import hashlib
+import time
+
+from fastapi import APIRouter, Request, Response, HTTPException, Depends
+from sqlmodel import select
+
+from app.api.deps import get_db
+from app.models.oplog import Oplog, OplogBase, OplogCreate
+from app.api.deps import get_db
+
+router = APIRouter(prefix="/v2/oplog", tags=["oplog"])
+
# oplog
@router.get("/")
async def list_oplog(request: Request, response: Response, db=Depends(get_db)) -> Oplog:
    """Return every oplog entry, with ETag-based client caching.

    The ETag is the md5 of the newest entry's timestamp, so it changes
    exactly when a new entry is written.  If the client already holds
    the current version (If-None-Match matches), a 304 is raised
    instead of re-sending the payload.
    """
    # Newest entry's time is the cache validator for the whole list.
    last = db.exec(select(OplogBase).order_by(OplogBase.time.desc())).first()
    updated = str(last.time) if last is not None else None
    etag = None
    if updated is not None:
        etag = hashlib.md5(updated.encode("utf-8")).hexdigest()
        response.headers["etag"] = etag
    if etag is not None and request.headers.get("If-None-Match") == etag:
        raise HTTPException(status_code=304)

    oplogs = db.exec(select(OplogBase)).all()
    return {"oplog": oplogs, "time": updated, "hash": etag}
+
@router.post("/")
async def create_oplog(oplog: OplogCreate, db=Depends(get_db)):
    """Persist a new oplog entry, stamping it with the current unix time.

    Returns the stored row -- including the database-generated ``id``
    and the server-assigned ``time`` -- rather than echoing the unsaved
    input back to the client.
    """
    entry = OplogBase.model_validate(oplog, update={"time": round(time.time())})
    db.add(entry)
    db.commit()
    # Re-read DB-generated fields (the primary key) before returning.
    db.refresh(entry)
    return entry
diff --git a/api/app/api/routes/prometheus.py b/api/app/api/routes/prometheus.py
new file mode 100644
index 0000000..02be2c9
--- /dev/null
+++ b/api/app/api/routes/prometheus.py
@@ -0,0 +1,63 @@
+from fastapi import APIRouter, Depends
+import json
+
+from app.api.deps import get_cache
+
+router = APIRouter(prefix="/v2/prometheus", tags=["prometheus"])
+
@router.get("/ping")
async def devices(cache=Depends(get_cache)):
    """Prometheus HTTP SD: one ping target per management address (v4, then v6)."""
    raw = cache.get("devices:data") if cache.exists("devices:data") else None
    devices = json.loads(raw) if raw is not None else {}

    targets = []
    for device in devices.values():
        # Emit the v4 entry first, then v6, matching the original order.
        for family, addr in (
            ("v4", device["mgmt_v4_addr"]),
            ("v6", device["mgmt_v6_addr"]),
        ):
            if addr:
                targets.append(
                    {
                        "targets": [addr],
                        "labels": {"sysname": device["sysname"], "type": family},
                    }
                )
    return targets
+
+
@router.get("/snmp")
async def snmp_targets(cache=Depends(get_cache)):
    """Prometheus HTTP SD: one SNMP target per device, preferring IPv6.

    Renamed from ``devices``: the previous name shadowed the ``/ping``
    handler defined above in the same module (flake8 F811).  The route
    path and response shape are unchanged.
    """
    output = []

    devices = (
        json.loads(cache.get("devices:data")) if cache.exists("devices:data") else {}
    )
    for device in devices.values():
        # Prefer the IPv6 management address; fall back to IPv4.
        address = device["mgmt_v6_addr"] or device["mgmt_v4_addr"]
        if address:
            output.append(
                {
                    "targets": [address],
                    "labels": {
                        "sysname": device["sysname"],
                        "platform": device["platform"],
                    },
                }
            )
    return output
diff --git a/api/app/api/routes/public.py b/api/app/api/routes/public.py
new file mode 100644
index 0000000..1f68760
--- /dev/null
+++ b/api/app/api/routes/public.py
@@ -0,0 +1,110 @@
+from fastapi import APIRouter, Request, Response, HTTPException, Depends
+import json
+import time
+import hashlib
+
+from app.core.config import settings
+from app.models.config import Config
+from app.models.device import PublicDevice, PublicDevices
+from app.models.ping import Ping
+from app.api.deps import get_cache
+
+router = APIRouter(prefix="/public", tags=["public"])
+
# config
@router.get("/config")
async def config(request: Request, response: Response) -> Config:
    """Public site configuration, with an ETag derived from its content."""
    # TODO Read from settings
    payload = {
        "sitename": f"{settings.PROJECT_NAME} - {settings.ENVIRONMENT}",
        "publicvhost": "example.gondul.tg.no",
        "public": False,
        "shortname": "tgX",
    }

    serialized = json.dumps(payload, sort_keys=True).encode("utf-8")
    etag = hashlib.md5(serialized).hexdigest()
    response.headers["etag"] = etag
    if request.headers.get("If-None-Match") == etag:
        raise HTTPException(status_code=304)
    return {"config": payload, "time": round(time.time()), "hash": etag}
+
# switches
@router.get("/switches")
async def devices(
    request: Request, response: Response, cache=Depends(get_cache)
) -> PublicDevices:
    """Public switch list, reduced to the PublicDevice field set.

    The cache's ``devices:updated`` timestamp acts as the ETag so
    clients can short-circuit with 304 when nothing changed.
    """
    updated = cache.get("devices:updated")
    etag = None
    if updated is not None:
        etag = hashlib.md5(updated.encode("utf-8")).hexdigest()
        response.headers["etag"] = etag
    if etag is not None and request.headers.get("If-None-Match") == etag:
        raise HTTPException(status_code=304)

    devices = (
        json.loads(cache.get("devices:data")) if cache.exists("devices:data") else {}
    )
    return {
        "switches": {
            device: {
                # model_fields is the pydantic v2 API; __fields__ is deprecated.
                key: devices[device][key]
                for key in PublicDevice.model_fields
            }
            for device in devices
        },
        "time": cache.get("devices:updated"),
        "hash": etag,
    }
+
# ping
@router.get("/ping")
async def ping(
    request: Request, response: Response, cache=Depends(get_cache)
) -> Ping:
    """Per-switch ping latency (ms) and sample age (s), ETag-cached."""
    updated = cache.get("ping:updated")
    etag = None
    if updated is not None:
        etag = hashlib.md5(updated.encode("utf-8")).hexdigest()
        response.headers["etag"] = etag
    if etag is not None and request.headers.get("If-None-Match") == etag:
        raise HTTPException(status_code=304)

    output = {}
    ping = json.loads(cache.get("ping:data")) if cache.exists("ping:data") else {}
    now = time.time()
    for device, sample in ping.items():
        rtt4 = sample.get("v4_rtt")
        rtt6 = sample.get("v6_rtt")
        t4 = sample.get("v4_time")
        t6 = sample.get("v6_time")
        output[device] = {
            # RTTs arrive in seconds; convert to milliseconds.
            "latency4": round(rtt4 * 1000, 2) if rtt4 is not None else None,
            "latency6": round(rtt6 * 1000, 2) if rtt6 is not None else None,
            # Guard against a null timestamp as well as a missing key --
            # the original only checked presence and raised TypeError on
            # an explicit null value.
            "age4": (now - t4) if t4 is not None else None,
            "age6": (now - t6) if t6 is not None else None,
        }
    return {"switches": output, "time": cache.get("ping:updated"), "hash": etag}
+
# Not implemented
@router.get("/switch-state")
async def switch_state():
    """Placeholder: switch-state reporting is not implemented yet."""
    return {}
diff --git a/api/app/api/routes/read.py b/api/app/api/routes/read.py
new file mode 100644
index 0000000..d810ff1
--- /dev/null
+++ b/api/app/api/routes/read.py
@@ -0,0 +1,99 @@
+from fastapi import APIRouter, Request, Response, HTTPException, Depends
+import json
+import time
+import hashlib
+
+
+from app.models.device import DeviceManagement, DevicesManagement
+from app.models.network import Network, Networks
+from app.models.snmp import Snmp
+from app.api.deps import get_cache
+
+router = APIRouter(prefix="/read", tags=["read"])
+
# switches-management
@router.get("/switches-management")
async def devices(
    request: Request, response: Response, cache=Depends(get_cache)
) -> DevicesManagement:
    """Management view of all switches, limited to DeviceManagement fields.

    The cache's ``devices:updated`` timestamp acts as the ETag.
    """
    updated = cache.get("devices:updated")
    etag = None
    if updated is not None:
        etag = hashlib.md5(updated.encode("utf-8")).hexdigest()
        response.headers["etag"] = etag
    # Guard etag against None: without it, an absent If-None-Match header
    # (None) compared equal to a None etag and wrongly returned 304
    # whenever the cache was empty.
    if etag is not None and request.headers.get("If-None-Match") == etag:
        raise HTTPException(status_code=304)

    devices = (
        json.loads(cache.get("devices:data")) if cache.exists("devices:data") else {}
    )
    return {
        "switches": {
            device: {
                # model_fields is the pydantic v2 API; __fields__ is deprecated.
                key: devices[device][key]
                for key in DeviceManagement.model_fields
            }
            for device in devices
        },
        "time": cache.get("devices:updated"),
        "hash": etag,
    }
+
+
# networks
@router.get("/networks")
async def networks(cache=Depends(get_cache)) -> Networks:
    """All cached networks, reduced to the Network field set (no ETag)."""
    nets = (
        json.loads(cache.get("networks:data")) if cache.exists("networks:data") else {}
    )
    return {
        "networks": {
            name: {
                # model_fields is the pydantic v2 API; __fields__ is deprecated.
                key: nets[name][key]
                for key in Network.model_fields
            }
            for name in nets
        },
        "time": cache.get("networks:updated"),
        "hash": "",
    }
+
+
# snmp
@router.get("/snmp")
async def snmp(request: Request, response: Response, cache=Depends(get_cache)) -> Snmp:
    """SNMP ports/misc data per device, ETag-cached on ``snmp:updated``."""
    updated = cache.get("snmp:updated")
    etag = None
    if updated is not None:
        etag = hashlib.md5(updated.encode("utf-8")).hexdigest()
        response.headers["etag"] = etag
    # Guard etag against None so an empty cache cannot produce a spurious
    # 304 (None compared equal to an absent If-None-Match header).
    if etag is not None and request.headers.get("If-None-Match") == etag:
        raise HTTPException(status_code=304)

    output = {}
    snmp_data = (
        json.loads(cache.get("snmp:data:data"))
        if cache.exists("snmp:data:data")
        else {}
    )
    ports_data = (
        json.loads(cache.get("snmp:ports:data"))
        if cache.exists("snmp:ports:data")
        else {}
    )
    for device in snmp_data:
        output[device] = {
            "ports": ports_data[device]["ports"] if device in ports_data else {},
            "misc": {
                "sysName": {"0": snmp_data[device]["sysName"]},
                # NOTE(review): key "" (unlike "0"/"1" below) looks
                # intentional for sysUpTimeInstance -- confirm consumers.
                "sysUpTimeInstance": {"": snmp_data[device]["sysUpTime"]},
                "sysDescr": {"0": snmp_data[device]["sysDescr"]},
                "entPhysicalSerialNum": {
                    "1": snmp_data[device]["entPhysicalSerialNum"]
                },
            },
        }
    return {"snmp": output, "time": cache.get("snmp:updated"), "hash": etag}
diff --git a/api/app/api/routes/utils.py b/api/app/api/routes/utils.py
new file mode 100644
index 0000000..af3c871
--- /dev/null
+++ b/api/app/api/routes/utils.py
@@ -0,0 +1,7 @@
+from fastapi import APIRouter
+
+router = APIRouter(prefix="/utils", tags=["utils"])
+
@router.get("/health-check/")
async def health_check() -> bool:
    """Liveness probe: returns true whenever the app is serving requests."""
    return True
diff --git a/api/app/core/__init__.py b/api/app/core/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/api/app/core/__init__.py
diff --git a/api/app/core/cache.py b/api/app/core/cache.py
new file mode 100644
index 0000000..09d03c8
--- /dev/null
+++ b/api/app/core/cache.py
@@ -0,0 +1,13 @@
+from app.core.config import settings
+import redis
+
def create_redis():
    """Build the process-wide Redis connection pool from settings.

    ``decode_responses=True`` makes clients return ``str`` rather than
    ``bytes``; the ETag code calls ``.encode()`` on cached values and
    relies on this.
    """
    return redis.ConnectionPool(
        host=settings.REDIS_SERVER,
        port=settings.REDIS_PORT,
        db=settings.REDIS_DB,
        decode_responses=True,
    )


# Shared pool; request handlers create lightweight clients from it.
pool = create_redis()
diff --git a/api/app/core/config.py b/api/app/core/config.py
new file mode 100644
index 0000000..d9174d8
--- /dev/null
+++ b/api/app/core/config.py
@@ -0,0 +1,65 @@
+from typing import Annotated, Any, Literal
+
+from pydantic import (
+ AnyUrl,
+ BeforeValidator,
+ PostgresDsn,
+ computed_field
+)
+from pydantic_core import MultiHostUrl
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+def parse_cors(v: Any) -> list[str] | str:
+ if isinstance(v, str) and not v.startswith("["):
+ return [i.strip() for i in v.split(",")]
+ elif isinstance(v, list | str):
+ return v
+ raise ValueError(v)
+
+
class Settings(BaseSettings):
    """Application settings, loaded from the environment and a ``.env`` file."""

    model_config = SettingsConfigDict(
        env_file=".env",
        env_ignore_empty=True,
        extra="ignore",
    )
    ENVIRONMENT: Literal["local", "production"] = "local"

    # Raw value may arrive as a comma-separated string; parse_cors
    # normalizes it before pydantic validates the URLs.
    BACKEND_CORS_ORIGINS: Annotated[
        list[AnyUrl] | str, BeforeValidator(parse_cors)
    ] = []

    @computed_field  # type: ignore[prop-decorator]
    @property
    def all_cors_origins(self) -> list[str]:
        # Trailing slash stripped so entries match the browser-sent
        # Origin header exactly.
        return [str(origin).rstrip("/") for origin in self.BACKEND_CORS_ORIGINS]

    PROJECT_NAME: str = "Gondul"
    POSTGRES_SERVER: str = "localhost"
    POSTGRES_PORT: int = 5432
    POSTGRES_USERNAME: str = "postgres"
    # No default: startup fails unless a database password is provided.
    POSTGRES_PASSWORD: str
    POSTGRES_DATABASE: str = "postgres"

    REDIS_SERVER: str = "localhost"
    REDIS_PORT: int = 6379
    REDIS_DB: int = 0

    NETBOX_URL: str = "http://localhost"
    NETBOX_TOKEN: str = ""

    @computed_field  # type: ignore[prop-decorator]
    @property
    def SQLALCHEMY_DATABASE_URI(self) -> PostgresDsn:
        # Built as a DSN (psycopg v3 driver) rather than string-formatted.
        return MultiHostUrl.build(
            scheme="postgresql+psycopg",
            username=self.POSTGRES_USERNAME,
            password=self.POSTGRES_PASSWORD,
            host=self.POSTGRES_SERVER,
            port=self.POSTGRES_PORT,
            path=self.POSTGRES_DATABASE,
        )
+
+
+settings = Settings() # type: ignore
diff --git a/api/app/core/db.py b/api/app/core/db.py
new file mode 100644
index 0000000..b67ca87
--- /dev/null
+++ b/api/app/core/db.py
@@ -0,0 +1,5 @@
+from sqlmodel import create_engine
+from app.core.config import settings
+
+
# Single shared SQLAlchemy engine for the whole process.
engine = create_engine(str(settings.SQLALCHEMY_DATABASE_URI))
diff --git a/api/app/core/netbox.py b/api/app/core/netbox.py
new file mode 100644
index 0000000..78a77c2
--- /dev/null
+++ b/api/app/core/netbox.py
@@ -0,0 +1,13 @@
+import os
+import pynetbox
+
+from app.core.config import settings
+
def setup_netbox():
    """Create the pynetbox API client configured from application settings."""
    api = pynetbox.api(
        settings.NETBOX_URL,
        token=settings.NETBOX_TOKEN,
        threading=True,
    )
    return api


# Module-level client, handed to routes via the get_netbox dependency.
nb = setup_netbox()
diff --git a/api/app/core/prometheus.py b/api/app/core/prometheus.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/api/app/core/prometheus.py
diff --git a/api/app/main.py b/api/app/main.py
new file mode 100644
index 0000000..e7bf524
--- /dev/null
+++ b/api/app/main.py
@@ -0,0 +1,35 @@
+import time
+
+from fastapi import FastAPI, Request
+from starlette.middleware.cors import CORSMiddleware
+from typing import Callable
+
+
+from app.api.main import api_router
+from app.core.config import settings
+
async def add_process_time_header(request: Request, call_next: Callable):
    """HTTP middleware: report total handling time via Server-Timing.

    Measures wall-clock time around the rest of the stack and attaches
    it, in milliseconds, as a ``Server-Timing: Total;dur=...`` header.
    """
    start_time = time.time()
    response = await call_next(request)
    process_time = (time.time() - start_time) * 1000
    # ":.6f" (no space): the original "{: .6f}" spec padded a space after
    # "dur=", producing "dur= 1.234", which strict Server-Timing parsers
    # may reject.
    response.headers["Server-Timing"] = f"Total;dur={process_time:.6f}"
    return response
+
# Application object; OpenAPI schema served under the /api prefix.
app = FastAPI(
    title=settings.PROJECT_NAME,
    openapi_url="/api/openapi.json"
)
# Timing middleware wraps every request (see add_process_time_header).
app.middleware("http")(add_process_time_header)

# Set all CORS enabled origins
if settings.all_cors_origins:
    app.add_middleware(
        CORSMiddleware,
        allow_origins=settings.all_cors_origins,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
        # Browsers must be allowed to read the etag header cross-origin
        # for the If-None-Match caching scheme to work.
        expose_headers=["etag"],
    )

app.include_router(api_router, prefix="/api")
diff --git a/api/app/models/__init__.py b/api/app/models/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/api/app/models/__init__.py
diff --git a/api/app/models/config.py b/api/app/models/config.py
new file mode 100644
index 0000000..e54599c
--- /dev/null
+++ b/api/app/models/config.py
@@ -0,0 +1,14 @@
+from pydantic import BaseModel
+
+
class ConfigData(BaseModel):
    """Site-level configuration payload served by /public/config."""

    sitename: str | None = None
    publicvhost: str | None = "public-gondul.tg.no"
    public: bool = False
    shortname: str | None = "tg25"
+
+
class Config(BaseModel):
    """Envelope for /public/config: payload plus cache metadata."""

    config: ConfigData
    time: int | None = None  # unix timestamp of generation
    hash: str | None = None  # ETag value for the payload
diff --git a/api/app/models/device.py b/api/app/models/device.py
new file mode 100644
index 0000000..95ed525
--- /dev/null
+++ b/api/app/models/device.py
@@ -0,0 +1,47 @@
+from pydantic import BaseModel
+import ipaddress
+
+
class Placement(BaseModel):
    """Rectangle (x/y position plus width/height) describing a device placement."""

    x: int
    y: int
    height: int
    width: int
+
+
class PublicDevice(BaseModel):
    """Subset of device fields exposed on the public /switches endpoint."""

    distro_name: str | None = None
    tags: list[str] = []
    placement: Placement | None = None
+
+
class PublicDevices(BaseModel):
    """Envelope for public /switches: devices keyed by name plus cache metadata."""

    switches: dict[str, PublicDevice]
    time: int | None = None  # cache "devices:updated" timestamp
    hash: str | None = None  # ETag value
+
+
class DeviceInterface(BaseModel):
    """A single device interface (currently unused -- see the commented
    ``interfaces`` field on DeviceManagement)."""

    name: str
    descr: str
    type: str
+
+
class DeviceManagement(BaseModel):
    """Management-plane view of a switch, served by /read/switches-management.

    NOTE(review): the concrete defaults ("e1-1") look like example values
    for API docs rather than meaningful fallbacks -- confirm.
    """

    sysname: str = "e1-1"
    serial: str | None = None
    platform: str | None = None
    mgmt_v4_addr: ipaddress.IPv4Address | None = None
    mgmt_v6_addr: ipaddress.IPv6Address | None = None
    mgmt_vlan: str | None = None
    traffic_vlan: str | None = None
    last_updated: str | None = None
    distro_name: str | None = None
    distro_phy_port: str | None = None
    # interfaces: dict[str, DeviceInterface]
+
+
class DevicesManagement(BaseModel):
    """Envelope for /read/switches-management: devices keyed by name."""

    switches: dict[str, DeviceManagement]
    time: int | None = None  # cache "devices:updated" timestamp
    hash: str | None = None  # ETag value
diff --git a/api/app/models/network.py b/api/app/models/network.py
new file mode 100644
index 0000000..ec567a5
--- /dev/null
+++ b/api/app/models/network.py
@@ -0,0 +1,19 @@
+from pydantic import BaseModel
+import ipaddress
+
+
+# name, vlan, networks.tags, switches.sysname as router, subnet4, subnet6, gw4, gw6
# name, vlan, networks.tags, switches.sysname as router, subnet4, subnet6, gw4, gw6
class Network(BaseModel):
    """A layer-3 network as served by /read/networks.

    Defaults use RFC documentation ranges (198.51.100.0/24, 2001:db8::/32)
    and appear to be example values for API docs.
    """

    name: str = "switches-mgmt"
    vlan: int | None = 1337
    tags: list[str] = []
    subnet4: ipaddress.IPv4Network | None = "198.51.100.0/24"
    subnet6: ipaddress.IPv6Network | None = "2001:db8:5b96::/64"
    gw4: ipaddress.IPv4Address | None = "198.51.100.1"
    gw6: ipaddress.IPv6Address | None = "2001:db8:5b96::1"
+
+
class Networks(BaseModel):
    """Envelope for /read/networks: networks keyed by name."""

    networks: dict[str, Network]
    time: int | None = None  # cache "networks:updated" timestamp
    hash: str | None = None  # ETag value ("" on this endpoint)
diff --git a/api/app/models/oplog.py b/api/app/models/oplog.py
new file mode 100644
index 0000000..f5c1d49
--- /dev/null
+++ b/api/app/models/oplog.py
@@ -0,0 +1,21 @@
+from pydantic import BaseModel
+from sqlmodel import Field, SQLModel
+
class OplogBase(SQLModel, table=True):
    """Database row for an operations-log entry."""

    __tablename__ = "oplog"

    # Optional until insert: the database assigns the primary key, so the
    # annotation must admit the None default (was ``int``).
    id: int | None = Field(default=None, primary_key=True)
    time: int  # unix timestamp, stamped server-side on creation
    username: str | None = None
    systems: str | None = None
    message: str
+
class Oplog(BaseModel):
    """Envelope for GET /v2/oplog: all entries plus cache metadata."""

    oplog: list[OplogBase]
    time: int | None = None  # newest entry's timestamp
    hash: str | None = None  # ETag value
+
class OplogCreate(BaseModel):
    """Request body for POST /v2/oplog; ``time`` and ``id`` are set server-side."""

    username: str | None = None
    systems: str | None = None
    message: str
diff --git a/api/app/models/ping.py b/api/app/models/ping.py
new file mode 100644
index 0000000..a2e8caf
--- /dev/null
+++ b/api/app/models/ping.py
@@ -0,0 +1,14 @@
+from pydantic import BaseModel, Field
+
+
class PingData(BaseModel):
    """Latency/age measurements for one switch (fields are required but
    nullable: Field(...) without a default makes each key mandatory)."""

    latency4: float | None = Field(description="In milliseconds")
    latency6: float | None = Field(description="In milliseconds")
    age4: float | None = Field(description="In seconds")
    age6: float | None = Field(description="In seconds")
+
+
class Ping(BaseModel):
    """Envelope for /public/ping: measurements keyed by switch name."""

    switches: dict[str, PingData]
    time: int | None = None  # cache "ping:updated" timestamp
    hash: str | None = None  # ETag value
diff --git a/api/app/models/snmp.py b/api/app/models/snmp.py
new file mode 100644
index 0000000..421b968
--- /dev/null
+++ b/api/app/models/snmp.py
@@ -0,0 +1,12 @@
+from pydantic import BaseModel
+
+
class Stats(BaseModel):
    """SNMP data for one device: port table plus miscellaneous scalars."""

    misc: dict | None = None
    ports: dict | None = None
+
+
class Snmp(BaseModel):
    """Envelope for /read/snmp: Stats keyed by device name."""

    snmp: dict[str, Stats]
    time: int | None = None  # cache "snmp:updated" timestamp
    hash: str | None = None  # ETag value