From e2ad2dab1ba338f315932b0060e26decfa135270 Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 5 Jun 2025 12:35:49 +0300 Subject: [PATCH 01/43] Added initial files structure --- .gitignore | 1 + README.md | 73 +++++++++++ alembic.ini | 121 ++++++++++++++++++ alembic/README | 1 + alembic/env.py | 76 +++++++++++ alembic/script.py.mako | 28 ++++ .../569035a81799_initial_migration.py | 34 +++++ ...hanged_defult_language_to_ar_in_country.py | 38 ++++++ ...82e57154c5_removed_columns_from_country.py | 34 +++++ config/database.py | 20 +++ controllers/country_controller.py | 31 +++++ dockerfile | 13 ++ dtos/country_dto.py | 8 ++ dtos/pagination_dto.py | 7 + main.py | 12 ++ models/country_model.py | 13 ++ repositories/country_repository.py | 17 +++ requirements.txt | 7 + scripts/apply_migrations.sh | 39 ++++++ scripts/setup_database.sh | 47 +++++++ services/country_service.py | 14 ++ utils/pagination.py | 54 ++++++++ 22 files changed, 688 insertions(+) create mode 100644 README.md create mode 100644 alembic.ini create mode 100644 alembic/README create mode 100644 alembic/env.py create mode 100644 alembic/script.py.mako create mode 100644 alembic/versions/569035a81799_initial_migration.py create mode 100644 alembic/versions/59fba5ae8f0b_changed_defult_language_to_ar_in_country.py create mode 100644 alembic/versions/bd82e57154c5_removed_columns_from_country.py create mode 100644 config/database.py create mode 100644 controllers/country_controller.py create mode 100644 dockerfile create mode 100644 dtos/country_dto.py create mode 100644 dtos/pagination_dto.py create mode 100644 main.py create mode 100644 models/country_model.py create mode 100644 repositories/country_repository.py create mode 100644 requirements.txt create mode 100755 scripts/apply_migrations.sh create mode 100755 scripts/setup_database.sh create mode 100644 services/country_service.py create mode 100644 utils/pagination.py diff --git a/.gitignore b/.gitignore index 7b004e5..41b92af 100644 --- a/.gitignore +++ 
b/.gitignore @@ -35,6 +35,7 @@ MANIFEST # Installer logs pip-log.txt pip-delete-this-directory.txt +notes.txt # Unit test / coverage reports htmlcov/ diff --git a/README.md b/README.md new file mode 100644 index 0000000..4bac52e --- /dev/null +++ b/README.md @@ -0,0 +1,73 @@ +# FastAPI Application + +This project is IHR FastAPI-based application. You can run it either in a **Python virtual environment** or using **Docker**. + +--- + +## Running the Application + +You can run this application in two ways: +1. **Using a Python Virtual Environment** +2. **Using Docker** + +--- + +## 1. Running in a Virtual Environment + +### **1️⃣ Create and Activate a Virtual Environment** +#### On Windows (Command Prompt or PowerShell): +```sh +python3 -m venv venv +venv\Scripts\activate +``` +#### On macOS/Linux: +```sh +python3 -m venv venv +source venv/bin/activate +``` + +### **2️⃣ Install Dependencies** +```sh +pip install -r requirements.txt +``` + +### **3️⃣ Run the FastAPI Application** +```sh +uvicorn main:app --host 0.0.0.0 --port 8000 --reload +``` + +### **4️⃣ Access the API** +Once running, you can access: +- API: **[http://localhost:8000/ihr/api](http://localhost:8000/ihr/api)** +- Interactive Docs (Swagger UI): **[http://localhost:8000/ihr/api/docs](http://localhost:8000/docs)** +- Redoc Docs: **[http://localhost:8000/ihr/api/redoc](http://localhost:8000/redoc)** + +--- + +## 🐳 2. Running with Docker + +### **1️⃣ Build the Docker Image** +```sh +docker build -t ihr-fastapi . 
+``` + +### **2️⃣ Run the Container** +```sh +docker run -p 8000:8000 ihr-fastapi +``` + +### **3️⃣ start the Container** +```sh +docker start +# Attach to the logs +docker logs -f +``` + + +### **3️⃣ Access the API** +Once running, you can access: +- API: **[http://localhost:8000/ihr/api](http://localhost:8000/ihr/api)** +- Interactive Docs (Swagger UI): **[http://localhost:8000/ihr/api/docs](http://localhost:8000/docs)** +- Redoc Docs: **[http://localhost:8000/ihr/api/redoc](http://localhost:8000/redoc)** + + diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..914efe5 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,121 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library. 
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. 
+version_path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = postgresql://postgres:123password456@localhost:5435/ihr +# user:django , password:123password456,database:ihr +#user:django-timescaledb + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. 
\ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..48f1363 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,76 @@ +import os +import sys +from logging.config import fileConfig +from sqlalchemy import engine_from_config, pool +from alembic import context +from config.database import Base +import importlib.util +import pathlib + +# Get Alembic config +config = context.config + +# Setup logging +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Automatically discover and import models +models_path = pathlib.Path(__file__).parent.parent / "models" +sys.path.append(str(models_path.parent)) # Ensure parent directory is in path + +for file in models_path.glob("*.py"): + if file.name != "__init__.py": + module_name = f"models.{file.stem}" + importlib.import_module(module_name) + +# Set target metadata to Base +target_metadata = Base.metadata + + +def include_object(object, name, type_, reflected, compare_to): + # Prevent dropping tables and indexes + if reflected and compare_to is None: + if type_ in ("table", "index"): + return False # Don't drop it + return True + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode.""" + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + include_object=include_object, + compare_type=True, # detect type changes (e.g. 
from Int to BigInt) + # detect default value changes of a column(will apply to newly inserted records) + compare_server_default=True + ) + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, + target_metadata=target_metadata, + include_object=include_object, + compare_type=True, + compare_server_default=True) + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..480b130 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/569035a81799_initial_migration.py b/alembic/versions/569035a81799_initial_migration.py new file mode 100644 index 0000000..87b1748 --- /dev/null +++ b/alembic/versions/569035a81799_initial_migration.py @@ -0,0 +1,34 @@ +"""Initial migration + +Revision ID: 569035a81799 +Revises: +Create Date: 2025-06-05 11:10:26.127839 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '569035a81799' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('ihr_country', sa.Column('continent', sa.String(length=50), nullable=True)) + op.add_column('ihr_country', sa.Column('language', sa.String(length=50), server_default=sa.text("'EN'"), nullable=False)) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('ihr_country', 'language') + op.drop_column('ihr_country', 'continent') + # ### end Alembic commands ### diff --git a/alembic/versions/59fba5ae8f0b_changed_defult_language_to_ar_in_country.py b/alembic/versions/59fba5ae8f0b_changed_defult_language_to_ar_in_country.py new file mode 100644 index 0000000..1a42708 --- /dev/null +++ b/alembic/versions/59fba5ae8f0b_changed_defult_language_to_ar_in_country.py @@ -0,0 +1,38 @@ +"""Changed defult language to AR in Country + +Revision ID: 59fba5ae8f0b +Revises: 569035a81799 +Create Date: 2025-06-05 11:14:35.640233 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '59fba5ae8f0b' +down_revision: Union[str, None] = '569035a81799' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('ihr_country', 'language', + existing_type=sa.VARCHAR(length=50), + server_default=sa.text("'AR'"), + existing_nullable=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column('ihr_country', 'language', + existing_type=sa.VARCHAR(length=50), + server_default=sa.text("'EN'::character varying"), + existing_nullable=False) + # ### end Alembic commands ### diff --git a/alembic/versions/bd82e57154c5_removed_columns_from_country.py b/alembic/versions/bd82e57154c5_removed_columns_from_country.py new file mode 100644 index 0000000..8d4f6e2 --- /dev/null +++ b/alembic/versions/bd82e57154c5_removed_columns_from_country.py @@ -0,0 +1,34 @@ +"""Removed columns from country + +Revision ID: bd82e57154c5 +Revises: 59fba5ae8f0b +Create Date: 2025-06-05 11:17:52.058567 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'bd82e57154c5' +down_revision: Union[str, None] = '59fba5ae8f0b' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('ihr_country', 'language') + op.drop_column('ihr_country', 'continent') + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('ihr_country', sa.Column('continent', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) + op.add_column('ihr_country', sa.Column('language', sa.VARCHAR(length=50), server_default=sa.text("'AR'::character varying"), autoincrement=False, nullable=False)) + # ### end Alembic commands ### diff --git a/config/database.py b/config/database.py new file mode 100644 index 0000000..1a8365b --- /dev/null +++ b/config/database.py @@ -0,0 +1,20 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker, declarative_base +import os +from dotenv import load_dotenv + +load_dotenv() + +DATABASE_URL = "postgresql://ihr_bash_user:ihr_password@localhost:5434/ihr_bash" + +engine = create_engine(DATABASE_URL) +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) +Base = declarative_base() + +# Dependency for FastAPI routes +def get_db(): + db = SessionLocal() + try: + yield db + finally: + db.close() diff --git a/controllers/country_controller.py b/controllers/country_controller.py new file mode 100644 index 0000000..13747c8 --- /dev/null +++ b/controllers/country_controller.py @@ -0,0 +1,31 @@ +from fastapi import APIRouter, Depends, Query, Request +from sqlalchemy.orm import Session +from services.country_service import CountryService +from utils.pagination import PaginatedResponse, paginate_and_order +from dtos.country_dto import CountryDTO +from config.database import get_db +from typing import Optional +from dtos.pagination_dto import PaginationParams + + +router = APIRouter(prefix="/countries", tags=["Countries"]) + + +class CountryController: + service = CountryService() + + @staticmethod + @router.get("/", response_model=PaginatedResponse[CountryDTO]) + def get_all_countries( + request: Request, + db: Session = Depends(get_db), + pagination: PaginationParams = Depends(), # Generic pagination params + code: Optional[str] = Query( + None, description="Filter by country code"), + name: Optional[str] = 
Query( + None, description="Search by country name (substring)") + ): + """Retrieves all countries with optional filters.""" + countries = CountryController.service.get_all_countries( + db, code=code, name=name) + return paginate_and_order(countries, request, pagination.page, pagination.ordering) diff --git a/dockerfile b/dockerfile new file mode 100644 index 0000000..d3d1482 --- /dev/null +++ b/dockerfile @@ -0,0 +1,13 @@ +FROM python:3.13-slim +RUN apt-get update && apt-get install -y libpq-dev gcc && rm -rf /var/lib/apt/lists/* +WORKDIR /app +COPY requirements.txt . +# Install dependencies +RUN pip install --no-cache-dir -r requirements.txt +# Copy the rest of the application code +COPY . . +# Expose FastAPI default port +EXPOSE 8000 +# Command to run FastAPI with Uvicorn +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] + diff --git a/dtos/country_dto.py b/dtos/country_dto.py new file mode 100644 index 0000000..268f47d --- /dev/null +++ b/dtos/country_dto.py @@ -0,0 +1,8 @@ +from pydantic import BaseModel + +class CountryDTO(BaseModel): + code: str + name: str + + class Config: + from_attributes = True diff --git a/dtos/pagination_dto.py b/dtos/pagination_dto.py new file mode 100644 index 0000000..edbe4a4 --- /dev/null +++ b/dtos/pagination_dto.py @@ -0,0 +1,7 @@ +from typing import Optional +from fastapi import Query +from pydantic import BaseModel + +class PaginationParams(BaseModel): + page: int = Query(1, ge=1, description="Page number") + ordering: Optional[str] = Query(None, description="Field to order by") diff --git a/main.py b/main.py new file mode 100644 index 0000000..32ba25e --- /dev/null +++ b/main.py @@ -0,0 +1,12 @@ +import importlib +import pkgutil +from fastapi import FastAPI +from controllers import __path__ as controllers_path # Adjusted for `ihr` structure + +app = FastAPI(root_path="/ihr/api") + +# Automatically import and register all routers inside "ihr/controllers" +for _, module_name, _ in 
pkgutil.iter_modules(controllers_path): + module = importlib.import_module(f"controllers.{module_name}") + if hasattr(module, "router"): + app.include_router(module.router) diff --git a/models/country_model.py b/models/country_model.py new file mode 100644 index 0000000..791e478 --- /dev/null +++ b/models/country_model.py @@ -0,0 +1,13 @@ +from sqlalchemy import Column, String, Boolean,text +from config.database import Base + + +class Country(Base): + __tablename__ = "ihr_country" + + code = Column(String(4), primary_key=True) + name = Column(String(255), nullable=False) + tartiflette = Column(Boolean, default=False, nullable=False) + disco = Column(Boolean, default=False, nullable=False) + #continent = Column(String(50), nullable=True) + #language = Column(String(50), nullable=False, server_default=text("'AR'")) diff --git a/repositories/country_repository.py b/repositories/country_repository.py new file mode 100644 index 0000000..cdec9c7 --- /dev/null +++ b/repositories/country_repository.py @@ -0,0 +1,17 @@ +from sqlalchemy.orm import Session +from models.country_model import Country +from typing import Optional, List + + +class CountryRepository: + def get_all(self, db: Session, code: Optional[str] = None, name: Optional[str] = None) -> List[Country]: + """Retrieves countries, optionally filtering by code and name substring.""" + query = db.query(Country) + + if code: + query = query.filter(Country.code == code) + + if name: + query = query.filter(Country.name.ilike(f"%{name}%")) + + return query.all() diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..20a8242 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,7 @@ +fastapi==0.115.11 +pydantic==2.10.6 +python-dotenv==1.0.1 +SQLAlchemy==2.0.38 +uvicorn==0.34.0 +psycopg2==2.9.10 +alembic==1.15.1 \ No newline at end of file diff --git a/scripts/apply_migrations.sh b/scripts/apply_migrations.sh new file mode 100755 index 0000000..2a7af6e --- /dev/null +++ b/scripts/apply_migrations.sh 
@@ -0,0 +1,39 @@ +#!/bin/bash + +set -e # Exit immediately if a command fails + +# Get the script's directory +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR/.." || { echo "Error: Project directory not found!"; exit 1; } + +VERSIONS_DIR="alembic/versions" + +# Check if the versions directory exists and contains migration files +if [ ! -d "$VERSIONS_DIR" ] || ! find "$VERSIONS_DIR" -mindepth 1 | read; then + echo "⚠️ No migration files found. Initializing Alembic migration..." + alembic revision --autogenerate -m "Initial migration" + alembic upgrade head + echo "Database initialized with first migration." + exit 0 +fi + +echo "Checking for model changes..." +MIGRATION_OUTPUT=$(alembic revision --autogenerate -m "Auto migration" 2>&1) + +# Check if Alembic detected changes +if echo "$MIGRATION_OUTPUT" | grep -q "No changes detected"; then + echo "No changes detected. Skipping migration." + exit 0 +fi + +# Extract the new migration file name +NEW_MIGRATION_FILE=$(echo "$MIGRATION_OUTPUT" | grep -oE "alembic/versions/[0-9a-f]+_.*\.py" | tail -n 1) + +if [ -z "$NEW_MIGRATION_FILE" ]; then + echo "No valid migration file found after autogenerate. Skipping upgrade." + exit 1 +fi + +echo "Applying migration: $NEW_MIGRATION_FILE" +alembic upgrade head +echo "Migration applied successfully." diff --git a/scripts/setup_database.sh b/scripts/setup_database.sh new file mode 100755 index 0000000..5fe2272 --- /dev/null +++ b/scripts/setup_database.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +# Load environment variables +DB_NAME=${DB_NAME:-"ihr_bash"} +DB_USER=${DB_USER:-"ihr_bash_user"} +DB_PASSWORD=${DB_PASSWORD:-"ihr_password"} +DB_HOST=${DB_HOST:-"localhost"} +DB_PORT=${DB_PORT:-5434} +ADMIN_USER=${ADMIN_USER:-"django"} # Superuser for setup + +echo "Setting up PostgreSQL database..." + +# Ensure PostgreSQL service is running +if ! 
pg_isready -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" > /dev/null 2>&1; then + echo "Error: PostgreSQL is not running on $DB_HOST:$DB_PORT" + exit 1 +fi + +# Check if the database exists +DB_EXISTS=$(psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'") +if [ "$DB_EXISTS" != "1" ]; then + echo "Creating database $DB_NAME..." + psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -c "CREATE DATABASE \"$DB_NAME\";" \ + && echo "Database $DB_NAME created." +else + echo "Database $DB_NAME already exists." +fi + +# Check if the user exists +USER_EXISTS=$(psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -tAc "SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'") +if [ "$USER_EXISTS" != "1" ]; then + echo "Creating user $DB_USER..." + psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -c "CREATE USER \"$DB_USER\" WITH PASSWORD '$DB_PASSWORD';" \ + && echo "User $DB_USER created." +else + echo "User $DB_USER already exists." +fi + +# Grant privileges +echo "Setting permissions..." +psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -c "GRANT ALL PRIVILEGES ON DATABASE \"$DB_NAME\" TO \"$DB_USER\";" +psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -c "ALTER DATABASE \"$DB_NAME\" OWNER TO \"$DB_USER\";" + +# Grant schema privileges inside the new database +psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d "$DB_NAME" -c "GRANT ALL ON SCHEMA public TO \"$DB_USER\";" + +echo "Database setup completed." 
diff --git a/services/country_service.py b/services/country_service.py new file mode 100644 index 0000000..1f693d8 --- /dev/null +++ b/services/country_service.py @@ -0,0 +1,14 @@ +from sqlalchemy.orm import Session +from repositories.country_repository import CountryRepository +from dtos.country_dto import CountryDTO +from typing import Optional, List + + +class CountryService: + def __init__(self): + self.repository = CountryRepository() + + def get_all_countries(self, db: Session, code: Optional[str] = None, name: Optional[str] = None) -> List[CountryDTO]: + """Fetches all countries, applying filters if provided.""" + countries = self.repository.get_all(db, code, name) + return [CountryDTO(code=c.code, name=c.name) for c in countries] diff --git a/utils/pagination.py b/utils/pagination.py new file mode 100644 index 0000000..4394fbf --- /dev/null +++ b/utils/pagination.py @@ -0,0 +1,54 @@ +from typing import TypeVar, List, Optional, Callable, Generic, Any +from fastapi import Request +from urllib.parse import urlencode, urlunparse +from pydantic import BaseModel + +T = TypeVar("T") + +class PaginatedResponse(BaseModel, Generic[T]): + count: int + next: Optional[str] + previous: Optional[str] + results: List[T] + + +def build_url(request: Request, page: Optional[int]) -> Optional[str]: + if page is None: + return None + query_params = dict(request.query_params) + query_params["page"] = str(page) + return urlunparse(( + request.url.scheme, + request.url.netloc, + request.url.path, + "", + urlencode(query_params), + "" + )) + +def paginate_and_order( + items: List[Any], + request: Request, + page: int, + order_by: Optional[Callable[[Any], Any]] = None +) -> PaginatedResponse: + # If order_by is provided, but is not callable (i.e., it's a string), convert it to a callable + if order_by: + if not callable(order_by): + # Assume order_by is the attribute name to sort by. 
+ order_field = order_by + order_by = lambda x: getattr(x, order_field) + items = sorted(items, key=order_by) + total_count = len(items) + page_size = 5 + offset = (page - 1) * page_size + paginated_items = items[offset : offset + page_size] + next_page = page + 1 if offset + page_size < total_count else None + prev_page = page - 1 if page > 1 else None + + return PaginatedResponse( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=paginated_items + ) From 7270b7413f7a00ee6cab75fb66118bda24b014df Mon Sep 17 00:00:00 2001 From: ibraam Date: Sun, 8 Jun 2025 20:45:55 +0300 Subject: [PATCH 02/43] Modified env.py to deal with hypertables --- alembic.ini | 2 +- alembic/env.py | 152 ++++++++++++++++-- .../083c114aab2c_initial_migration.py | 64 ++++++++ .../569035a81799_initial_migration.py | 34 ---- ...hanged_defult_language_to_ar_in_country.py | 38 ----- ...82e57154c5_removed_columns_from_country.py | 34 ---- models/asn_model.py | 17 ++ models/hegemonycone.py | 40 +++++ 8 files changed, 262 insertions(+), 119 deletions(-) create mode 100644 alembic/versions/083c114aab2c_initial_migration.py delete mode 100644 alembic/versions/569035a81799_initial_migration.py delete mode 100644 alembic/versions/59fba5ae8f0b_changed_defult_language_to_ar_in_country.py delete mode 100644 alembic/versions/bd82e57154c5_removed_columns_from_country.py create mode 100644 models/asn_model.py create mode 100644 models/hegemonycone.py diff --git a/alembic.ini b/alembic.ini index 914efe5..84adbe4 100644 --- a/alembic.ini +++ b/alembic.ini @@ -64,7 +64,7 @@ version_path_separator = os # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = postgresql://postgres:123password456@localhost:5435/ihr +sqlalchemy.url = postgresql://postgres:123password456@localhost:5435/ihr-fastapi # user:django , password:123password456,database:ihr #user:django-timescaledb diff --git a/alembic/env.py b/alembic/env.py index 
48f1363..25aae28 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -6,6 +6,7 @@ from config.database import Base import importlib.util import pathlib +from alembic.operations import ops # Get Alembic config config = context.config @@ -14,18 +15,37 @@ if config.config_file_name is not None: fileConfig(config.config_file_name) -# Automatically discover and import models +# Modify the model discovery section models_path = pathlib.Path(__file__).parent.parent / "models" -sys.path.append(str(models_path.parent)) # Ensure parent directory is in path +sys.path.append(str(models_path.parent)) +model_classes = [] + +# First pass: Load all models for file in models_path.glob("*.py"): if file.name != "__init__.py": module_name = f"models.{file.stem}" - importlib.import_module(module_name) + module = importlib.import_module(module_name) + for attr_name in dir(module): + attr = getattr(module, attr_name) + if isinstance(attr, type) and issubclass(attr, Base) and attr != Base: + model_classes.append(attr) -# Set target metadata to Base +# Set target metadata target_metadata = Base.metadata +# Second pass: Associate metadata (hypertable and indexes) with table objects +for model in model_classes: + table = target_metadata.tables.get(model.__tablename__) + if table is not None: + # Associate hypertable metadata + if hasattr(model, '__hypertable__'): + setattr(table, '__hypertable__', model.__hypertable__) + + # Associate indexes metadata + if hasattr(model, '__indexes__'): + setattr(table, '__indexes__', model.__indexes__) + def include_object(object, name, type_, reflected, compare_to): # Prevent dropping tables and indexes @@ -35,6 +55,112 @@ def include_object(object, name, type_, reflected, compare_to): return True +def process_revision_directives(context, revision, directives): + """Called during 'alembic revision --autogenerate'""" + if directives[0].upgrade_ops is not None: + # Process create table operations and inject hypertable commands + process_ops( + context, 
directives[0].upgrade_ops, directives[0].downgrade_ops) + + +def create_hypertable_ops(table_name, hypertable_meta, is_existing=False): + """Generate hypertable creation operations.""" + upgrade_ops = [] + downgrade_ops = [] + + time_col = hypertable_meta['time_column'] + chunk_interval = hypertable_meta.get('chunk_time_interval', '1 day') + + # Create hypertable with migrate_data for existing tables + hypertable_sql = ( + f"SELECT create_hypertable('{table_name}', by_range('{time_col}', INTERVAL '{chunk_interval}'));" + ) + + upgrade_ops.append(ops.ExecuteSQLOp(hypertable_sql)) + + # Handle compression + if hypertable_meta.get('compress', False): + segment_by = hypertable_meta.get('compress_segmentby', '') + order_by = hypertable_meta.get('compress_orderby', time_col) + compress_sql = ( + f"ALTER TABLE {table_name} SET (" + f"timescaledb.compress, " + f"timescaledb.compress_segmentby = '{segment_by}', " + f"timescaledb.compress_orderby = '{order_by}'" + f");" + ) + upgrade_ops.append(ops.ExecuteSQLOp(compress_sql)) + downgrade_ops.append( + ops.ExecuteSQLOp( + f"ALTER TABLE {table_name} SET (timescaledb.compress = false);" + ) + ) + + # Handle compression policy + if hypertable_meta.get('compress_policy', False): + compress_after = hypertable_meta.get('compress_after', '7 days') + policy_sql = ( + f"SELECT add_compression_policy('{table_name}', " + f"INTERVAL '{compress_after}');" + ) + upgrade_ops.append(ops.ExecuteSQLOp(policy_sql)) + downgrade_ops.append( + ops.ExecuteSQLOp( + f"SELECT remove_compression_policy('{table_name}', if_exists => TRUE);" + ) + ) + + return upgrade_ops, downgrade_ops + + +def create_index_ops(table_name, indexes_meta): + """Generate index creation operations.""" + upgrade_ops = [] + downgrade_ops = [] + + for idx in indexes_meta: + col_list = ', '.join(idx['columns']) + create_index_sql = f"CREATE INDEX IF NOT EXISTS {idx['name']} ON {table_name} ({col_list});" + upgrade_ops.append(ops.ExecuteSQLOp(create_index_sql)) + + drop_index_sql 
= f"DROP INDEX IF EXISTS {idx['name']};" + downgrade_ops.append(ops.ExecuteSQLOp(drop_index_sql)) + + return upgrade_ops, downgrade_ops + + +def process_ops(context, upgrade_ops, downgrade_ops): + """Process upgrade and downgrade operations.""" + final_upgrade_ops = [] + final_downgrade_ops = [] + + # First pass: Handle table creations and their features + for op_ in upgrade_ops.ops: + table_name = op_.table_name + table_obj = target_metadata.tables.get(table_name) + final_upgrade_ops.append(op_) + + # Create hypertable if configured + hypertable_meta = getattr(table_obj, '__hypertable__', None) + if hypertable_meta: + upgrade, downgrade = create_hypertable_ops( + table_name, hypertable_meta) + final_upgrade_ops.extend(upgrade) + final_downgrade_ops.extend(downgrade) + + # Create indexes if configured + indexes_meta = getattr(table_obj, '__indexes__', None) + if indexes_meta: + upgrade, downgrade = create_index_ops(table_name, indexes_meta) + final_upgrade_ops.extend(upgrade) + final_downgrade_ops.extend(downgrade) + + # Update operations + upgrade_ops.ops = final_upgrade_ops + downgrade_ops.ops = final_downgrade_ops + downgrade_ops.ops + + + def run_migrations_offline() -> None: """Run migrations in 'offline' mode.""" url = config.get_main_option("sqlalchemy.url") @@ -44,10 +170,9 @@ def run_migrations_offline() -> None: literal_binds=True, dialect_opts={"paramstyle": "named"}, include_object=include_object, - compare_type=True, # detect type changes (e.g. 
from Int to BigInt) - # detect default value changes of a column(will apply to newly inserted records) - compare_server_default=True + process_revision_directives=process_revision_directives, ) + with context.begin_transaction(): context.run_migrations() @@ -61,11 +186,14 @@ def run_migrations_online() -> None: ) with connectable.connect() as connection: - context.configure(connection=connection, - target_metadata=target_metadata, - include_object=include_object, - compare_type=True, - compare_server_default=True) + context.configure( + connection=connection, + target_metadata=target_metadata, + include_object=include_object, + process_revision_directives=process_revision_directives, + compare_type=True + ) + with context.begin_transaction(): context.run_migrations() diff --git a/alembic/versions/083c114aab2c_initial_migration.py b/alembic/versions/083c114aab2c_initial_migration.py new file mode 100644 index 0000000..7906fc5 --- /dev/null +++ b/alembic/versions/083c114aab2c_initial_migration.py @@ -0,0 +1,64 @@ +"""initial migration + +Revision ID: 083c114aab2c +Revises: +Create Date: 2025-06-08 20:40:31.500471 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '083c114aab2c' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('ihr_asn', + sa.Column('number', sa.BigInteger(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('tartiflette', sa.Boolean(), nullable=False), + sa.Column('disco', sa.Boolean(), nullable=False), + sa.Column('ashash', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('number') + ) + op.create_table('ihr_country', + sa.Column('code', sa.String(length=4), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('tartiflette', sa.Boolean(), nullable=False), + sa.Column('disco', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('code') + ) + op.create_table('ihr_hegemonycone', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', sa.DateTime(), nullable=False), + sa.Column('conesize', sa.Integer(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_hegemonycone', by_range('timebin', INTERVAL '2 day'));") + op.execute("ALTER TABLE ihr_hegemonycone SET (timescaledb.compress, timescaledb.compress_segmentby = 'asn_id,af', timescaledb.compress_orderby = 'timebin');") + op.execute("SELECT add_compression_policy('ihr_hegemonycone', INTERVAL '7 days');") + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemonycone_asn_id_timebin_idx ON ihr_hegemonycone (asn_id, timebin DESC);') + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.execute('ALTER TABLE ihr_hegemonycone SET (timescaledb.compress = false);') + op.execute("SELECT remove_compression_policy('ihr_hegemonycone', if_exists => TRUE);") + op.execute('DROP INDEX IF EXISTS ihr_hegemonycone_asn_id_timebin_idx;') + op.drop_table('ihr_hegemonycone') + op.drop_table('ihr_country') + op.drop_table('ihr_asn') + # ### end Alembic commands ### diff --git a/alembic/versions/569035a81799_initial_migration.py b/alembic/versions/569035a81799_initial_migration.py deleted file mode 100644 index 87b1748..0000000 --- a/alembic/versions/569035a81799_initial_migration.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Initial migration - -Revision ID: 569035a81799 -Revises: -Create Date: 2025-06-05 11:10:26.127839 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = '569035a81799' -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('ihr_country', sa.Column('continent', sa.String(length=50), nullable=True)) - op.add_column('ihr_country', sa.Column('language', sa.String(length=50), server_default=sa.text("'EN'"), nullable=False)) - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('ihr_country', 'language') - op.drop_column('ihr_country', 'continent') - # ### end Alembic commands ### diff --git a/alembic/versions/59fba5ae8f0b_changed_defult_language_to_ar_in_country.py b/alembic/versions/59fba5ae8f0b_changed_defult_language_to_ar_in_country.py deleted file mode 100644 index 1a42708..0000000 --- a/alembic/versions/59fba5ae8f0b_changed_defult_language_to_ar_in_country.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Changed defult language to AR in Country - -Revision ID: 59fba5ae8f0b -Revises: 569035a81799 -Create Date: 2025-06-05 11:14:35.640233 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = '59fba5ae8f0b' -down_revision: Union[str, None] = '569035a81799' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('ihr_country', 'language', - existing_type=sa.VARCHAR(length=50), - server_default=sa.text("'AR'"), - existing_nullable=False) - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.alter_column('ihr_country', 'language', - existing_type=sa.VARCHAR(length=50), - server_default=sa.text("'EN'::character varying"), - existing_nullable=False) - # ### end Alembic commands ### diff --git a/alembic/versions/bd82e57154c5_removed_columns_from_country.py b/alembic/versions/bd82e57154c5_removed_columns_from_country.py deleted file mode 100644 index 8d4f6e2..0000000 --- a/alembic/versions/bd82e57154c5_removed_columns_from_country.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Removed columns from country - -Revision ID: bd82e57154c5 -Revises: 59fba5ae8f0b -Create Date: 2025-06-05 11:17:52.058567 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = 'bd82e57154c5' -down_revision: Union[str, None] = '59fba5ae8f0b' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('ihr_country', 'language') - op.drop_column('ihr_country', 'continent') - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('ihr_country', sa.Column('continent', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) - op.add_column('ihr_country', sa.Column('language', sa.VARCHAR(length=50), server_default=sa.text("'AR'::character varying"), autoincrement=False, nullable=False)) - # ### end Alembic commands ### diff --git a/models/asn_model.py b/models/asn_model.py new file mode 100644 index 0000000..39d35fb --- /dev/null +++ b/models/asn_model.py @@ -0,0 +1,17 @@ +from sqlalchemy import Column, BigInteger, Integer, String, Boolean, DateTime, ForeignKey, Index +from sqlalchemy.orm import relationship +from config.database import Base + +class ASN(Base): + __tablename__ = 'ihr_asn' + + number = Column(BigInteger, primary_key=True, doc='Autonomous System Number (ASN) or IXP ID. Note that IXP ID are negative to avoid collision.') + name = Column(String(255), nullable=False, doc='Name registered for the network.') + tartiflette = Column(Boolean, default=False, nullable=False, doc='True if participate in link delay and forwarding anomaly analysis.') + disco = Column(Boolean, default=False, nullable=False, doc='True if participate in network disconnection analysis.') + ashash = Column(Boolean, default=False, nullable=False, doc='True if participate in AS dependency analysis.') + + # Relationship to HegemonyCone + hegemony_cones = relationship('HegemonyCone', back_populates='asn_relation') + + diff --git a/models/hegemonycone.py b/models/hegemonycone.py new file mode 100644 index 0000000..d104cde --- /dev/null +++ b/models/hegemonycone.py @@ -0,0 +1,40 @@ +from sqlalchemy import Column, BigInteger, Integer, DateTime, ForeignKey, PrimaryKeyConstraint +from sqlalchemy.orm import relationship +from config.database import Base + + +class HegemonyCone(Base): + __tablename__ = 'ihr_hegemonycone' + + __table_args__ = ( + PrimaryKeyConstraint('id', 'timebin'), + ) + + __indexes__ = [ + { + 'name': 'ihr_hegemonycone_asn_id_timebin_idx', + 'columns': ['asn_id', 'timebin DESC'] + 
} + ] + + __hypertable__ = { + 'time_column': 'timebin', + 'chunk_time_interval': '2 day', + 'compress': True, + 'compress_segmentby': 'asn_id,af', + 'compress_orderby': 'timebin', + 'compress_policy': True, + 'compress_after': '7 days' + } + + id = Column(BigInteger, autoincrement=True) + timebin = Column(DateTime, nullable=False, + doc='Timestamp of reported value.') + conesize = Column(Integer, default=0, nullable=False, + doc="Number of dependent networks, namely, networks that are reached through the asn.") + af = Column(Integer, default=0, nullable=False, + doc='Address Family (IP version), values are either 4 or 6.') + asn_id = Column(BigInteger, ForeignKey('ihr_asn.number', ondelete='CASCADE'), + nullable=False, doc='Autonomous System Number (ASN).') + + asn_relation = relationship('ASN', back_populates='hegemony_cones') From 1ea5fac5c88fa5c2fcb7abfcf7ea24cd605f24a3 Mon Sep 17 00:00:00 2001 From: ibraam Date: Sun, 8 Jun 2025 22:01:38 +0300 Subject: [PATCH 03/43] Modified index creation in env.py --- alembic/env.py | 73 +++++++++++++++---- .../650fd402b9d5_initial_migration3.py | 32 ++++++++ .../8865d4f4ee7f_initial_migration2.py | 32 ++++++++ ...n.py => bbb6ce9ca16b_initial_migration.py} | 7 +- models/country_model.py | 4 +- models/hegemonycone.py | 4 + 6 files changed, 131 insertions(+), 21 deletions(-) create mode 100644 alembic/versions/650fd402b9d5_initial_migration3.py create mode 100644 alembic/versions/8865d4f4ee7f_initial_migration2.py rename alembic/versions/{083c114aab2c_initial_migration.py => bbb6ce9ca16b_initial_migration.py} (93%) diff --git a/alembic/env.py b/alembic/env.py index 25aae28..7331ed3 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -7,6 +7,8 @@ import importlib.util import pathlib from alembic.operations import ops +from alembic.operations.ops import CreateIndexOp +from sqlalchemy import inspect, text # Get Alembic config config = context.config @@ -129,36 +131,75 @@ def create_index_ops(table_name, indexes_meta): return 
upgrade_ops, downgrade_ops +def check_index_exists(context, table_name, index_name): + """Check if index exists using MigrationContext.""" + print(index_name) + # In offline mode, assume index doesn't exist + if not hasattr(context, 'bind'): + return False + + sql = text(""" + SELECT EXISTS ( + SELECT 1 + FROM pg_indexes + WHERE tablename = :table_name + AND indexname = :index_name + ); + """) + + try: + return context.bind.execute(sql, { + 'table_name': table_name, + 'index_name': index_name + }).scalar() + except Exception: + return False + + def process_ops(context, upgrade_ops, downgrade_ops): """Process upgrade and downgrade operations.""" final_upgrade_ops = [] - final_downgrade_ops = [] + new_downgrade_ops = [] + + # Get all table names from metadata + all_tables = target_metadata.tables.keys() - # First pass: Handle table creations and their features + # Handle table creations and their features for op_ in upgrade_ops.ops: table_name = op_.table_name table_obj = target_metadata.tables.get(table_name) - final_upgrade_ops.append(op_) - # Create hypertable if configured - hypertable_meta = getattr(table_obj, '__hypertable__', None) - if hypertable_meta: - upgrade, downgrade = create_hypertable_ops( - table_name, hypertable_meta) - final_upgrade_ops.extend(upgrade) - final_downgrade_ops.extend(downgrade) + # Always add table creation + final_upgrade_ops.append(op_) - # Create indexes if configured + # Only process hypertable ops if this is a table creation + if isinstance(op_, ops.CreateTableOp): + hypertable_meta = getattr(table_obj, '__hypertable__', None) + if hypertable_meta: + upgrade, downgrade = create_hypertable_ops( + table_name, hypertable_meta) + final_upgrade_ops.extend(upgrade) + new_downgrade_ops.extend(downgrade) + + # Handle index creations + for table_name in all_tables: + table_obj = target_metadata.tables.get(table_name) indexes_meta = getattr(table_obj, '__indexes__', None) + if indexes_meta: - upgrade, downgrade = 
create_index_ops(table_name, indexes_meta) - final_upgrade_ops.extend(upgrade) - final_downgrade_ops.extend(downgrade) + new_indexes = [ + idx for idx in indexes_meta + if not check_index_exists(context, table_name, idx['name']) + ] + if new_indexes: + upgrade, downgrade = create_index_ops( + table_name, new_indexes) + final_upgrade_ops.extend(upgrade) + new_downgrade_ops.extend(downgrade) # Update operations upgrade_ops.ops = final_upgrade_ops - downgrade_ops.ops = final_downgrade_ops + downgrade_ops.ops - + downgrade_ops.ops = new_downgrade_ops + downgrade_ops.ops def run_migrations_offline() -> None: diff --git a/alembic/versions/650fd402b9d5_initial_migration3.py b/alembic/versions/650fd402b9d5_initial_migration3.py new file mode 100644 index 0000000..be2f157 --- /dev/null +++ b/alembic/versions/650fd402b9d5_initial_migration3.py @@ -0,0 +1,32 @@ +"""initial migration3 + +Revision ID: 650fd402b9d5 +Revises: 8865d4f4ee7f +Create Date: 2025-06-08 21:57:18.231591 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '650fd402b9d5' +down_revision: Union[str, None] = '8865d4f4ee7f' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('ihr_country', sa.Column('continent', sa.String(length=50), nullable=True)) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('ihr_country', 'continent') + # ### end Alembic commands ### diff --git a/alembic/versions/8865d4f4ee7f_initial_migration2.py b/alembic/versions/8865d4f4ee7f_initial_migration2.py new file mode 100644 index 0000000..59b334c --- /dev/null +++ b/alembic/versions/8865d4f4ee7f_initial_migration2.py @@ -0,0 +1,32 @@ +"""initial migration2 + +Revision ID: 8865d4f4ee7f +Revises: bbb6ce9ca16b +Create Date: 2025-06-08 21:56:37.124252 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '8865d4f4ee7f' +down_revision: Union[str, None] = 'bbb6ce9ca16b' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemonycone_asn_id_idx ON ihr_hegemonycone (asn_id);') + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.execute('DROP INDEX IF EXISTS ihr_hegemonycone_asn_id_idx;') + # ### end Alembic commands ### diff --git a/alembic/versions/083c114aab2c_initial_migration.py b/alembic/versions/bbb6ce9ca16b_initial_migration.py similarity index 93% rename from alembic/versions/083c114aab2c_initial_migration.py rename to alembic/versions/bbb6ce9ca16b_initial_migration.py index 7906fc5..97bd12f 100644 --- a/alembic/versions/083c114aab2c_initial_migration.py +++ b/alembic/versions/bbb6ce9ca16b_initial_migration.py @@ -1,8 +1,8 @@ """initial migration -Revision ID: 083c114aab2c +Revision ID: bbb6ce9ca16b Revises: -Create Date: 2025-06-08 20:40:31.500471 +Create Date: 2025-06-08 21:55:20.768018 """ from typing import Sequence, Union @@ -12,7 +12,7 @@ # revision identifiers, used by Alembic. 
-revision: str = '083c114aab2c' +revision: str = 'bbb6ce9ca16b' down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None @@ -34,6 +34,7 @@ def upgrade() -> None: sa.Column('name', sa.String(length=255), nullable=False), sa.Column('tartiflette', sa.Boolean(), nullable=False), sa.Column('disco', sa.Boolean(), nullable=False), + sa.Column('language', sa.String(length=50), server_default=sa.text("'AR'"), nullable=False), sa.PrimaryKeyConstraint('code') ) op.create_table('ihr_hegemonycone', diff --git a/models/country_model.py b/models/country_model.py index 791e478..d076fa8 100644 --- a/models/country_model.py +++ b/models/country_model.py @@ -9,5 +9,5 @@ class Country(Base): name = Column(String(255), nullable=False) tartiflette = Column(Boolean, default=False, nullable=False) disco = Column(Boolean, default=False, nullable=False) - #continent = Column(String(50), nullable=True) - #language = Column(String(50), nullable=False, server_default=text("'AR'")) + continent = Column(String(50), nullable=True) + language = Column(String(50), nullable=False, server_default=text("'AR'")) diff --git a/models/hegemonycone.py b/models/hegemonycone.py index d104cde..a3e6045 100644 --- a/models/hegemonycone.py +++ b/models/hegemonycone.py @@ -14,6 +14,9 @@ class HegemonyCone(Base): { 'name': 'ihr_hegemonycone_asn_id_timebin_idx', 'columns': ['asn_id', 'timebin DESC'] + },{ + 'name': 'ihr_hegemonycone_asn_id_idx', + 'columns': ['asn_id'] } ] @@ -38,3 +41,4 @@ class HegemonyCone(Base): nullable=False, doc='Autonomous System Number (ASN).') asn_relation = relationship('ASN', back_populates='hegemony_cones') + From 63c6680a906a13d89659d15bffb48517e74e1bd1 Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 12 Jun 2025 13:35:25 +0300 Subject: [PATCH 04/43] Cleaned the files and added comments --- alembic.ini | 5 +- alembic/env.py | 68 ++++++++++++++----- .../650fd402b9d5_initial_migration3.py | 32 
--------- .../8865d4f4ee7f_initial_migration2.py | 32 --------- .../bbb6ce9ca16b_initial_migration.py | 65 ------------------ config/database.py | 10 ++- controllers/country_controller.py | 44 ++++++++---- dtos/generic_response_dto.py | 28 ++++++++ dtos/pagination_dto.py | 7 -- globals.py | 2 + main.py | 1 + models/country_model.py | 2 - models/hegemonycone.py | 14 ++-- repositories/country_repository.py | 35 ++++++++-- scripts/apply_migrations.sh | 39 ----------- scripts/setup_database.sh | 47 ------------- services/country_service.py | 24 +++++-- utils/pagination.py | 54 --------------- 18 files changed, 178 insertions(+), 331 deletions(-) delete mode 100644 alembic/versions/650fd402b9d5_initial_migration3.py delete mode 100644 alembic/versions/8865d4f4ee7f_initial_migration2.py delete mode 100644 alembic/versions/bbb6ce9ca16b_initial_migration.py create mode 100644 dtos/generic_response_dto.py delete mode 100644 dtos/pagination_dto.py create mode 100644 globals.py delete mode 100755 scripts/apply_migrations.sh delete mode 100755 scripts/setup_database.sh delete mode 100644 utils/pagination.py diff --git a/alembic.ini b/alembic.ini index 84adbe4..d31b693 100644 --- a/alembic.ini +++ b/alembic.ini @@ -64,9 +64,8 @@ version_path_separator = os # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = postgresql://postgres:123password456@localhost:5435/ihr-fastapi -# user:django , password:123password456,database:ihr -#user:django-timescaledb +# placeholder (will be replaced dynamically in /alembic/env.py file to reflect the database url found in .env file) +sqlalchemy.url = driver://user:pass@localhost/dbname [post_write_hooks] # post_write_hooks defines scripts or Python functions that are run diff --git a/alembic/env.py b/alembic/env.py index 7331ed3..ac65105 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -9,21 +9,32 @@ from alembic.operations import ops from alembic.operations.ops import CreateIndexOp from sqlalchemy import inspect, text 
+from dotenv import load_dotenv + # Get Alembic config config = context.config +# Load environment variables from .env file +load_dotenv() +# Override Alembic DB URL (found in alembic.ini) with the DB URL found in the .env file +DATABASE_URL = os.getenv("DATABASE_URL") +if DATABASE_URL: + config.set_main_option("sqlalchemy.url", DATABASE_URL) + # Setup logging if config.config_file_name is not None: fileConfig(config.config_file_name) -# Modify the model discovery section +# --- DYNAMIC MODEL DISCOVERY --- + +# Add models directory to the path models_path = pathlib.Path(__file__).parent.parent / "models" sys.path.append(str(models_path.parent)) model_classes = [] -# First pass: Load all models +# Load all model classes inheriting from Base (excluding __init__.py) for file in models_path.glob("*.py"): if file.name != "__init__.py": module_name = f"models.{file.stem}" @@ -33,10 +44,11 @@ if isinstance(attr, type) and issubclass(attr, Base) and attr != Base: model_classes.append(attr) -# Set target metadata + +# Set Alembic's metadata target for autogeneration target_metadata = Base.metadata -# Second pass: Associate metadata (hypertable and indexes) with table objects +# Attach custom metadata (hypertable and indexes) to the tables for model in model_classes: table = target_metadata.tables.get(model.__tablename__) if table is not None: @@ -49,38 +61,47 @@ setattr(table, '__indexes__', model.__indexes__) +# --- MIGRATION BEHAVIOR CUSTOMIZATION --- + + def include_object(object, name, type_, reflected, compare_to): - # Prevent dropping tables and indexes + # Prevent dropping tables and indexes that are found in database and aren't found in the models if reflected and compare_to is None: if type_ in ("table", "index"): - return False # Don't drop it + return False return True def process_revision_directives(context, revision, directives): - """Called during 'alembic revision --autogenerate'""" + """ + Hook to modify the autogenerated alembic migration file before it's 
generated. + Injects hypertable and index creation SQL. + """ if directives[0].upgrade_ops is not None: - # Process create table operations and inject hypertable commands process_ops( context, directives[0].upgrade_ops, directives[0].downgrade_ops) +# --- HELPERS FOR HYPERTABLES AND INDEXES --- + def create_hypertable_ops(table_name, hypertable_meta, is_existing=False): - """Generate hypertable creation operations.""" + """ + Generate SQL operations for TimescaleDB hypertable creation and compression. + """ upgrade_ops = [] downgrade_ops = [] time_col = hypertable_meta['time_column'] chunk_interval = hypertable_meta.get('chunk_time_interval', '1 day') - # Create hypertable with migrate_data for existing tables + # Create hypertable SQL hypertable_sql = ( f"SELECT create_hypertable('{table_name}', by_range('{time_col}', INTERVAL '{chunk_interval}'));" ) upgrade_ops.append(ops.ExecuteSQLOp(hypertable_sql)) - # Handle compression + # Handle compression settings if hypertable_meta.get('compress', False): segment_by = hypertable_meta.get('compress_segmentby', '') order_by = hypertable_meta.get('compress_orderby', time_col) @@ -116,7 +137,9 @@ def create_hypertable_ops(table_name, hypertable_meta, is_existing=False): def create_index_ops(table_name, indexes_meta): - """Generate index creation operations.""" + """ + Generate SQL operations for index creation and deletion. + """ upgrade_ops = [] downgrade_ops = [] @@ -132,7 +155,10 @@ def create_index_ops(table_name, indexes_meta): def check_index_exists(context, table_name, index_name): - """Check if index exists using MigrationContext.""" + """ + Query PostgreSQL system tables to determine if an index already exists. + This prevents Alembic from generating duplicate index statements. 
+ """ print(index_name) # In offline mode, assume index doesn't exist if not hasattr(context, 'bind'): @@ -157,7 +183,9 @@ def check_index_exists(context, table_name, index_name): def process_ops(context, upgrade_ops, downgrade_ops): - """Process upgrade and downgrade operations.""" + """ + Inject hypertable and index-related SQL into Alembic upgrade/downgrade steps. + """ final_upgrade_ops = [] new_downgrade_ops = [] @@ -202,8 +230,12 @@ def process_ops(context, upgrade_ops, downgrade_ops): downgrade_ops.ops = new_downgrade_ops + downgrade_ops.ops +# --- OFFLINE/ONLINE MIGRATION EXECUTION --- + def run_migrations_offline() -> None: - """Run migrations in 'offline' mode.""" + """ + Run migrations without connecting to a database. + """ url = config.get_main_option("sqlalchemy.url") context.configure( url=url, @@ -219,7 +251,9 @@ def run_migrations_offline() -> None: def run_migrations_online() -> None: - """Run migrations in 'online' mode.""" + """ + Run migrations in online mode (connected to the database). + """ connectable = engine_from_config( config.get_section(config.config_ini_section, {}), prefix="sqlalchemy.", @@ -238,7 +272,7 @@ def run_migrations_online() -> None: with context.begin_transaction(): context.run_migrations() - +# Execute appropriate migration mode if context.is_offline_mode(): run_migrations_offline() else: diff --git a/alembic/versions/650fd402b9d5_initial_migration3.py b/alembic/versions/650fd402b9d5_initial_migration3.py deleted file mode 100644 index be2f157..0000000 --- a/alembic/versions/650fd402b9d5_initial_migration3.py +++ /dev/null @@ -1,32 +0,0 @@ -"""initial migration3 - -Revision ID: 650fd402b9d5 -Revises: 8865d4f4ee7f -Create Date: 2025-06-08 21:57:18.231591 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision: str = '650fd402b9d5' -down_revision: Union[str, None] = '8865d4f4ee7f' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('ihr_country', sa.Column('continent', sa.String(length=50), nullable=True)) - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('ihr_country', 'continent') - # ### end Alembic commands ### diff --git a/alembic/versions/8865d4f4ee7f_initial_migration2.py b/alembic/versions/8865d4f4ee7f_initial_migration2.py deleted file mode 100644 index 59b334c..0000000 --- a/alembic/versions/8865d4f4ee7f_initial_migration2.py +++ /dev/null @@ -1,32 +0,0 @@ -"""initial migration2 - -Revision ID: 8865d4f4ee7f -Revises: bbb6ce9ca16b -Create Date: 2025-06-08 21:56:37.124252 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = '8865d4f4ee7f' -down_revision: Union[str, None] = 'bbb6ce9ca16b' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemonycone_asn_id_idx ON ihr_hegemonycone (asn_id);') - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.execute('DROP INDEX IF EXISTS ihr_hegemonycone_asn_id_idx;') - # ### end Alembic commands ### diff --git a/alembic/versions/bbb6ce9ca16b_initial_migration.py b/alembic/versions/bbb6ce9ca16b_initial_migration.py deleted file mode 100644 index 97bd12f..0000000 --- a/alembic/versions/bbb6ce9ca16b_initial_migration.py +++ /dev/null @@ -1,65 +0,0 @@ -"""initial migration - -Revision ID: bbb6ce9ca16b -Revises: -Create Date: 2025-06-08 21:55:20.768018 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = 'bbb6ce9ca16b' -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('ihr_asn', - sa.Column('number', sa.BigInteger(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('tartiflette', sa.Boolean(), nullable=False), - sa.Column('disco', sa.Boolean(), nullable=False), - sa.Column('ashash', sa.Boolean(), nullable=False), - sa.PrimaryKeyConstraint('number') - ) - op.create_table('ihr_country', - sa.Column('code', sa.String(length=4), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('tartiflette', sa.Boolean(), nullable=False), - sa.Column('disco', sa.Boolean(), nullable=False), - sa.Column('language', sa.String(length=50), server_default=sa.text("'AR'"), nullable=False), - sa.PrimaryKeyConstraint('code') - ) - op.create_table('ihr_hegemonycone', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', sa.DateTime(), nullable=False), - sa.Column('conesize', sa.Integer(), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], 
['ihr_asn.number'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_hegemonycone', by_range('timebin', INTERVAL '2 day'));") - op.execute("ALTER TABLE ihr_hegemonycone SET (timescaledb.compress, timescaledb.compress_segmentby = 'asn_id,af', timescaledb.compress_orderby = 'timebin');") - op.execute("SELECT add_compression_policy('ihr_hegemonycone', INTERVAL '7 days');") - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemonycone_asn_id_timebin_idx ON ihr_hegemonycone (asn_id, timebin DESC);') - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.execute('ALTER TABLE ihr_hegemonycone SET (timescaledb.compress = false);') - op.execute("SELECT remove_compression_policy('ihr_hegemonycone', if_exists => TRUE);") - op.execute('DROP INDEX IF EXISTS ihr_hegemonycone_asn_id_timebin_idx;') - op.drop_table('ihr_hegemonycone') - op.drop_table('ihr_country') - op.drop_table('ihr_asn') - # ### end Alembic commands ### diff --git a/config/database.py b/config/database.py index 1a8365b..d1f4384 100644 --- a/config/database.py +++ b/config/database.py @@ -3,15 +3,19 @@ import os from dotenv import load_dotenv +# Load environment variables from .env file load_dotenv() -DATABASE_URL = "postgresql://ihr_bash_user:ihr_password@localhost:5434/ihr_bash" - +# Read the database URL from the environment variable +DATABASE_URL = os.getenv("DATABASE_URL") +# Create the SQLAlchemy engine with the database URL engine = create_engine(DATABASE_URL) +# Create a session factory SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) +# Declare a base class for ORM models Base = declarative_base() -# Dependency for FastAPI routes +# Dependency to get a DB session for FastAPI routes (used in controllers) def get_db(): db = SessionLocal() try: diff --git a/controllers/country_controller.py 
b/controllers/country_controller.py index 13747c8..2dbdeff 100644 --- a/controllers/country_controller.py +++ b/controllers/country_controller.py @@ -1,13 +1,13 @@ from fastapi import APIRouter, Depends, Query, Request from sqlalchemy.orm import Session from services.country_service import CountryService -from utils.pagination import PaginatedResponse, paginate_and_order +from dtos.generic_response_dto import GenericResponseDTO, build_url from dtos.country_dto import CountryDTO from config.database import get_db from typing import Optional -from dtos.pagination_dto import PaginationParams - +from globals import page_size +# Define a router for all endpoints under /countries router = APIRouter(prefix="/countries", tags=["Countries"]) @@ -15,17 +15,37 @@ class CountryController: service = CountryService() @staticmethod - @router.get("/", response_model=PaginatedResponse[CountryDTO]) + @router.get("/", response_model=GenericResponseDTO[CountryDTO]) def get_all_countries( request: Request, db: Session = Depends(get_db), - pagination: PaginationParams = Depends(), # Generic pagination params + page: Optional[int] = Query( + None, ge=1, description="A page number within the paginated result set"), code: Optional[str] = Query( - None, description="Filter by country code"), + None, description="Search by country code"), name: Optional[str] = Query( - None, description="Search by country name (substring)") - ): - """Retrieves all countries with optional filters.""" - countries = CountryController.service.get_all_countries( - db, code=code, name=name) - return paginate_and_order(countries, request, pagination.page, pagination.ordering) + None, description="Search for a substring in countries name"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results") + ) -> GenericResponseDTO[CountryDTO]: + """Retrieves paginated countries with optional filters.""" + + page = page or 1 + countries, total_count = 
CountryController.service.get_all_countries( + db, + code=code, + name=name, + page=page, + order_by=ordering + ) + + # Calculate next and previous pages + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=countries + ) diff --git a/dtos/generic_response_dto.py b/dtos/generic_response_dto.py new file mode 100644 index 0000000..3070280 --- /dev/null +++ b/dtos/generic_response_dto.py @@ -0,0 +1,28 @@ +from typing import TypeVar, List, Optional, Generic +from fastapi import Request +from urllib.parse import urlencode, urlunparse +from pydantic import BaseModel + +T = TypeVar("T") + +# The generic response format returned by all endpoints +class GenericResponseDTO(BaseModel, Generic[T]): + count: int + next: Optional[str] + previous: Optional[str] + results: List[T] + +# Builds the url returned by "next" and "previous" fields in the GenericResponseDTO +def build_url(request: Request, page: Optional[int]) -> Optional[str]: + if page is None: + return None + query_params = dict(request.query_params) + query_params["page"] = str(page) + return urlunparse(( + request.url.scheme, + request.url.netloc, + request.url.path, + "", + urlencode(query_params), + "" + )) diff --git a/dtos/pagination_dto.py b/dtos/pagination_dto.py deleted file mode 100644 index edbe4a4..0000000 --- a/dtos/pagination_dto.py +++ /dev/null @@ -1,7 +0,0 @@ -from typing import Optional -from fastapi import Query -from pydantic import BaseModel - -class PaginationParams(BaseModel): - page: int = Query(1, ge=1, description="Page number") - ordering: Optional[str] = Query(None, description="Field to order by") diff --git a/globals.py b/globals.py new file mode 100644 index 0000000..113770b --- /dev/null +++ b/globals.py @@ -0,0 +1,2 @@ +# page size represents the number of objects returned by "results" 
field of GenericResponseDTO +page_size = 5 \ No newline at end of file diff --git a/main.py b/main.py index 32ba25e..cb2edfd 100644 --- a/main.py +++ b/main.py @@ -3,6 +3,7 @@ from fastapi import FastAPI from controllers import __path__ as controllers_path # Adjusted for `ihr` structure +# The base URL of the app app = FastAPI(root_path="/ihr/api") # Automatically import and register all routers inside "ihr/controllers" diff --git a/models/country_model.py b/models/country_model.py index d076fa8..13a5f0f 100644 --- a/models/country_model.py +++ b/models/country_model.py @@ -9,5 +9,3 @@ class Country(Base): name = Column(String(255), nullable=False) tartiflette = Column(Boolean, default=False, nullable=False) disco = Column(Boolean, default=False, nullable=False) - continent = Column(String(50), nullable=True) - language = Column(String(50), nullable=False, server_default=text("'AR'")) diff --git a/models/hegemonycone.py b/models/hegemonycone.py index a3e6045..108f062 100644 --- a/models/hegemonycone.py +++ b/models/hegemonycone.py @@ -1,6 +1,7 @@ from sqlalchemy import Column, BigInteger, Integer, DateTime, ForeignKey, PrimaryKeyConstraint from sqlalchemy.orm import relationship from config.database import Base +from sqlalchemy.dialects.postgresql import TIMESTAMP class HegemonyCone(Base): @@ -14,10 +15,7 @@ class HegemonyCone(Base): { 'name': 'ihr_hegemonycone_asn_id_timebin_idx', 'columns': ['asn_id', 'timebin DESC'] - },{ - 'name': 'ihr_hegemonycone_asn_id_idx', - 'columns': ['asn_id'] - } + }, ] __hypertable__ = { @@ -31,14 +29,14 @@ class HegemonyCone(Base): } id = Column(BigInteger, autoincrement=True) - timebin = Column(DateTime, nullable=False, - doc='Timestamp of reported value.') + timebin = Column(TIMESTAMP(timezone=True), nullable=False, + doc='Timestamp with time zone.') + conesize = Column(Integer, default=0, nullable=False, doc="Number of dependent networks, namely, networks that are reached through the asn.") af = Column(Integer, default=0, 
nullable=False, doc='Address Family (IP version), values are either 4 or 6.') - asn_id = Column(BigInteger, ForeignKey('ihr_asn.number', ondelete='CASCADE'), + asn_id = Column(BigInteger, ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_ihr_hegemonycone_asn_id'), nullable=False, doc='Autonomous System Number (ASN).') asn_relation = relationship('ASN', back_populates='hegemony_cones') - diff --git a/repositories/country_repository.py b/repositories/country_repository.py index cdec9c7..b31e2e9 100644 --- a/repositories/country_repository.py +++ b/repositories/country_repository.py @@ -1,17 +1,42 @@ from sqlalchemy.orm import Session from models.country_model import Country -from typing import Optional, List +from typing import Optional, List, Tuple # Added Tuple for return type +from sqlalchemy import asc +from globals import page_size class CountryRepository: - def get_all(self, db: Session, code: Optional[str] = None, name: Optional[str] = None) -> List[Country]: - """Retrieves countries, optionally filtering by code and name substring.""" + def get_all( + self, + db: Session, + code: Optional[str] = None, + name: Optional[str] = None, + page: int = 1, # Page number, defaults to 1 + order_by: Optional[str] = None, # Column name to sort by + ) -> Tuple[List[Country], int]: # Returns list of countries and total count + """ + Retrieves countries with pagination and ordering at database level. 
+ Returns: Tuple[List[Country], total_count] + """ + # Initialize base query query = db.query(Country) + # Apply filters if provided if code: query = query.filter(Country.code == code) - if name: query = query.filter(Country.name.ilike(f"%{name}%")) - return query.all() + #Executes getting total count of countries + total_count = query.count() + + # Apply ordering if specified + if order_by and hasattr(Country, order_by): + query = query.order_by(asc(getattr(Country, order_by))) + + # Calculate offset based on page number and size + offset = (page - 1) * page_size + # Apply pagination and execute query + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/scripts/apply_migrations.sh b/scripts/apply_migrations.sh deleted file mode 100755 index 2a7af6e..0000000 --- a/scripts/apply_migrations.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/bash - -set -e # Exit immediately if a command fails - -# Get the script's directory -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -cd "$SCRIPT_DIR/.." || { echo "Error: Project directory not found!"; exit 1; } - -VERSIONS_DIR="alembic/versions" - -# Check if the versions directory exists and contains migration files -if [ ! -d "$VERSIONS_DIR" ] || ! find "$VERSIONS_DIR" -mindepth 1 | read; then - echo "⚠️ No migration files found. Initializing Alembic migration..." - alembic revision --autogenerate -m "Initial migration" - alembic upgrade head - echo "Database initialized with first migration." - exit 0 -fi - -echo "Checking for model changes..." -MIGRATION_OUTPUT=$(alembic revision --autogenerate -m "Auto migration" 2>&1) - -# Check if Alembic detected changes -if echo "$MIGRATION_OUTPUT" | grep -q "No changes detected"; then - echo "No changes detected. Skipping migration." 
- exit 0 -fi - -# Extract the new migration file name -NEW_MIGRATION_FILE=$(echo "$MIGRATION_OUTPUT" | grep -oE "alembic/versions/[0-9a-f]+_.*\.py" | tail -n 1) - -if [ -z "$NEW_MIGRATION_FILE" ]; then - echo "No valid migration file found after autogenerate. Skipping upgrade." - exit 1 -fi - -echo "Applying migration: $NEW_MIGRATION_FILE" -alembic upgrade head -echo "Migration applied successfully." diff --git a/scripts/setup_database.sh b/scripts/setup_database.sh deleted file mode 100755 index 5fe2272..0000000 --- a/scripts/setup_database.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -# Load environment variables -DB_NAME=${DB_NAME:-"ihr_bash"} -DB_USER=${DB_USER:-"ihr_bash_user"} -DB_PASSWORD=${DB_PASSWORD:-"ihr_password"} -DB_HOST=${DB_HOST:-"localhost"} -DB_PORT=${DB_PORT:-5434} -ADMIN_USER=${ADMIN_USER:-"django"} # Superuser for setup - -echo "Setting up PostgreSQL database..." - -# Ensure PostgreSQL service is running -if ! pg_isready -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" > /dev/null 2>&1; then - echo "Error: PostgreSQL is not running on $DB_HOST:$DB_PORT" - exit 1 -fi - -# Check if the database exists -DB_EXISTS=$(psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'") -if [ "$DB_EXISTS" != "1" ]; then - echo "Creating database $DB_NAME..." - psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -c "CREATE DATABASE \"$DB_NAME\";" \ - && echo "Database $DB_NAME created." -else - echo "Database $DB_NAME already exists." -fi - -# Check if the user exists -USER_EXISTS=$(psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -tAc "SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'") -if [ "$USER_EXISTS" != "1" ]; then - echo "Creating user $DB_USER..." - psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -c "CREATE USER \"$DB_USER\" WITH PASSWORD '$DB_PASSWORD';" \ - && echo "User $DB_USER created." -else - echo "User $DB_USER already exists." 
-fi - -# Grant privileges -echo "Setting permissions..." -psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -c "GRANT ALL PRIVILEGES ON DATABASE \"$DB_NAME\" TO \"$DB_USER\";" -psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d postgres -c "ALTER DATABASE \"$DB_NAME\" OWNER TO \"$DB_USER\";" - -# Grant schema privileges inside the new database -psql -h "$DB_HOST" -p "$DB_PORT" -U "$ADMIN_USER" -d "$DB_NAME" -c "GRANT ALL ON SCHEMA public TO \"$DB_USER\";" - -echo "Database setup completed." diff --git a/services/country_service.py b/services/country_service.py index 1f693d8..f61b65a 100644 --- a/services/country_service.py +++ b/services/country_service.py @@ -1,14 +1,28 @@ from sqlalchemy.orm import Session from repositories.country_repository import CountryRepository from dtos.country_dto import CountryDTO -from typing import Optional, List +from typing import Optional, List, Tuple class CountryService: def __init__(self): self.repository = CountryRepository() - def get_all_countries(self, db: Session, code: Optional[str] = None, name: Optional[str] = None) -> List[CountryDTO]: - """Fetches all countries, applying filters if provided.""" - countries = self.repository.get_all(db, code, name) - return [CountryDTO(code=c.code, name=c.name) for c in countries] + def get_all_countries( + self, + db: Session, + code: Optional[str] = None, + name: Optional[str] = None, + page: int = 1, # Page number, defaults to 1 + order_by: Optional[str] = None # Column name to sort by + ) -> Tuple[List[CountryDTO], int]: + """Fetches paginated countries, applying filters if provided.""" + + countries, total_count = self.repository.get_all( + db, + code=code, + name=name, + page=page, + order_by=order_by + ) + return [CountryDTO(code=c.code, name=c.name) for c in countries], total_count diff --git a/utils/pagination.py b/utils/pagination.py deleted file mode 100644 index 4394fbf..0000000 --- a/utils/pagination.py +++ /dev/null @@ -1,54 +0,0 @@ -from typing import 
TypeVar, List, Optional, Callable, Generic, Any -from fastapi import Request -from urllib.parse import urlencode, urlunparse -from pydantic import BaseModel - -T = TypeVar("T") - -class PaginatedResponse(BaseModel, Generic[T]): - count: int - next: Optional[str] - previous: Optional[str] - results: List[T] - - -def build_url(request: Request, page: Optional[int]) -> Optional[str]: - if page is None: - return None - query_params = dict(request.query_params) - query_params["page"] = str(page) - return urlunparse(( - request.url.scheme, - request.url.netloc, - request.url.path, - "", - urlencode(query_params), - "" - )) - -def paginate_and_order( - items: List[Any], - request: Request, - page: int, - order_by: Optional[Callable[[Any], Any]] = None -) -> PaginatedResponse: - # If order_by is provided, but is not callable (i.e., it's a string), convert it to a callable - if order_by: - if not callable(order_by): - # Assume order_by is the attribute name to sort by. - order_field = order_by - order_by = lambda x: getattr(x, order_field) - items = sorted(items, key=order_by) - total_count = len(items) - page_size = 5 - offset = (page - 1) * page_size - paginated_items = items[offset : offset + page_size] - next_page = page + 1 if offset + page_size < total_count else None - prev_page = page - 1 if page > 1 else None - - return PaginatedResponse( - count=total_count, - next=build_url(request, next_page), - previous=build_url(request, prev_page), - results=paginated_items - ) From caa710068ce6bbd8bfea2836f6089588086eaf83 Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 12 Jun 2025 14:34:57 +0300 Subject: [PATCH 05/43] Added documentation files --- LICENSE | 674 --------------------------- README.md | 74 ++- docs/images/project_architecture.png | Bin 0 -> 78972 bytes docs/project_architecture.md | 53 +++ docs/project_structure.md | 26 ++ 5 files changed, 132 insertions(+), 695 deletions(-) delete mode 100644 LICENSE create mode 100644 docs/images/project_architecture.png 
create mode 100644 docs/project_architecture.md create mode 100644 docs/project_structure.md diff --git a/LICENSE b/LICENSE deleted file mode 100644 index f288702..0000000 --- a/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. 
You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. 
- - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. 
- - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. 
Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. 
- - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. 
This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. 
- - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. 
If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. 
If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. 
- - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. 
For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. 
- - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. 
The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. 
- - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. 
diff --git a/README.md b/README.md index 4bac52e..44a6efe 100644 --- a/README.md +++ b/README.md @@ -1,42 +1,67 @@ -# FastAPI Application +# IHR FastAPI App -This project is IHR FastAPI-based application. You can run it either in a **Python virtual environment** or using **Docker**. +This project is a FastAPI-based backend for the IHR system. It is designed for flexibility and can be run either in a **Python virtual environment** or using **Docker**. + +--- + +## Getting Started + +### 1. Clone the Repository + +### 2. Create a `.env` File + +In the project root directory, create a `.env` file to define environment-specific settings, including the database connection. + +Example `.env` content: + +```env +DATABASE_URL=postgresql://postgres:123password456@localhost:5435/ihr-fastapi +``` + +> Make sure PostgreSQL is running and the database exists before continuing. --- ## Running the Application -You can run this application in two ways: -1. **Using a Python Virtual Environment** -2. **Using Docker** +You can run this application in one of the following ways: + +* Using a Python virtual environment +* Using Docker --- -## 1. 
Running in a Virtual Environment +## Option 1: Run in a Python Virtual Environment + +### Step 1: Create and Activate a Virtual Environment + +#### On Windows: -### **1️⃣ Create and Activate a Virtual Environment** -#### On Windows (Command Prompt or PowerShell): ```sh -python3 -m venv venv +python -m venv venv venv\Scripts\activate ``` + #### On macOS/Linux: + ```sh python3 -m venv venv source venv/bin/activate ``` -### **2️⃣ Install Dependencies** +### Step 2: Install Dependencies + ```sh pip install -r requirements.txt ``` -### **3️⃣ Run the FastAPI Application** +### Step 3: Run the Application + ```sh uvicorn main:app --host 0.0.0.0 --port 8000 --reload ``` -### **4️⃣ Access the API** +### **Step 4: Access the API** Once running, you can access: - API: **[http://localhost:8000/ihr/api](http://localhost:8000/ihr/api)** - Interactive Docs (Swagger UI): **[http://localhost:8000/ihr/api/docs](http://localhost:8000/docs)** @@ -44,30 +69,37 @@ Once running, you can access: --- -## 🐳 2. Running with Docker +## Option 2: Run with Docker + +### Step 1: Build the Docker Image -### **1️⃣ Build the Docker Image** ```sh docker build -t ihr-fastapi . 
``` -### **2️⃣ Run the Container** +### Step 2: Run the Docker Container + ```sh -docker run -p 8000:8000 ihr-fastapi +docker run -p 8000:8000 --env-file .env ihr-fastapi ``` -### **3️⃣ start the Container** +### (Optional) Step 3: Manage the Container + +Start an existing container: + ```sh docker start -# Attach to the logs +``` + +View logs: + +```sh docker logs -f ``` +### Step 4: Access the API -### **3️⃣ Access the API** Once running, you can access: - API: **[http://localhost:8000/ihr/api](http://localhost:8000/ihr/api)** - Interactive Docs (Swagger UI): **[http://localhost:8000/ihr/api/docs](http://localhost:8000/docs)** - Redoc Docs: **[http://localhost:8000/ihr/api/redoc](http://localhost:8000/redoc)** - - diff --git a/docs/images/project_architecture.png b/docs/images/project_architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..5b36117c0b94a109c5a2f12a9cdc7dba4a2d03da GIT binary patch literal 78972 zcma&Oby$>J+de#uj8Y~Df(jO>AfO;2ttg;^fW$})AV^3zNP{g17>G1V*U&k{Af-ch zmrA#Ef7gxf_ubFl@9#UFKQ&o*yuluFK9qGMPOjIZoYVS>%>vvHo%m507 zW~SH;f79%m`UCzTv$-pM4TY~eHikl-K;684RoPK%vX`>ZXK{ObJJ7Y3ne*1?8wt@F z47b#kKggU?zn(#RKKj}XnI^U;@#jp`G-RI8DZZgP|B}B;g|xlxvMtuW>U+tiE>v&0 zhL6{8dCJ~=Bs6HkNqc5_lc}y|!U>n^;QM&be&)+eXo?dasQ>%Nb(Ir)$X4Q=ni&7@ ze@8xe#{(t8O-_D50!8+J{?I)kffCytZ~OB9@g1-RnWNUO|Mw=!B|K0rGzw#f{_pjX zFGkN6?*5+*uuyF*wriU!!^6q0Gp}R{{2ZuSa6aRw-{KJO=CwTMR!^Ec)hWPS<^4eO z({M=Z{6XUq%T2@K&}ad_8ehJ=)MVc)mL)YcjotiF?UDxDj3Zy)luaF)CbVl0%rj2W zZn(sY73vwZV4c>bGW{%FZ&+_wy7l$=M*AgChwvKKNorAj3T<}2Fq_wzZV{cDxu6xt zSR^G`#MSJ)dHkh(vCAsYy8N4|gF6Q%;{=<(>HaF{1aiU>pD8w0MHeSQ3S^_H&W;>;cHd15@{!g>2G ziU)^jjn@_Sd8U3-5O?0RwBiWg=CCp)_to@3ZM-?!vQ+PC6ac0}DjYUC&+jyU2<^@2cz;h#J|IOTB25=XUM55}D@A zoR3%P7^ZeHl!1orM74NOGKC{c;TP9|{WNq>*3!cJ^h&QjTkxXvtiO?gyO7D>T-{W2 z?gwww37P2dLCq+Ark7R*^*3ifo(?mK9^q-aw8tW_(d6=Mc0}LD)2qve9Z&LDt}pP+ zXXB(CqRB1Z1@-f0niKwx*~i>9Pu4M2A<+Llms=3W-K3?Lt8LpAsor+pl!Q%86$6IZ%uO9yE 
zOFTRxFv?um7~A$_HNBbQb@Q(t8xP~c3|!Y1(rxz1ixanUG2Wo%LQLfSB#QU10(yaD~%Fk_p$YYNPGrQoGIBh4-*2a^};Fli0%bTwg#tjSa{XZ7NcxEy~&Xv*UZEvmj9>!&RVW_+G{6q~%ZgD67ovRYgq=5}gG{txqE%!M;Y!+MY zv5}h?I)3vs%KePfq&M=oX~d%zh4*v&oRUJWjy1(hL6psHmKPWGG%RoU9f|**zdTRV zIxaLyn=OnsJ`A&+>}Z_M9|^zUa0?|?Z9Dnm_wh~l5~iANb7yP2tycURuARUShd&w) zdMSKT$`^j&A1U?vO$qPJeT+P3QgUy;V^afD&1?}STe*;)PK$U$9(sFoIpX1snY~B( z^KSYa7j9GD6aMVaGC{Nr*KiRozg`mL@*U^CBT-Xr_192}a>CsYkZx-1J`gpk?|*yC zx-?t1pK#Z{m*C(KGi9_buhWjl@mmf^bZ0$$hjZUtY%6wk=1`75Ao6E-12&%B=7id! zDn*XF(J$6lJM{c&7vd?2hQU6^f8XUtVF$e3L|d)gHFw$FG)Me9XmuBUf4KbSz;eHP z?$+jd_he`0e#WbfpKnYhXVzuBlNam6{H*@KX(D~x?)^H^{tM;VdtiLuFMsbih9ovV z{imV}4U*>J1!e@TBB%5#_q-Mpy=e=Y6;f%7v~0H`9KTZvkA&JBGG(J-*fAZ?7TSh> zb5@4Qp8RGxE$@tRS8W4YP6}dNpUHE^(;gD9s4s)JiLDImO2QFxbiszaMIa@?2zr)1onh8!K~MX~~fpq?D|O&1_>lo4n5E*{-uUm~bAAz0ZGrGhNbD z^{p5CvlGjoXvydMvaNnp=KHc^@uu@5_4UeecELj~7dy?L9ioKiwSX$6*s_8e`A%LC}S|XkUTt*mtg$_@Qs_s-_`u62@Xf zX1`ILTmS762ljYIxUGFlR7v`Qzfg@-9~yuVuI4;nE6_at%GfFutVkUmBuY5%$Wm<0 zHiMR9DYN4MnSh0U$?{ZpJH(TtWl?J-CPmEe7b1nxwpS~ui4$LBC)!ddiR;^RJlB4g z+D+ue>(jyx4pGxaEzU^JxtDM^=G386D3*h?N0D8=!#a>s8>%*z zx?i&=$2KdH)iN~=mLuG^9;T>gXIy=}TMbDXx*O?C(OUaS@0;@%=*_Dm#7mrAFI)75 zGR>xY#cBWkHr+FkcIL6NLEoL)d)?r646jT7eW#JK1()hQ;`u!FD8V@V@ zF9sQgTTLX7jzAD^Y_+Q&ox$g*MB10XIGE$U?KV$8E*pJetiqdt;N!0EloVb>AIqtk zXKCw6n=BB9?G49!b7(aOqY?vBZ#ZP@aQ>B$4oMF{v`XhlEo|Dm(xN19XEJ%C2`uq* zIX_~FUexh*qumxIW@jT@GB|MsmrD)HX8(9hoCg}OSLK^s#q3<8{9&kI0-w1)Q_@dG zf8pbx2i)LDMttFZ>!417vbxOsyNc>Zp(L2!A_r2r;pM4lH!aquwtJqC<{hV=(B4Ul3wI(xd6FC*F8oe% zr`|ZBhEdkCTw543cUseml6p-O2YFHQ6hD$c4{1m&_Jfs9uzk8wVqUb^RvvyJN_yvG z#5i;*nPaOpRxK#XSBSmrV3K_S{iRmjSmT$fm8=(0slwBhua6qWg<9WwHh*V$FP%rb zSE0>Fom_}P%&h>;pwHGrB-Z(sprgN&>rIX{qsCJc9qCaa{oteTmTa78*-#N#+WF3A1}ZUTEx0AX%!84ZA?EYkbBvY6dP@J)@4aPlR(y zSw@Y@vG=EcU)q^uB2?P<130k!MeZgo@oz*|f4*a^dx~;-cHI=oM`v<8jO*`&3SLrL z{C3E>(ues^I-od*O8s?8{fP?MiqqnFev<{n2WsIH0j7$tXDMmfKJvu2+NNu|hB|(K z*(vtMeQUL<&uN0w;p)KS{bFerC7W%n*6n*_aR(GEMU|;(h#UkBwZ2d2S)L}ZS@m~P zRnQW!BtxvTZaeOh9yq1ScwB)& 
z$!THqzD4nBY`60KM0;vPugiSx2S^n@_9Kt}9OrC;M3QRCWUoWN-OP;zQ7B5qPM-`% z?);Euq*|uI^sa^UD&~+SPrlN)^1u8i#>1Y0XL{ZE>s zyK^nXCtVe1Ybc0kr}GVdB2nf#8iM~Pq&2p$WHdZxO>KK!IP>720`mHW$bnp*&cihO zXf8^3mFfrLhDiuZz?^rUaVVr?X2S7d&P~&Gg%R`-&8z@f4cY-&mI_7_>j4=hi%K z{*FT^&kpbzamea{DJhXQ(Q=-Km={n4m_C{xuC)|6F0D3B^<<_q^X$(httm&>nV$Si z4i;K=id0lK&*))WD0UpwwMpzHqo2Kd9L3frc{Q?)1FC0LO`4)#hUn?Idy+ZgBow69 zuZ)ELxYaT0^`tGDvL-QAG3wdkbZ>VZltun(dQ5@nZiikwY}XCXrkLA=;BhR@F5qZj zWeW9yf+z{S#;3bqb6zaNsgmw0MdGen>*sNJYi~|m3QHX@-KILso<}Tr%W#bkv!$Xg z6Z5!9xSnK-wJ+rP(UPU35h3Q<#F_nW@SEYTzakgGqtAKtjx=AuSL!<`3g#2gnn&>L zMxsi&>cA@wd>W+p{QZk0ny5c*l7uNa-PbT^*r}T57S%d2xJ*)N@WxGpHjG%cSY-0!M0|Zjb3~&6=9Z z?Vlb`bSAT}y-{k6$!NW%-wL*+^6Z9pJ$t0M_0O-88GQj)`!fyYO?nHmg;n#GY6bf9 zesP+#q6u6V^T2>=wz@SI63ct#4RTs zGn-a%Y+Ihg^U(2ve(P#Zu4>Jg`{)q#LX@PV0K2C)no?b>worimzOc zy!yDO!Q}r09 zD`WhicFg5WQr6T_x|k23hIv1c#&oI~fQGCS>7?rOyg0IPVyr|FPhkMr_I$lKA?`hE zYNWl6+F{Iyu-*83)l)b0@4wH-SE%xKd+nO_T<3U^Vrs3w%%KyesB}PczQv{0*6M1E z9=>?ZSm{82QVD)?4H~R`vpUgb&AV^$lLfi!MMOgg4Ozc z5uW;Bf%8=cRF+3F{o7Wr46WvCcoNBdB+Q{llF2*cSH18|VHwxCbC0!cW-H^S^iys8 z8_mWUS7$~31m$vV35JLr!``-Jp`UVRns5 z6wdBz_KK0El7ET&_7=bM(qvZL`$n+`Uk?zLx=e6s9U7s^45Q$6)Q{N@q|nSQWHgt3 zOHlSPuHDlf-R>1lHNM9%_4$N!sYUJW(uIMp$-yPXZ^toTxQiB=%0gQsyk_J@7hCQV zpcxzEO6uor`zm8{go?Hn3&1P(&o|`<3tD}yj@+Mo zFcr>_`l|PoL4}}ws=Cu39LlqX%66)&b+|TUq)&T{D;FGZrDi-s6wb|G2d!>9^;)3M zQcW{)kNp1xaLV{&ue|cqLvMFZkRD*w0tc*lN>3|%JEUKqwBcj?!bDqNqaUh~X+unF z_KC5i)Z%gUI(x$ByX>DxErnZa6XxB>{{730oH->Dbqc+@6Ir*3yY}Zry2pFi7`(SV zcs1r8z|Ynl=ImFZQ29KSyv9`K9A8s(9H0c!Y$JHBsgv_7OK4HYjz6}@LzUbjd*~Z% zqSO2(w_qUhuDI4kO$NDz`|6z=q=I(Lf!@QB^qQntKrOf&S=gh*1s{vg<`vLNjIh0} zu;oBD?omdvSKLbO~@9YX`~;>RDgKHW!qz+ghx8@8|HS{M>t{@nR}e zGRy!b$UCc3$0o}9_npRsGqjesKi%8vg^i$U(6(z;sgq=o$)ZTPwRNrxH{@xXekM<~ zIGInrw>C0XqJ&E#{*3>UQa)4I9qY`>oh7N zZzSjq0!AF9JmBH?;^k{7hmb(?O2V?zE={9j4b!!BzHOI;cN4xmYUsHK> z978;8)IP(`8g+s|8irfhYN3l)ZF>Y5EAKJM_`O~;G773i=mBdA^Mc0`*_sHHj{Ow} 
zX!Ep}6kvwEE80$Dtj))F6)1eBacS*r?|tfY=SQnxzY?EzeJ`V3Um!&eLts9mBQO+IisR#)Qc1Du(LKTT2R$VDV;Si&%ZGK^(CJh; z)i)T|gKtlDWy>*~u|28t>*u%Wd<8k`y7BK`T=eHC@IuR^GtJ-AjMcN0JBPctiek3{ z^zj6;pD8p815JYJ=TsS}zO?G}18|!(m`W_-q8eSYPPH$Z7V|I37%z$)C=BKjA?MdG znvl%r(lNRNIFw~>oXMSEjTkD-=wC_N|09}RE>3un&PGwK013wBB2_+2i22+DMzvho z6hX3g4y-h&wa=Hzk^_ZTA2uerU>SpI$$t zWxKyg9>0CnWig7z71PuDyo>T|sJ|)!S7Oq`?iYnB3hZU+(AFVp`I(rnNgU0yOsq__ z7E1hvS6?E9w_`A^h3$oH$!b5Q$s5wq6S*8KDKzf-NzPS`TZ6&Vmm4a`aU1?c88b!d z>jioeovY9h+ZUwLavn>Xk8AgAS3aucz*`f{SB+a~IF6zk-V@&cLfhrf382tc3=QiT zR+0Fu@HY3GsRup$*2i>F$k~`;D2P>Ufqgr;&s`;n7$xO-CBEvYPP%w(P655n-t2n` z=$h(4ZU<^FUsaEV7xGk!lJ-a9E z`cythvARB>?|@$VXU2$%o))W@S4hTuHfmzLH3=tG!n4lHblYIKHV2*>F$;fJH0{|t zww}vSjuuF`IK4)>EQK-nK7oHLzX9f>*>UCEj`h5yo&;bE^Dva7E_95GD$An34}j@ozgDQ!Eqd}AwcJ+qu^(;(4Fbk&pFLyeAfcng zR+!OkrzmlCVj2!y(=yK z9S|i>45RBsmqTv{8#Twi<`52q*vT^ATuO$tk1JFVJ5$A_cW@{fEi%lD!%sGWE5Yqu z1g?az_JgX9@~gsnJbGeO1*(zmFknq;?GIJceIMW8pcKzkrpm!<~0O_u0qe< zJf1f3<>@qi-gLo)CQ1a0*L>qp^Wo32c}UP>vWj1mwjRobK4*4%fpVcg-iefgGE@lB zZ^aZuj-1+#;p?~$RmOb%%7I=Z>-h{C$xHx7IlNR^Hes!amzXM zY-3c+S@^vZbL0>{`>yEyn`<4=_Jn4wacyzJ5c-I0;A?3;mS+-l5rk4=67dZn(f{!{ z@Gs*OvJqmvYfDomV9z;#>K^cr4^yg_ML3Z%_|$yj-}Z$(4f4OA&tBB4q?rIZ0S9mh zzy0LB|8j?c+HRfGh4Xg^zepTY331`4069>q*g+0DsNw31E=m9E>pWWye!OB63-oU_ z0&zPJ2N?pPRF_~F3r|KIgcR~f+(H}R|+xY|2+P!!}kgu)9pqZN}*LX=QXG~ z@!#F@y^IGhd!Yui2Ue-OCWsdZp8w*O9e=zQke&MgEL9iKo0LRpzYpO^C@H5sfr@Y( z@j3XLTKt&2CnNx5sGZ3M<=Qv~EW`GC?$3$GH~?SLhCe?2qN@X5Y6|5S6y45=M-X+_0cRxA2nR=Y6vuHdo#ep1Q1&@qqHbGem?^JZhd-dIu^=XE;!xR{pDF1XRZaDp{3yg;E6K z{x6TQH3_Q$0W`&B^;axLt=Z(tK4uZei*`mI&%@LCJArd5&4lvm{mW%ugb{=JOePTu z2N;GI#M3@N zKs9{%I_&?rIe#>;F~vBD!3dGx2^I$7Q0OWc|6Mh>MD_^R;eYD~8tL!lnuqZL#Or%y z_=wV#-~}ee*~Ydwad;Y{Av~I^{k0?e{&nAHvWO75Q=kJ-EwFv4bcG2$OX(`ahyWRu z58zXNHGW0^j0InR^B2G`#H}~guXO<^zX?*J=@H_{!V1v$X4@O{@?8#_$5w_!x8YF= zs#GC8)>^o*D?%cnNrMdo7F?8msWwA<;5@gi@9Dzf1Vco9FZgoMtan3cM$P0C= zdw16F@P&H~uNb(eivg+|E?%&Cd5En|8T$PEFwsq%;5TS9?cu9!+y;WebDWzvjedM%c&(><6h%hmN@a|eGCd| 
zYry#AG~W@fB?WTN@Sg+eLu;Z+HPl7#6&@fQGN24yI2{Fk5wevhLasFQi2vfje@C5~ z{%}u!S%btc%5yJ-OxiAJKxmo*q<7>mslCfVJLxiMI)*?4e~ZQ^<}j5$jC!Y~eMKIeA~k_{A|uS2bRO^WioRPp zKfsa`D3XUU8p`+^+U$*Q`|3Z1?{~SNUF`Dw_?;oW_jWAf69c7>@jrdSo9hR2=EIkB z_V#J{Rb^~$%-2s(d%@m1A=DTE86hvMxwWy{`#o0Fc`1uWpHyyrM7m`e=uTa%)}xr? zb}forN90jkF9bW09m<#@fyH&}2m;0?958!}pwq@w@Tc=I>`CC0u~$!{J=B5q3C?p(I;ge6xj% zFv-*WJ$o$(RoGv5;0NMh8H#Y+{JJ*h{^PU}(OgH4Sx9TDcK_?_J7GfnW?flNjySN> z+|_>wk>LaVlXe{Cevw&^)L8rtr=yVffndn47vBy=6{gnhd%UwW;=BBhZ3O>wX|jIm zg|X%;gPMQSdguYBYBS_f$nUn{NLXmP?J5}=>HK*2-U|-XJvnw0 zs_5C`E4(!1i0M6s(-4fbk@@G!RFIgeAbbp0I#hG!b^ti+0=~t!+Gs&tc>PztKd6KUgc`IATmnEq$+|L4F!S?xeRNsC>pG$~oT%tRH zVv*D=_=iAEM$q$<;f8gqukf@W!t8Lf z;ggrWxeC6%zAt;lmk9r(ACosIFJ z*J-_$47zJ139niE(2Q@BBQll{kw+QMI*>_JL8X*8k0_lGsX%4Ael$IOkceY7j zYqDDN-qTUZF2=;FbyF)B*y4#wNC$c}vdjPB*`RV^ZOG8Stx@EZjnLR2fQov;^t=XS=$DbY_kB24@WvQ^Mk6Kl}s}^&dcUWT15p%E2~)CO+i;i zN4DY^LFem-Sj25OkHVi&de2`Ux%5Rn%V@CbnpyYC%FRBN6WA$AxSyug)SeAkZH0WV@>h zi2d;4okTQs0C#aV>(z~4YVMZrTp^LL-D&q9gRl6YTrNb8(Sk+EkRy@U^23qS|M}1o z_7vy5)jL~^L0l7OH`T=_yl)~z@7#+N;yTiQ025;mp#o9konr2X959TYJsa-$S#+sG zPqol-?!~g`+r5ypbdX&B15r`>MJL5i9$gVqFL5ghIH%}^a#4OThD0S93D^OpW783y zf9yb(9PyVVja>Z1`ck(=E@7rvn5}BgB#zY6cH=o5e7lg{_=6(nF5EF&-PAz z1UXyA<0qZYkUplrJm%?~$@{Ty0_wVp1{_;Lb=FlrQ5zjQkDlqQgXqYGzb5~-z83Hkb}0uj8h}Ww(2fa=%bCD)|xXX zsM!2Kbpc}9X;PjDh~Z<`hD)~Ak`^IV&~s@TnRVx6J5T3VQPG_{u$X>+9t4V{Cy1yR zI4({l>I72i?LaQoxI4#>3faaZt~RbmGd&B6teYB+c72799(vAvWDyM8wE?;HVfHA{ zy(Z9Ou%TG$X}_c+Ex02Bh{TCi{Ubcjz9YV6y*-)JZEhoZH+B_>|B#fjZYTh&3!oev z;e-;y{A34c2>TG#8N|fQBCvIHD|yT?B<7BIKe>6J+v6S{gYXl0-oQVIX_jh$1LZyx zfbD{;bisXVLy<+#@;TE4!htfkflMIng+nvu9Vvi=a36GKY`wzt*2h{l_g+S#rf2kF zXtuKAKD_)xX2U`6)Ey7*>Oe7pfAUHlpnn9%t$DCtI%gHK(jja^ z7yJI0+-wCLwF}H}nz!#f!=<@5<8;B;mhRxNp4Edp6WGY-{}XBFif3w`5`=oD~x zMA#8)&4F}xh(PNSC9wq14Mfe0XmwwMLKsm{Cai+pOyJF3fIr7o7LT`A6@buX=x6I= zzeTI&iETgetu$CPUk-wue${aY&5l}VZsuiSNCSfMQKU=R#q<1csUOK#G8CS~LzmTM z7P8y5s@trRwDQpHXTxmYCiv#vsy{c2rzWE^PdcBLgeuIL3+8FdMva3$RaRf8km)Bo 
z#s-8e8c@n#tCQ?D>o4vF-yI`G7lJ9`C*=TbnQD|~vGd!*tk+v$H|<4)$d7Bw=jnw6 zt&gFmx23MAixBVEZ%DE){JuBny!JblQ6g#sIs~L(_-c$*?B=$VMfyq*c|My8LAkVa zj8VXzXX9Xq$UU4v>giub^1DLy-$WNkYDT;(lE~%xo|*KBIt{M7Nq>;T+o>t~rad4Q zU7$1S`J6;$&Cwxh-XoZ#@X70F7g)zsPP11!yDJpzYfuPjaynqfq1$mIfLi1N0GiW} z%ml2yU&oRvyQO2yb~;>ma@_F^kd?#*_%~i+lMkQtRZP_q&22~krszx^2$+&Vh;MF6 zigX|F+@pCw%k$BUWBO$p8v7)vEjX6r)nzwYmVu?x2hCk#x2pc5CJu({D6vy_`FF4p zEF`+`;llrvmmk291zgMrk*#JNBnjE13g+IZC(~1qDYKYGo$_=D8TBgSxRMF00BgmE zOqjxc=NVDwU*FE+ULNDaT~_FIZ1rnLr?8&{denYIx@-K;utUjc2z1sp{Zp~etOPq?VZ8y8(9@68>D+$%Eyw`(7brn0L$Sv z-jcxYvSLX9Q9jPz#eUC#_}b`ctk~JUTANlu+gyo!>!C0EUPAV+r>{b=S=|fJ1XL1j zk+lB%`gXbu|BGn${?9dLeEPm6n@ioDJocE9!yDL(8jD{7g76Z*pxV**X5h{Nujz&Y z1>~dp`~V~dzI6M>8km>{c{07KiGI7qhAO9I_6s#4mJcB{o}+bx_=vBm zg_d(5qct_}I~A7Q0EEEipW1-{(ttq*9I;5@(=~Hv%#Q#Z{(_7Cx1@duH9fxKTaE(s zn5BTy)MTeke8v@MtZG}(c(tdhA4+m($9<0rXLQLaFTeA1c^3m|>peLoFkpnr8X(p`50#WF64fm5d|*Fw8GL?Bgz2xAp9 zw$!Ph)N2WFRb(TCTeRcDs6#o3Nx>6<)~0t$U7aV!GR=^}u~J1mBPvxmR$cSF!QBY7 zI#eB4k7-#9MX#)>E%w?Slwde%0Ps9`V3yH+gz^l2Y5a3}roSJsxk2<)*9<6c)1gTC zf>{EDu*;N^xW^!5ofVbpeXvhAjggb)_HGyZ4eB39kkot(0fY}mNdBWcU%52^-AHyd z7oq0$O|=nqI=(i!&KZlM#Shfal6t(+lZzAW4$oU{#5Wh-aU6`2$RB^n(T(ezAsJvr z9f80(beSoar+(M2*)DBXs1+-q7_ag8w)&dE+GWfcbz`ZQS zhUnRYZ!!?w9q9D{$6fBZWQ`=D|IO!~qg3&`3+P2vK%VyG{AsZ^IaV#Cvd*Z^R!Bt8 z0-#3qsDry?qsbhYCk7vEWkJS*3a=Kc{;!dG3o18kqVl(?q>7v1-*(NP@kqX>j^wc1 zh7po{8avL=#2@3Q%KT4CYSudHH%)wb>^0A>7NtI&cskYkG6=hET}MBD3dVX>CQ+x# zy6KNr6(lTeLLWcnwMkl9zCAx)jEL{>rt*1Gs6K8(A1lOxE`gb`oW57^k2XX`kc?Ae zoYOq<>1`D}Yg;Wf(V{Es8>X^Zqi_MrcpJOH0Ls)FTTIJg%^A>q)a2N%@ye(J&PRdX73!80 z$Ua=e)XB9gHp6)Xn-BTV|5=et&KhqbM6JtCFQSXBJuJ);CqzkjfJZrda z{Vch>kpRxYsTut{M%_g2tK{OtizB^EjN^}H(k<7et$h~OYHPN}UY8Q|d7G{{Y6Xe9 z#c%TH(T%MXPQ6c;ye&%Rda?QW0oO5ALq3?WtSX8q8p$@BwRhrb`BB=9*$NNRbJC;f z5s%`H-P%=BC{|d-vrZ5wky%%6Q;XMou#{KYEiL5h?~0{jyxi>hMvAs}i;FH?1D+t4*g0 z`b^kc3L#p}w@PS(UwiBazh0k+c3Gdw<+8b2>HsACF|J6WYo7PTL9SErfkanyF11@- zcU?DI&wNnP${|PD&138x$yF_0yDCe`24t4@GNg$$xfZ!CUVZ&c;uHmpE>(18J^Ya| 
z>%|a#`^g-y)K0;-Lg5%A`MxxnnAhJ0{jp~4GBgSWJeNUCUH9Q+BK@W}cd-$0tpc1b ziRBmd2VnGSi0ihP))dsFInY<+m93vgr54O2%9jAtmX#s#$p-?`L_nDpVnn7$(3NXA zcudN|0B{}qd$mGEZyG~h7T8TZOUY)qx*XJX-QrN8>@C;*HN`D6W1=HwM~zE}7Zg%i zMCot3cjn{b^)S9~2J~XJ?wI;WXku&|*@}ei3x&;26=_I!JFh#6PpF!^V*2df_2F1c z8JsdqN)#OSVEP)PBX)@v_GfDkgzj=*UXr&E?|gt=|L)CgifHg}FX|Kx>klJL6u_b3 zxUxaPo$N=+9_eEdV2%)yxV?7bIWWT+bgTW`*N}0CK=x65*Qzm8t92hT1l|;~v8yJJ zaw%T`MRUM3Wjiv$@NH*=0gd3-_1{c7Gh$M-iq`ze5dNfhSo*ezC-Lr4#(;8)I+uNQ zT5Xe(0c8~JsktS&+D>ZqgSQ;dgbSX&BYz^3)eF6=d|V(2oF+~}YSX!NjObyX=>GL* zD{~ML=^*Jp!!ESC_7%jq138bN7hi?$bL`yadpbeDil+c7a0nVoOu93l>p`!zNUBS? z?t5!y<k*=J8)p=aa5;nYP8})AJ(a!))OP zt^s4saRkiBAA$FiLwVm{ahQT822V5k{=AaJuY8YNb1i_eytW42+!&W`NSIQBa>Cn9 zfPu}gYNXtz#UJ#jugmr2oeRxKlF~iX5Pg%D;VVFw39p$&g`}iEbsJH}YxxgPWoKV@K#bb%Y(PoXsOdw z%iYNqjd=)Kg<}FK*(UAkpb2ulbR*^DEVyd{j}*Hh9UR1R#hC5^xK%)r!B^|}o5F=1 zrYGt36_IfR(3YQX!gcQZlZ8*wHF&jJhErUAw%n`&(~Hr?B}}(D61fj1`Vsf2!jT` zqf@GD9@P0RZ^WhD8#^mjqe{shPM_gD3EA$8lsW=br;1>z}4ho zifOYQrZFCtJ>TzlD`ICt7qQFd)QAk&zEuUAe-;o712_*)gyVzHmyrXCZUjKPHG|o% z_6aXi37X0h+V84AtzIbi?!vQ?~q~ShZG~B@&`F3PY4J>HOfPV-93@f3}|_J znYh&_D)!6S^}BD(Nr#BJ7PQ7IK1LO?+@aD#?DP9curKEEZHF3!emIQPyno{r-6j2@ zMrcE=Jpu4g1GPKCVZ?Sn3HYWC!0K0Fo!iih-t>f;qZ8oQ^Bn$f4I^$Mnez?h==4v3 zX}Up7m+ZE=_GYSF)#|Vy1j;F(dNQEO_{dYg*#K3tNbXV%k~2Wr4x)hP@4g&Cj?NKr zx#w%!$8DeqZOeM7wCv%=rIzu)WlwqIL75Ytpi!&i^odyuHC|hea+imz%K)kVJp?ES zew}eQ+06qgdtxcALIo+BHqKMX};M@TR zX!h?xw3_nV!+gS9Y)I>dtY9nodrw4)e?a+WHf_BNVuZ33j*PQwg(5}`k+PHDV&@Jx z0_ur74f62*iEbdO2F-)o4?vb};UZ?S1QQ8Wx?f z?xfpk!l~jBuc)&*M3@WS8MOzs^E#j?vJ)Kfmuq2g-4Gc#he>Y!N%A7pPI2NVwieGp zc|#KA`SWnY(|ka&f@oHLidNJhq=cZZCyZGW?y{-ZD3#)-{Y7+}KH6Ud&g3XeENUpp znIx^oBuuX(*t1fsY&vA}Lmtc3FYg~i0*XB(zk2gINx!(7UZ8dI_XMbYn!gHPR733o zNlsJrQ+A3wjnlcRQQ-r#5`d}N~m z?_@wUBS4c4hIm4rYzC4nV9?|RjblF$8;AzDTnf5-Q_a&;c1x3;4=oPB2$()8EApg= z#4tv=3P;Vcwa_fT1pjG3*E|XRXBLcBl9{*{z?YQgfm%iENuAlX9n_6Vn`A=obOkeOHx#I7)}W=$Zfa9a!OgWxbP62Wrg ztqlrzTV0Xf0T65gER*072I3pDkJkmFBiCW>@*yA;E=<)3qL0AAP~POfrA;hZ 
zfxm#M7@lA+4d5nI;2fsuRa;-R9UXxnPzJh?n4|asO=U+6+6(_Z?6e z;ZB9_QC@6Da9D`-Lv3z%mz)7{bKx`Ybxo?LNmNV~HYtOtgkm8xn%&5q6vLg2h8V<3 z_x6qAVz4Pft;sMS1ctRRDC2oqpO{20XL(UG#(+)s;{eZqiuE;y!YrMnzId3#e+e(k zkj+(qF$GD^jzp(Mpe-}Ox8qz7G#_=*Z)U$e9v|gvz3paEI41?*gvIb#A~PJ78Cnaa zrJrQ$K(muoPF%rct;8^xE*DRT-tRbJ?YsT$;?RSBL7TtYm4QQ>sWwt;ZaaC@HQZ znndLznLRIa)d@n8yInpob{-^Spu?ivpaY%Wy8@82{l`~*8&2NL91ZwMyKF@I_lu7z zRvZD~P2&b)C(r5EOf0De0!ht>0~V+j6Bz!wD>-$DlLKp;|~27GS>)#&z~ zUd||j0iDjL9d!E4a}Q%Pj-%=hV&eq2{jKlgt=iNKJ7Grb$Ds?#!3r~D@2}5+#~}(8 zGkBGNIW7_zJ5JfFoP6)na;~JdF^JgmHQYyGrdM`fA^FFrhs+S|OW?(*L+^}ly-<9_ zfIBj$m!w@%OsKvVA-~XhQuFk#K0iahOUTzNB7EzOnhUmz@c9miOruN624>Iq9nWZ> zCO`vstKr-W{97JiM>2NrWy;%s*R!@;iluU)ve!9kS`?Xg&HKftr6XpkjhQ*@fn{rz z+%Ni*)N+pFmfR{kDH_X$z_wFfEAY~ep=el79#aQZT`t=b62+)4)9*XJgyEse<4CwR zGqGStIvfGe_r6sSA2TA zBnnCUynz1XTJSpAa)j7?ZpQ$ly-<(s>>*1`(R}>hN~m z~!B$r=~TXSx|!#Dl{NC49I&j@fqiNKb-2$mDhM)5ph~yd8bwRM6QgMVw_eN z;Y&-Pdr9&?cLYD9^(~!frUkNh>@}%U9G_VnpBEg$&@HO*B3(=VY@UHd0ZFnoAN3#} z{T^r8>1Q=E?3*KYpF}KEVu>l#huME}eNKvw?hkqEwNTJFvZq(>AW}!K2R(3suOuD> za!33c@pKQ)QwY=7%A@{qh+==mG{u>04^ovZyk@0W(Hh=EK)_%F$WoJIKw{GYrud!I zLU;adxqjNj-n76{du2hU_z>TO$*50?t>aPU|lSQbQ66`BmnrMAM0ePCMBMTtMlZC@_EU*E`> zO<>@nR>p&y4CS0!g?R|?Wfzz21u?K7?FhTw=QF-Q$6m4`zBP(Qpm2^KpN~B2r`On9 zmHZSd4uhJ@s{@Hicq(UpkgYaS2P7a7;1kk5EZcn&uyl%wgK;TShw9U?t5XaL$L|y+ zsHAN|ytm^!feeB8A0MT^kLj{okK^ft);krPzUWHZb3xa%yQ+os<4sR_FcT^xN{eiE z?XR$2QuQ1%fL%cYQ?M1_8F5xV{Oj3Lko)?lph1daz5OBE=ojF28H*jqc61a2Uop(C z3yKJl(2B=()5+AwBNaIK-FsCbsh3A?g*G@7VRV>K?p^bqwHt^Zq(qcH_F@|%x@PSo z*GJ-e#A2;h^T#%`cs3Ws)cT7e79HcO(7CPieOqgI9*KfoSHJPBHTBYX$_+g!Bw#m9 z4~g*OE-J8e%&=#x(r|Hc|nDsvW}+O#F|3vFFukzHuhD=O?7{n8^x2u4Z1~;L=eI7JpkUz%%xG`3_FLmbHJ7ULaPRMLBjc|^b4XF*EXydIcs8XX*H+`9WzL-k zya=q z@;b}Yz5I@|Iv^@+@A+9LGB1neh5mK;;k(v(b@$txQ8hn*(&|ceo*xJ(YQ|+j zlMa(bVJpX|p*omF)K)9uug@#Rb^zUjYnz6|OV?j&X^RxKuuBPvi7Q42#W*`$VB9u_ z=o54bj=-M@t*(@0(O-;@EeIByNX#KP2t+X}1(iat-7Z?M+mkR?@M7cw)IG+{!DNV% z?dK_l*^gg6{6^HicOXZ|4^3G8?w_{%K4CoJT@pXW4{~W2ixfW|9NmeV6ww|Msai#v 
zo#z8i`+ph5$JO~O}1jB(}ZL! zh+_vSdLE60EkV`Z4V-z;DVN_|csXsu8uBU{dG$l@+NKp}H^Fvkt^`3XMyOD2by}Ky zDC)HE|Iu{a@l?M5o716V<=876WK<#)$~snLWS1F*NQ8!+y|PQg$oL=>WoC~(H1Ue{Ie0<^D%>hnR z0VOI!px*E0t`*cH6{B;!`SpEs0n*8*`Tg#wxb^en`hoE5e>0;542E8DJyT)1QwnY9 zYf$}CcP1T1W3mpuV$X$(Y+lyDwM^AVbV_I$&O>Kk%rso*U3OFUGEDeI;%2f-r`%-3B&~$uVAB7#gsJGq2 zw?64!$D1>Y3}-1XaSQ(czW2s5-|D~Y_&?>wp`Ml`Ds%o*T3hK2UW4$V9GtZtj%0=q~%0kJ}`atiJ%dNA(9dv721FK4VCzT`H z22inR#oBAd#%3RTpR3E|3pBM^09cBln-7~yr(Jh^_lXzYb6g7Q&Lt(jQC@r3L(pba z!qlfVz-CV(4&VO}6T5;>xdPCT&sR8`J-&Bl4_#b@2BMCh^F(`v>)1TyEug|xlYC)! zZ%Gi(1QA)W#%p&V$-O*y|ALRSu?(zZ;5#w2kNFV{vpWdj;ozkUuOHr%0o}||tD0L| z``dB<7FH7zgirxw+i)c-Y@z2XTcgR}sjaoar{5xGEkC~J2d*m1Ybd>&igxT{3j zA@aY!O5F4LDrX-91ZDn=b;0>#l~YH-G>aF!Oqm`ap5b@jsKd zA%Z=p&izR)clpv+S1ZTti^dswe~*TNB^U7{8fUyYyaT@xs0UYo>pk_F5|X`O?fTc~ z7r^*I;UJ&RYjV8ihmaOvf}fazBQ*~)pT9TT2RmawTFY- z*%IO7OQD=0JfBSKr&qSYEz6_w&GQ`PDtBLDF@d7-?d%b25#NQ9mCk+BT=Efa=bFU( zAW&3AtSBr_ukbAYZJ%wpm@(0i=eb3)J>&aiVk#wVa%GCys}qh!&DCGEp8lA|2w%%D zb{-j;6>Duw!vQ|=`@4@i?HYvtXy;07``XpIdCKj2Q@s7u8p$7&*1Ye29eYl?=lFtig^F$wsZG#oRvd1;oaD*z2P0&Jfw2GZ(Hk zI9#!=h;1MOH*px_2q!Yue|lw4Es8G*oY(y5a}YY)vM+gn&F0nyUF$34aOZW71ov7m z@^x@|sVUV0((bz2sq7d;S8UIM5<|4P)vXe;VJc<`dBJW&>m(u1XvO=CEa;U0i(LS0 zEW#?LCK?oymkJ9QeVyXqC__oO4w#o!butViYFwcGwg*+y4YP#@*vYWOLO>Z5|E0KJ zM+3c4BfQH5vY>^>%>W}_t$FAV5W{tbEM!Tj^+04|K{@aJj|WZJEOYfxjK!P5h{sSf z@ELy~Q8wuR5ux<+3KqwXYec^wp`O&D!np)FEgk!Mq0Y;F%>;1i0iLAMy%_S5>cpSg+X94y{{poFD1>s2i!P2Jz_N7gDlR7q zVnKgeLXCi=C6tawif~h4=X@A;kfObw0+W(T(0*RO_sIXU{37DTY(mIz1_oOn)cFFl zX(J3of&m304j6SUK|th{41?&8;Cs;E$8fc!fL*#1{Xg(CT z7FGKnDq6;XR^ixIC8|vqQPefm%LMA?DuAlyFnT~vIv!-yHAk8BclB;9j4J%mPudf0 z5e;d6V3xspauzt&dZ~HAv2Xpvjq~*ld(t}i64C?NT&tlZM%jr#*7FX@Bs5K6t3C6X zO7=WY0gOQL$KM-VzDRp=O#ty`t|mp&KI&P;7({km^1mQ&Ib9H81JWT){UGS>lC`>K z#~ODNc6m{`3#EEFB6b5QC^@st7M1Goo(GR){*FfC3*8Jwg}J-P63WW0Z%maF^GwVQ z_Et@84q$V?-X0IT)*Gnz=K}Sz2GBRzVBWrxmqc~OY#gR)Cq`7jio=Xc(k}AqCGJ-+ zy&XXOv8RtOmO_m<2zlV(cY#mbP@q^Ly}So7cZbdjr+q8kVfMO^sua+0)Z%FcSiTj& 
zD0M8adHsKB<^ply4pOou4Z^?>9XC`ohk2L?7$Ag033+^_j0m;xm! zIzL1HVQ5%aaG|K%jX^A+C*i{$2AES5IFB(Iw_3PUkG#C9NWK768l^>S2)IQOVSRLI zn8u*C7dsUK0$n@`5b6|l>dK&aaD?1>HtNW67c4)M{?v2Kg46Tc8vumoP>lZ>2KTgI ztjT{NGg&56JxoP20Vf<(&cYr*tgY!lBlJ~(=c;XLQ!tCrHkX0M!W_+h!q_fq(lKAz zGhRpF@wHi3+*|;ij{8)Pq8q zvmO+E4@Q;&a~%S@=|{{W?A})>oW8i;Xw!swO#X{vtkfm&AVB|Lg%&|qRTD!3cjVr- zc;1D+Fm{*%o0ItiyV?W5qS~?gUGcBdrU(9MJBdPA@WPqhX*)d8at#O`9zmYlBM6pu zB!6gc4x-JY+5^zU6ze?5oM1t!^F!5e)($B(>4-aFly=s$;dpi(I4Ela9M>uTxa3qUNf%$!89O8f#7o-}D=3KkYWaiBepMf9xWBMVRvY;J)728z}GvZf|9LX$;Y zY5~QFb0Xy8sV2A6#sIaIStZe;`N~+F;$O#2IPI1koF_{ynP!-!OtGb+nZQuleOYT` zJS_!sA2Aw#h?Fn&9%dRW2mgkWUFz7t{|F7FA`Ig0yz}YbG%HF2@qpOkXr{88*`1L8 z=K}0&JdopnWF&Y_->&hcW~jNkn@eGAeGQXfp`Uo}Bl>aJb@$cYKRjL0aG-{lL`d(g9V|edt zC74hL{=QdL6(QUY`sL2`3|J6`InE+#Bx3z;TB*YIhj3F-9)|)C3IK#0d6N? z2)J+lk9;1P7pRpL3w&qf)Gi$ZR%`tfqpz+|K_V&@8>blG_l`MyCYE-+kgE(`8jyg8 z+J))s2?@;%_+;y#2OI!2XZn+Yq$muLBKFP^@4>+MH>R>e@Z*pdnlLql!WhpNW{d++ zKYa^cxOi3l8-$QwP{-L#G>1hLG68@2SNbdpL5B+A{DvyrrXAi7i{5a#HEI>Agugaw zv@r7C;?MlM@O&Rk&&3@=_qdBVFO2ph(Guj!+^{i~dg1{Dpc+zbrKzkrs8_$CUjf0W zFHbL{nEO6?z_-2?l&IFt17Vb>M<23WjSj0z4H(KTZ8&m3`)QX)g271ABNO&2+%=E5 zAlto|O~8vRLOW4Y==nke-U6s6!3)qV)kym(8NJ}rgBeV&(Wi}2>j7& zV-59i#6K~$6yqba1Fz2ofIoSm+eGs!fCeq@hYli>7b4*y1+vLxr2uNhUp`uMW<#?D zFuFcx&odQA)S+PZ%7CPL7(Z5ItpsC|&$&0@|B&9m9=PLogFR(|0{M8c_C5pksjn@} z=gG11$iZ<4C{BfT9rr7)kDNvU29;nQL7*3Hh4SG}ySq3bmTX={;0aI>AF+==!3mwP zSL4X{2P%|Rj7^iJh(aJ7G{H*9 ze)3`XUvMtfMiW4UXdpHDfm3!+bXzVeARy}`b6f~fiza_{^IB2;6>d3?auCo&kojpf zawdV?Ij0=<-UG#LogEwn5c8c;KlDF&`3r&BSX7@TW6R$^y1u3abQ~8TDs`MVIko4_ zHEtVp^;0f4>yH8upwZOzgH-s9fP*9MPzLd_nAPxK8mPgG0fqySa)VK442o^-2xpsW zIgblv^!ocVZw@H(rT>$bDh|aKhY93r$#dfp_~=6Y-;$8ER$yQe7S$V&oObzap&1EByKf~8nt~;U_db`*Wk6-- zw{_|W{OmXv?7D_iEW5nE4j7Nmy^XuA(UJEhLPb~%nN<#{X4xHiU|6SRD54scAaTE% z*1&-3?xFjXPag<9O_o%@BI&!-@FU<(-$G;D$j>L!Pp0KJpOi&x`dan;(eX9?HM%)l z-&a4sux+BM5=l1WX}l;mSDxq%wWP|xzLRD+T#z5%xR7Pewvo6BA2u`)kH@HxT(OTt z%`WQ?_(Sn?SJkz&L+Jcc_hS|K;jjGTZ6V-?B?*<>ouM}8QlJI>X?1aO@7aN=3g<_! 
zQSWK)WjJ!F(Dn>2$}|l3bc_L$)U{&B>F8_EwPoP2)Hf~62Z&4^kR2zps6+RYw9XZF zi(x~Fq6~O)-goa_p4$v1+gVsAxmJ#NTmfG(MQ)P_28;@U8fOV3$SLp{mH^O98Z;1* zDvx{j^AXT<c+hJjwkvt<^dV-lkO}u=$OhiU-(W^c~PBmN+#R8E0^-*ZBB z96%arpod5xD?-XxrQhuW$2OqC30b_Yb&M6mph03-ToeO@+}020RcJfvHAyE0V8av5 zHRzm1jbB=NhZI?P47Tnxfa^xe?#^1{6jXzn8(}7}VAl$t@4)*@0u#pfnX6Qk!$F{? zt%>7Sej4$Df?_E zqic)^*TDFn+u?fC@!j;gQd}h{ZaBA&B{tSJRyQvDK5z71o1Fe;>$9fkc_0qed_$h6 zzZ|dnoi1)AXS6+1n^9@)zoqbbgY+9NWYHS<`CsPcgJrJ|$R&YK{h;~GqLl4Hv@NRn z+#oZg0>d;Fby>9R#|f1oj^h9J5+2iL%JT~ZIU{7zUD97VS5D4W=Zp9Z1F?1!>MRGoMjuJM()3kCqm4@| z>VZ=^y>EICs1R|?2gns0p@We^X|Woz{-<==73@~P&N8pmtsnZE6io((nZHkee_*6j zcBM;nV_tah;*fUjt)GUq&3Dzi!BSY;_?^)ri@H4PNrxxyXRhi!oeK-ZN1j5*C1772 z{KoJi03A&~*5{D4{l2(VzM}pHP2GTQ$4eE8U8KuhNo#!iS5B>w-yCOiGo(@2$rnO} zU(b}og1@sKKqJS=xf*mP4^>Q$+BJIu#UmLsq(bIo1u_kMWM;(Nyv=$4)t!+|rJbIe z-3mQ*Jtr2V<^y&&t-o^o*!Gn+Qx>!*tM2^oqq9+pgDADSVx!uP)!2971?ekn6|Pf5 z%d4lH~g z4{eX9MQjByI&$G|o}k*p1gqlCpsJvl>FCC%)OC$WJ^cxk`lLUqbnvGLNw~dkaG@z<`v#Qlm;6qIF%t<&RsY9oBp$gBG@VTOJ7153g=@ z%}tI-dn+wS<1LhFH7*J#4kTzNsCKk?94Y3RX2_O8avW^QK)eVBV`|uJAV}jm?oC*JaL+U}I?ISU=paMn z3G&@h--W>wdP1?IHvDuSZlMc8Tt{GEn=Py&>ek;YVS>enNeyHtzYmny(xo?bxC8$s z3GkFur_ayBo?1BN!O>M$A!6$qNk}T&_nY?j$fjRgLx{SelXre*MPt%0#|?F|ck*q< zKol=aU+XXc$D~xwJ74b#PgU}r_EE+Y2X!a(olDoXSWB667#L37#jQ76YZ@;PTq$Y4 zGjJ&ax#mA#jD62>Qzei>@$tRZn+|{x_NBYRq^+eM_vL-D0RU0qcl$*bRY)#OF9kVv=lu3;wg`F_Gv}Ku8141<&O_oieIrq+ZVKy@x zIcpN+7sX}pCixjU|?37(~?DL>@G?|$k08%GFo9Fh{=d8{S--!->b&|@N z!)9{Kb0aWtmp;wSDrMKYD8FL<&dbHFZdbdz!o4*wvCO58Lw4Sw!B^^*q5XW$yY-3n zQQMm3hpdgou059I7W*3HEI1xheA^z`?sECHJ@4!=yn68+$6BRjnQxU6;+PzL^7T|p z^EfC645{zc^?{mYV$elcVFUWK?6&24-~UzD5aK5UWOdyzQhy zZPOa0&Rzlqt;NSQDy7?}nNclt);ud>BW8cC^tgj#|5N?r-{th zt5^2ecQPEAn3Z|Le{sYiKdH_%IhiH(M>n4b`Z*{cl;o-9RCmV*I!@P1fYkMx{X;RY zh~oB$)&!c;o5O50l3Sf?xBBa36?)AkBS|2tv+Y<1aP^xd?|yRn$s?}EBZz-^Zi^j_ zTZ{R7P#o)aNx7~UK$lNN){W(9^mOF(8%?3~zegyZy_YZ6w1_rBe!h=l-h3P50;*n$ zuGtWTs7Y+Z-y@JvDADWAu};|gz_{;_P@vBISuVj?Q$B1~99i@^uLnLfRVvq&f4rp> 
zU%$Ycy8q=EB-t8e+meo&ndb#JgcyUzZoWGH97k+Vq|=qws6mVZXTGcI6L`|;hqTW? zi|y)WIQ6vm^`)SAXNpp%8!saE@x_b0$SJzqpWWoqW}!1;AtSWmuvYseV!LkBH{h{J z?CzSRnI)7u(om6L6Slk3PRi4W=5!^bTYWTn{vnXYToUP^J%PI`hHi0)G1ExSjBy3}2*cnB0 zRfif?0AwfzZ!1=T(yt4e)^u4CpjR%y_vjp^&sPW|;uuH=f~O<TuMwL(^LJ&;#M|yI^`;06nC|<8ss0+ z=Awrz_KVT%--k+>55#e}gzW^fW$bFHos5(F=yBvdR=(x;K6-f@uaerxf*FMomK-ty zTapTonCY=Mo{`Z-5IBhFE$4U^SvC1}eSWFvVCJTP)%Y{Ph>Dj2xdv^+%#gFQr~}ohd>{*4-`w zKOY~6Tkl=H0Ov3i6NH>U?l0l5A&=w1E9p|E?;gNq21@}Ix(~x%f0g+rcf@D!ZIq!H z1$m7GfIfO(N=S^0g*nRvTYYuSKNo*@-Brgh9iwKgw#m{t5{eoUF9E2ZR{D`Bs_%7d zKD2s&By~+fi&r%IcvJ|(IGe_ARx29c+mmz|4q}nCLyFasNbMG@>3IPcBqBhxI?hT_ zfA6Z~L3{b%pJhaC5I_)e>`0IT^{N#99yK4{1S@hRhTQ?&!YkIRWQxBDaOFWR>g!4K z4w2EXPrTDMeFjyAD|k|$JGjJGwBINtYy0vAUZRjyrB2nqDq%*UmWw$+LQJ}#TucNn zYeNd0J&^eVGG*i;sF$=o&5&H%!tQRCwU;|$HTN|dqtaOd(H%@~8#wk*oMilZore8^ zZ%9^z$gJhkYgCKpHiC=2l!&geTqyA(11ahzfSX)TCLzI|1RIRbU*3~9-24jVZvC)< zSYcO-X6x5JYbZN};Uo9$@x)m8VQS(O(meIWxh!@N9QBzhIu1lLos(^PK3Y;}fP$ay zUe{B$B)L=+#6Qk)@Y1Q*-{J~7vOe#je9boLm?m7fP$moZO`1${j3$X_yg)7{eMyaPe-!i9iOa3yX z?F>F9C=@At3xU8fwiEO90w4Bk44M3+{wR6_6)48(9x?l!V}?go;|+R`U(8vxP5o+U zU%huzsMfY2m^&-X(sh(Uzt$N5M0L*0~jYJEHeDe-MA+yGm4v3qThll!w=CDRc% zUfEeYOc6Q&<0A9&)zFQW0T)Z4yWJHdJh7cHLHn{KnE-?!U1Ct!Z`AfWqK!<2i3@y_ zy9~rqtuMc_%-$2LRO?hpwVCg^pL;T!Q19Z&j3T63ogW{wm>O%keF$PKCO#0iJ}br` z{tjVXv4_vM+n`NC^uZQ;I)YZDx%6%!Lsa5n>Ugp$xyvQM8|k~Qsx{VZy+2LD6h}Cd zb^HgU9w#njFvl?yd${byt^TV(+?Xo=bWTcig#n<0RF=-e@Nnzy4g;rirfXF9Vm})sr(c^H|z!VEIZ5J4PUek;Ef$n$=a|wjb4HHGS ztV>-W_j?`b)u36Xal97NFBjT}}X{Ka7(1-_A50U_%-=J~INPr!AE%WFk__cwE1#6C*e;%)O}7M9eky#!q4a}% zG&7#)A$AF9>YWTPFsP(&JqxA;4Dx3Dq=85YM&&88n;Q^$1mD%aO2$XBVPe8e$8XY{ z_wyHtdx;Lv5KezRt%M@l4YfQ&-K@RA;cOnD7?F1Dq>~x80YL@(TmeJor~+)hzP}M< zo{JYyka0mJhuScKCqWG28n~eaYtKCzthbMkT?!}7i3#{ zqHgye{Jn8rEV_}SR`Yw*nI0hi>OU|skixQ!9*Oq5eedm9YYNfrsO;QA%XfGg@9xI=6gpY~y2DWcP!qd|6wJ{fiEFGe)`{Aw&9moT{9 zmf82;o;8yWfNj#mw1>p=Se91hKn(dhGs9aIk?xc4&BBQP!REA<{0KH@(r94LWd(Nx 
z+*Jt=t5Jj|BNBr_?tqFNo#{NqV5TOYNmc}HpvS>(FL^8G7Wn$BfD)!TzJ`fH=Oc^jW+I}u(of(rFIRyP-&_D} zbB|9@pIs7!V}=Fj&-%Ce99$&k4Hd`S8PRh*{_7JAbu_(#dZlzd9ad*#kWttta#H3nW%(}yY&>K`ha zs1SUy?X)coR9tz`c%5Dv?%2T%8Nw$%&WC5mG{7xrSHP}@Y8D{*yyj!%HHmeHw(hKr zRNm9o4@`NU3)?k80Of4Tm(a9Q1`Hr@=s*JU-W2#aq>F1|3qr8x6}i*#-yx-?z>BxB zC1Cy_gD4qrBs%#&*iz)Z*=**zZD&Q(~f%3afj7gY%(foIUQVwa=OF{5(ri5MN5uu zzldr7nHLNgf7rN+3Q-B+*D)7CYl?E35}{uLA+3%eRW}1^?VOw~2}0KPoqODa$Zb%ufU-e|dQPbvl7j!gTa)p*ElK$at@UYovpc=k7p zQ(yeSu+pMWm=yxSYo-4;aCnaIUDP?A6)h%NC(i;q>{~b)PaeW}N2*~hE81fC18{zT z`%kqC#Fq;vfwSK;*wH)Zas_CDa0H@(nlm5HS@zjx7T^{fy1niKb|%kj)o>4Z$g5a{ z)olEmfpiUUUP9y-S-MfRpHEqU6T__k&HzVHgmxYW-tY086KG{3 zDPa$Mvet0Dm-GMOij&Q%9*_wlvKGt*n%-(>f~9G;$nOX;3Wme}wFrNNfBn9ocDfWF ziO1O4BJ^Skm_jGkQ;tfX2^@0ib-Srd{slDOCi+z5eEQVb9q@lO0`OuPPN75jQ<2t7 zdn-z=mg$JyLf0jpzV#!)R04U@{Om8XCgU?qyw5=LN65iyPifYNZD`v5ttMmG{Nlvx zIm-MA;C=S|u=M()hGQlV0%}ky%$UuJjO6uLHdF(9Jq(9?;08jqDdnARw1|#`ald4! zZmuI>DE8dOa5N&5Y{(nH5KijO-&+|ynXjpWyaW_bPJ(Hc}^w7jSy7_f*1+inGvBe@dh|C{Q4KQKhY*5TLOM$13x~@IA~SALKQj zsylfc-85?viHZ*5JZ`)wrU!;Q54!+czp0e4+8IeivZ2tO+|=umfGGO6&AkeO9oIGt zrEGYNjD1{ABNrt-&lCK6d#${F1r%j`ux`PbLUC$THQJ%Xat0A<@*bOTOg=2(*|>&V52_r;3Dg`d`q4*ls$AwOpI!K5_XY;Opl?}J6Qv)6PJE@|h! 
zKuwGz|G3(nWLe`e=0)KCrz<5)K5x_UrV6u(~?fDy4&# z^9l@_FO3-kc()9>>dGizxP`t>YSvIF_O?VFd9VE#o?Xem?%2BMQQMDU;N1b{2o(3O(boO_FQckc*yvFMxv_TP)SN9YYp z`AA9sfiHV5&=sh(n@hP3eDhQSzg`D6CwBP>FzoQnK`jbE65 z27;$lw8Ga)sz)51b5vVnC+TCx6f$`>Lr6d~p}j2(P39$F3C#2;-)4r7hKV35Q>`Os z<0l?NS1T508h8GP|JsCX60!dP^U3^MxF^7$y8^=QTtsX?yjT1`f#P2;VC0Ea8{GI{ z3q@I++eMpK<~S^l+;8kKwRwY|H!zhJu=(~MoZce*I!^X0&Zy+F`Zcb6RQt$bk58~#_;50yFa(Jhs2?S~%m$j$L#n?nN&o>GZ-^Lx}Rwg$IZ zw7k4fbTJp~5H2-qx#)(FvD7{r*45n3{Gx^nWg}-;Or}~#GDb^q?wqJ^cdEwYaF{rD zu?sJsn1M2bXwFi9rS4Ck1zi9f`uzKSd`L z3k!ao2eP~rbog$bXI`*j)YWW2V~`8606~t$>Yq@wRbvVYW>{nnXm(7Og&PpNz)dLY zuA`3c?XHvk12N$sq!Wxaw zCvO5fQsLUK0<^{{IK-(L8l18skH5SKOt*3o@W6ZrpZ@d*wG+AZH^k{`;QEQ5-Fv$* zY;PA9IP|1XfmY?!v%Z1%A`rVpxG+eEbP7gfbIMS7(!IOC%5&1`L6b_fj{3|h`jPen z$SOX?=-jDRR-wD(z>MM9RYJ+x9&AonMLIpatzrVh--g_EI->>kZkCKgi24m2hJtx<YE5rE6(|^y~Q-Fa0@cjjPIvw76dV6k>`= z!#Spz$<-epUe|#Y9Wd|}bD_rjZ$6q+92<3)1oqv!jsDv@Fm|I`a6zm1L2VrJN$DSb zn3Nhzxs8*`(}wM!w*GZ9$Oh!Z;h)`AX4Wk6wEc<~S^PlotP45l_2(eT@>S?YC4q7B z>V+i?R`+dUUVw!7Ph89wmC%qSG=2qU(+TF%(;zx|5v9psxtHXmK^0NCCK)mb{1J*Z z{^#~8ghM#lS8QEYpuHKqUOT7q@weN_#HMXzy@yKp*GZOWO+9dE#CA7DocGSky+Kfy zlzovnV^QrsiE1!*Vbt}u(*=QMVu340=< zcrcG#o`W2<+1@~poTbi)TBh90hI(`R&s8vpQKmy~*-1c`!)~fPzpX;R;$*Ker@ZB# z1Fi{s@cD?MyE@-sUHk>C2N$j6UR71$ihs;&eurW{$)#d2w~CH>e2|Z(gy}U6Pc6^C z4Ne3T*xUjepX=*a#rg28*xy73m;))P8~NjCPsEAI-GFU_+YT80r}C(v~57EeU`=uc+CU4^Ty9SUgV zBUG~m_0!vns{VE@aei~U^i0O!`lH$mm>2smScp4N`F7S z$2x$GTy|b(OUK|uFTa6MP6t`$`<9lwOfF~0Sof_v248@w*D*@F4mFOD<0I=L^+|Sy z?B75DL2vYFdGvg#%;=pptD7)=7o^sh+Riy0gpWLqVV*-Koi6kxKKbxQp}rP%FRP6& z%zQY020@?++!#^1Z>Wp{Itojk!6pnXc0f6{Bb^w{7WX2z^}$$Mls0H<+8?r`nb? 
zJ(G)Hjw<6zX2F-1K1x#!Z^;WWrq3UIku-2nL#;-l+^{|N|Y$MBfFn7 zgsGDT-@|O`%l%gd2XKz_w~uhDLQ}8b^gMFuOz4AA@Q&yNLacMT&&=UXS;!g{Blf_5en8rHrA}nFKcFw{JO1IBbUCg@`>UV83}lS zQENIEiez`$$TN66SrZ|?Ut4fzO+`@61G*p>MRPIdV$aieEM%8*TlvaiEiY9eeIq(%hDYTE&9hR1Z^s(h#iE|$3MVP+|~Z?Szlgw~Iq-cvDEGk@KVo|>lcV}x_X}6?cI)T&cqtVtw(qGCV zpHjp1#XUVajYK>J8Pn}{k4R6&THxc~ROM-w7!A>OdmO)h{*e z=S|Y!@oo`QE5;pn89H$j$pM=s@hed_{3z((R^C_z=KPTQAw_z0-rt6kDdyFA;zk`h z7OMZP6p}r#b$eGD&7S{X5(72aROvghGOmL5A8kqZSbq48*$X@xQF!_ylz1A865dgg z>U#Ld(|=7;0vpD^B|6czHmDId?M0?m9u?^*s;GOF6#;D z5e2W`&M4dsQo!!6cH*sPag#9j$AhhMeF|QAlNq2ZCaD=*i2Mzlr`PwVAwrLmor~LV zc0d`oIxtQQ&CXZ)8YwVa;Z>wss_NnP63Np&L@(A2+>h#YBJuBMB$5}mJh9-Mto zH+4R8fAY}}Y*gXF-mzIj&w-nr(xbSkgY`2OFa9ft-2t-cWn}vFf}-QAe~rjPiY_z{ zM7kbrV$IBwihMeb_#SHK&#c3tI*{nn1gKgdAzwf1&xWyw3>JRc2@Iko4wNeZr`)Be&u5Y(kscOTh89%_x^+}HbUu)nYBx^a*Ah4Di;dc zOlO=;Q=8mff^7_iP1r6@R*oq7nf>vT+c3{78oqWvx% zP}7D9O4$*+2it%Iyi$=Yl$s>A)jB(lzU$ndlz4C$nJWNSMERu@RZtcIHX~2z zfom}!Vz9V;j=>sBus(c3Yy8warIUdw_T6J3_@$KCzA>lRf~!AmsIJXk&@auF)fm}N znXuHok6VRHo1L;GVIrqik}Ndm3>AIQ0`c1;cR*%>ayd%VlgqP${kxz_DrlbeJEt3= zuzy++*s5L7T->Xwqn&!ZTRq6w?2dI3(&)XjDCHrzCNv%)5l~KkWf9U()W3q}zAh&dxoS zPuc2t&*iN*PojgpLBOuTUUT1wD|u6y)}S(W_S$2Gv`as!3cc}+yg&qkzNfW@qnK9N5{aLHy*_>x~&V$mf2QTs$hld`Y$Mm=OE=lFFzr)yn;sEG_g~xz~z;J#{Mk(ye~AoMT|6#evg_Wds6=$XR`muuc{JnmEZ~?JC^o( z^)Pj__UxAZh*Tl?;N$@rzZ)a-;6q$Y{6)Q+zwUVk@l*FaLfEx8>sP*Z{T8jxJT$2- z^Dh~;8&wfHr2gVHJDSyXY350@Wy#7|XN$)LIw)_Y`_7j{ABbw^BioM*9)~t+VN*_^ zO{pRfNMKD}1@JmGg4bVm?q^v=rhAmb(;C*lhLmf5-O~>-*F>6NO+T2nF7h=`A7u1_ z8eQiWOT1UR_k&+xSzUQ(3)PtWZ<)SL@Q-&&$K-WT=#D5C$2R$Lx_Zo7nhrOQ?qBet z)W!du?XlyMOjif1biN^jk!8s@=PB(KTrQDyxT6Ai|C9W0OI2!NKbZe^^hf#IB=0Lh z%H&$^XtIP&(U+Os5uwu#4!#IQtZfe*y4Mg)(~x3AOcgoJ717&$e0U9=Y(b&J>%qS_ z&hX{@JJ|_ed*tqt8t8UERZ%S1jAjpX+*6+7mRS#@NZf$A^5E-|^|U$uTI=b4_fVoK z13SP9+&xT)oX+MCI|{7)`_;L755XP>}%RUK@rjx7J!wST)V{#N0xY9@>(!zmag z+d2aG##y>pKjHDTkBF-`1w+&*A*UIWXL#bV)6N6OYw10NVf!BWymM)QoiRmm6bPXn z#5MwV)(BUfF-NEEb}3+^=B=C1`dy*A{otp_umKk_yo@^T4&99V19b08F{@0XuwF&W 
z$$e^UA`^UF8+FR>rOpz)0iubwXwGOQ4{m;LNz=P1^4$y{B%(prGiFc<1m3}P-qM5o zWTo5|+UzM`*Y@hO%1l z+0JXXx$NFEXuszg<*LvC4t^5doBk%M_m^nPaqv62pKq|U9l-nrc*sxT%ghJ?1nhd*%)4uOYS0cPA zZ!$*S>7BfEpinM(?D*Q{m%*loPqoho6(q%6QPc0dl%ySYIEm?ZgS{B_M7Ij&&yd%c zess(Rp;=7L7$&LNuTH4@1#3p10$=BQzc;GC_Hq(7Rj_{ki=Pd`i2C?(vna<(Akmzs z$`C6(Pm;TjVZ0BV7+eh8u*CQpOC2l_FE4o`ljxN_ACWWF+7T-A@Cc?=ZdGDH3YrEh z53>Cd^dI7sJp){0}BVM+y`N=+r*%D&6khcqGkR;z(T#?<0}!x!3Vi_JkL zz#nHD^(~4?$Kg8qmfVC@)AvNlP+bMa>eT*EeCSh%(P*0L8ZZS>2tikoS$tA$U(OOj zE`>AmCyFP2_svAI9kKs)Y~dl;DGh~W=7f61S=0f7DJ>8~aclIo7~siQ$G-bLe1+$U z{*7usg%f5@qHQQ5x5tpc!hz|zXX#0f%7|n5A;Qk3*#){O`j}4}^Hw-f-QD6$wZmOf z)Txh-M;L{dC6L(TF@?{9Jz3QhT234}Gvi4SmJnXo+80|r45W$K9{jw>=+&}<=~j^v*-S{)ZyiNzuAoT15$Dnn_Yx^#k`>$xFH*?`113~e#^v+#2@)k{4jHqTT^-Q ztcqZ9xC=b%)O;kHaF&~x;1vFFz@7*!yszk}PvY)uQ8dvt?;mTr)l|WJ`f27qEpEd> zDLP6sTwvhz!Ry4fF?|J_h<)Di=9FL4$=S3xJas7d)3Aw^M+FG|Nw|;AQToH1G%Mw% zZl-tq{bZG;ud!tQ7HZ8qwshut?--gQsOAlq3hgU=hB!6X(XU}-9Ezsi3GSgI>@=C8 zYFT8WS4(-sA87Skh&q@K5D1aQnvvHs3lDvvT^NCwDr^%t>!v23xNM2}BFZ{XRXQY6 zyjS~6Vd8AAygN?wj&R<)Eo-<<>mygph(T*@(MPY;Y%+}>s?y>IiAf<8)TJTbJN%k^ z+H#ECp8`|AOFV7-hpD|p&3`NdiNEkd9ZmYAXeT{~WDDb1-6wV#ebsiZ&Qwz`_4h4` z=(bCi>+U+P#{({ZU~WA<__3Got7#`)hw-Xd?hVCR^=cmHX}Zu$i;~Y~-{6A`XcOFN zm1=DYqrHxdSjKMRme`i{ra~j1wYMQ|(p+;~nNc=j{JU(wWEj0X%u>Pq*epb#i+5h_{c3=NB~lMFJ1} z6nSNF{Q_g~z|+%bs`1yZ|9Egmd|sEbFW-{gYvLGi)T;Lem`-@OZeEf_r-Q!+3 zpVy?8QYaX?h8X2KysZ4*^Ow1^C|W;SX_Qtznc=#O6U=QMpp|L&$}`{3)J_}e)lMfl zsKsRGOdQ4cUK<1^j8<7~1q-G`J+NA|g%HF}@%v<_a$Br`Qb~x;0Zrp}BL|~I9h|Uh zk6c3a{sr~QcDD0=2g&1c)E1B6CaDxahn~(JxM@DRDe`&*8SsqBAmpz}oTor#)FolI z+hOG1_krq!lcd{=l%_^yG+U>HurgVq$2}^tMblLV1d_7o23iRpFZ+(N^^#}vRxuE3 zY$;M=zq7ZDMH9d5!^uEjEf(PWGnt+z`{4Z4=)-h_XP zc>hL=<(gq|h4M>*)vR_2^26V8UQ?AZbh2zM3cJg!v2?%cAH5uj3%tP4mv3eEM+4^^ z87MG5G3zCr8(k>sShtTwh(7;PHe($z$K&0$(~S^`#poM+eZwCPe@U;Ac^96sy0aev z`wNthvveuJaHDqh zd3^R6s}Q#RhM*J+%kSQ2Mn=+UqUT3Tx^smd{=q$x7u8zSnHXf8vT+XL&yG#PMGq0i!#*g|{nf{k$=iqf7Fg$&=bBR2(FAu 
zyezw*dv|Ev_i_~HblZ4E6km)}-n>ucrICO0qJP_?dkv}yw%Kpjajl9yx~^=Hh0f(K31IL*~pE?6k7h_LyQjt%M?1cBZofmEoSU)y*W^` zm^sbk@-gQwi2-wp4~_htU&=Bj8aQiw<&pO!JT(xo&o2H=BQEqzr#ET%=@E~)e((G7 zuf3Cx-`|O)pnEo~!)#F15iBIr%g03!!%0#ZvedtuYyU75JxgP_zM=B$N3ewSA$dE$ zvsj5$D&TKc)=HeiS$rU=Sn0M47n1P_@%;zE!N}Li=`*oKnRFOJbP0+XcTJDpSv#fb#zX_MAbRC^ zGQ3GGOLzD7)V#*fz^G@5 zMRhm?#%_O_{i5xiYX!#5-E7<{CC8%zqv`@+}E+-&5wKNFa=>UuI_9c1+Vo^P!);=e&F_Z!lL9O~Qna zB+890=rku0Si&b~)jyDbmBh4SWuflDbOkJVuZS?F%F|4_yH-DlR@ES4PR1Y3q?D^{am-CN{ z*P{u0ohDs3jDw~yYou30=G)K{THiCC=s9~$mA0P^x~!?!!gB6eO3(7lz%GGQmPT=b z|3LJDouAExR<=<2=ZoLQy(f*AVyFng^q>Tky3`!1KXw{FmC!9x&gbj>=r@O5r}BTd zS&x{;z(F)}yW zHfiLbkiKFwXA)LBSA8BoTi2sMke%WnLH0F$+u$LS=pj;%o7fudYn57< z=fV8I)Bo!7((DNC{*qLVG%AW0EJS-{%&B69Ut=2Xs&maV{hKWX&JxnYl1pdGN;8{y zT>;8sJ$%u0^eX|_@Q;sC3pA85{`~kQ`4Fqo+?lr=(|M(*#I@^;UNgfT3l%?Yu~Af% zg46+cx2uV?=h^WRrbf6ROT+`3>~89CAEIFnU_s$s$zc3|`Q+5U`Mpypzm@jD$~@=fci!nf=w zt{zFne*BdIhs7thO_(g|%QR1-&U#7BPbX_%k@&``cDzCtI?!?X?o$HMKq@n$Sfio| zy?*I^t<4WBLa6Dj#@dWTs_y1L^BGd^3pX1j`oGIrNBR=Z@dbA@!(8t38Qtu$I+_wm z_fS`fp@lc6N>oDv^NR2TcJfuG;Pfo0#`h>WAY0afJUK8)<8b$OlfNL>EcJ zRx#&z3Wn!?LkGMU{63!&OFo}4j{o%}V>6L|MWpnZYV&1rTb>5}Dk;}RMd5nZlLzanm8 z-GbKncMKv%TlYMk4ePB$hi4a+6??9=6>n=Ry`{nH(rf!HghvT7-L6mPKRMpGee8g< zg_rg_0o^$xQbR}jcZ_5C?27`bs+_tQ1;UV653F@8E!Mz2t|C99h^#Fo#&m8_0d%ef73IDb0f~yjZ+OyU&NO3*$P&oRLf+ zmQK}&NjoPZbi&wE7;9iHY_(-s;GQHdMI~avkdpaZ-JV|kMe9DLS?lJv1ED2{I!arW zf&uC9{!_M_^(6h2X=SF`GzvNfR`c;^`TL}E z)fw)pO!E@HF|R4uR(;Z+fDm}g?%tQ{B%YNrvkK{}!3q-wLNT%#kBi7+s-pLby`#5e zTf#B6Z1rfYbEmSvp0bqsgznjruyq>U2gud?<~bA7nQES|lFVXzTKi%Z)Jw<^YP zb7(4zFja@uP`z!=O<)^G-_?_6+Qt8BQ^QKiuU_o5y=o%P@Ku88nM&KnEois*!kC#O zGPvPftr0{h4P1RPR!0_Y!oL1n_g4Ju?-^?H$sh7;^vloB1|I)QBD~!C_rJ7EmXB z>f9Kl;%gMX?fqk@M=vfD^U(6G2t&Tnu{z;>{_uW*tz)f`Y_y!+s5^;FZ-YFk>+`wD z?j4dm=^v%EX{XuUOBC6XK3J{0^@S~LKG^4*pVTP6>^wVkh`cV-;sk{Q+ZA&>!ulm` zeHfUaOZ~G*X!?@{U8GX!wY3NKg`C)ULeEzrlf0P+;uY4vh*a;`m|XigE7;F6v}bAy zLoKIXRMl(BVn})oxl~gntbN@?sciZ>Hqb@v)m(OB@wS0df>+%xdv^Q7ZiA 
z{dZ`Xy4Iu_)=iDo?7Dr%bA^B0@($SEk?1^Y+Ou(2jN|-m2JDP46G>r#R(1b&%obP3 zLXc)BRb5j;PrdS0N;ZZRCK1(l0TDiIn>xuUSIq7V?a;XAuQZMTX(Tm`A-oEd9<~?_PN#U#6 zIeQ?cKRoNVEdFf1G0pIB&1j+H!}}~7{>DZNt_Pf{>!tAqPA6-H++W_I62`oXRLQ{l z%DuaFw~SZ&M5A^pVK{ZIl-2c10`k*R zmPYEjzQ!qPXeLJ0`Y`Vf&zpT+VJ?o$Set99uJT;W85jwvmM;1bJ-%615|K+r^;@80 z)|ikvgkwEd7DxFY2erOPTH)ueRzs^|?lhN0BSj5d5(x6uJ4*tkwtwws24wHDMqRs! z(!{dlnWVhLsYWWy98y~;eF{ALS9hZ-nuljl1uQc-vUXI&YRQQ8QEZ+(LEB-VnQsYW?f$wmvy8`&2tQbi?U7`MkgN zFCV;KsK@O;l6s$i?N2$o&IRi^B!3&iyHzKzPG0=w^Xtn!`c>=?Q(e31_Zv~GjjK9j z+NOD@uJO$*@z}B{@d$6*YTm!0N73RK`fnJ-qW{jQbt>&A_g+^M~Ig5nwd`|WJ1%_1)#z42LhK6~5Jy9-W zNv?}0$**EroVq|#>l#2~*3Q*k7CM4bX7E1}o6w=YV0}M6$&qINc1FWFC4!Vvgq{5- zg$CD4zgE0atHacz$TjirQRlKuo{uLoveI!_Hc|vKhiPb<)YD}u6@OXyq+O#H3NAc$iePE;!lwJ2B zTpFVosk~}Y=hd#%_1zb@SgO*aVqMf#7pPA){$8H7Esylnsy|jJeBbbjC$-&#o=HZ6 zx4K7lMlUa`yyQ6P;IDDiwN}qp)G+ZXEHB&jJa!e%?R=$H;v+6#w-RH5#40`esdw6&!b4-{ zM8AC9PUtYa|Ger4gWkiS+FQ$P-+237(R&%xCERaQdp7LUns|*A9k=H;`%KBy6z`W) ztco@^RXbj5CA-GdpJmK^<5eMhUz6#OHCI}P#7|pa?*eYE8LdB<<_#Yu9~>U{J$T~j z0CAoZu+nPsJsDg{^PF+Jy>DrxJVMkKB|6p1f1MKjAQ9nwepuN~c}{+Q#e*t}_OFFAa{!Lxk}m6`e4r*kK) zN;)W~l|)3_L>u$Hcq3Zpk+;rg{mVDkVmSazXK|;~gtl_~TUgx$<1EJ`){}k3_JtI4 zC$yh3EM1$Xx3iq5=p+9t{(J7q#}FfW!$XfbWTpwrfQEOB?VfOKk;^x>NV(|?DvjD3 z3iFS&&U@6QTG4+}c$^uiR?*4TEi|KeC!B>lyvOw1*Hdpg`hBCGhf)Y)u6?kM=n<3> z6CgJ`<4x3CMLxS)`tWL@NjH_&m4*$q(R*zlG*W$}>(qN%B^?aeXv0WP-%uJyiJ1XT zwY?$tj)l}pdnNrBljqY{xx~j?XEAhU3r=zCiLM!qv*~fOQ?*e6G#n;78Bc;d>t8PN z#M|HqFr%Ej2E}IyxGA1b(X5u5HMO=eY_Y^A^avS?p!%68jI`IN(Md9MW)UYaw(i~H z+WgoXCfO~O{nB-p6yEwq<8|A$#)$ff^TJBU*CL`9zhy}%ho)WQN?u^@&xF>+^vhzKJ zS=8F#8v=HYgk6Q& zHdpAxaDyUw9!}zCMALazbW!wzRIS0)r(Wmw=bPpk61fPuq8tB^33wT9rtBo#W6jqw z{?Qo2HRngY-qB7Do^vFtPD71)eo-8PuHnToOy};e_Z3O6UcpLspNQqJsJ>Y9pe{Fs zs&v=j0mz`gHMBWTN;g(6@iTCae)RaVCST07ALbEZXWJ4LCw_5v*28^JOWT${45Pt2 zi|$ayrpzV_5bE$%vV~5dYgyY(V6=#qXvgSlm(9Z(_gS+H9byC43i5uRpZ*A9EMGBD z@9)ozy&2irk%eqotlbIn(1Zm)l2>bu@+8bA6?jhGR~LV5u+P5g4UGJw_PPN*3WE%F z1Fs)gtkc!uLvQ#P7V7>>tvazSftX6?KxKs))W(-|)4B 
z^qgO+5H51fWnM8SXDRUn%SO;IS>utodo)KhY=`vzLhSaCX<@I!WLF!N?a5A3PttZ( z`r`WrVy8{6?kAi?SF{9VlKK++SExxl>1}%xN&41H+_7(ICCA<5x>$e7xpJJ+DDQW` zM`H`alLyfs`)DYw!*AX>2hQfLCm)0Owm&tq*t~j@d}^(F`7hovq-r#xbL2*7O*g5pCuU@ocq_OS(S3DM3)Ht*7-BfN z_V)tSUu;aynYCuHlVj&86%Kv>sP5VHwAY|d@P^F&RcAP*!T-+XC+}#w-?^tM`+9Z4 zvm8UatP!iQIclDfec(!LdUchNA;HX;&v!&LI93he5cDZ?efjivT>fLO=9}AJ=e-*H zd$up9LE+`8RFh!KBwb4Uw$t|&x>9d4H0s*b@O~QdC-edKsH5?nGc@?bP!jgEFp6Ip zcH*RzH>?d+G~LHDR(=Sv|HRQ)*0L=9#VwZ(uEj^<+r6VvXLyx=NE>?yG-@|L_Fxfw zRQ#*(xr0HXv@izc%5@>*jvS#9u|LI5u73(CD&hp+VX>{)n*N^>3Zdx=3jubss(+|U zZQ%spi29ve-f^lM@Y#^!VnPGMu|=VRtNcT0mfZ5pg!r$yWQOed?sMXriu$-7; zt!mP9kwv*hHYro37eHqR)LVHN#a!|3dpI=Kx~3P+)S8}0m zu|$$;IY+FbOHFt9Z{k`a&Iz`qyr=W3e7y#v*5%qNzXHgK9mb{O4i`3*9r{=!b{J85 zH))J_5{^;y%WLombhP3=jzjFfh0bGCu=NX85nt1t@q2G0YW70GJja9Nm{h+M*|Ci> zThF^4=d|~XZ=p;_jp;rC7w_M7e za;DP6TwkyL&{a!1C-zkv_wnwz)&=$eWr$1l)inKboguL_Z#%uxzWa+$Yo>)Wp$x;3 zK*SHo{ zy#Y(&>1zRkg};$e+@E%|`qoCF`YFfX5_qFX-{Nso~l$5#T^e&xZ% zlxl)sd{wLyX6>F745XY)rd1`@LO*U*o=@{mFUb{tQy}+$og#>fnuVyGQY8qYnOpcT$9rIELRBNAv?6FtBgxh_+#hXKaUbMGc`eh@6 z;+WqN>DB6au6L<9dbjQRuxPT#1yLyv^53T~7M$7bay8%bi;hg@Y0X*k-`KnU6Vt3V zeBEiaOSg-9=a7cWTDlX>WNY_NN0VSZ*DqNF)A(|LPVmgqNA3uM6LCDmGo|_&#Seu9 zdq@Up@X+!iyQ3;+C)18nxo1Sowc5yX@yU&=UtqSy;n_P|(e{I@7(Rl&-GnpOV#$cV zV^RZMZWyo>uokc2@bsx(nqTpcD4(B7)cI}~#TZq<-He4(+W3I3{h#}}q}IlVt2+-W zLQd>|iNCad@YCeD!c^UfYq`$m8v$F3i<|^cS|uKYTHCKNtdr56)N3uqz9$?i2;{p* zxGAlv&o%F7Qgo&54e>hJdKQtx$2h}VYNZ!d2LAfn#J8ZO;)iq_S?_*}^~Ns*eJ!*1 zb*RaBAKZA6sd1*i&yG@uc+0K0JcSTP-e{Gocwmurt8V4?s#Pah0<*8ltW`9inFd@Xg>k zdDLD`Em{0qg|4>@(+d=?4(!WVpEnzN?<9R0jU>XhicVh6Uap>f{sq81!hfkuX2|Nh z9<65X^wgJug5r-~c15RtiEZTHL%T5XW`puZQ7g?_3#{GQ78CTeqKu5|z$Gb)7G`ZR zn$CN~L~@$r_*+`8?O!sO#0{@mFOyZs*0PA`Es!T?MKcmb9H$QenLVb=uH@pAhaL`` zA41t_^HpWY8D7M{w^XDB@y9BJo6hxRQNLG;=GR4_9=tS0zC~y z>{wQ0+N0?54`Z}=5oaS<4rjat^PSa?s&k@q&x(1XS3Ezv3ggT%Qm z?>I5OC#EqScSA`S88{4b)hAOr^;)H{ywi767D{DKUV3V`%6!mOoF8S!*r>(z=!w;A zETCUq5>8a?(|P zO~?V2vm5?q(8T2{lg644LyHs39GN!DuV>#>GW*+E8oOBqo&>#8W^Su_Z1RF;gu5)^ 
zKC}M(#rwZ37DM+0&#}_xH9U4X^?ieSOakygyL+ugF$>J9QsnKte=b3td;#}Km~&_U zZ>Lneru;mag0yM34qw|?v@AMPa@1;#!iI~MNU>jQo_l&`Dp7QyAcE@o;at7&!3mND zyVjV$5dm)=P<~!qS;5--!z{I8_E-A48RoYEal2uape5Y?L*daTM1vpuaxB3dM~SDH zl}J2QK6{9>7EY={aS@xzC)CW+$7(Z4>y$D*epd0OlLn=swcWB?6dDug9ic$hzzQ1Y zu+Fp|alumDXa&ynW(0F-l^P{iYMJ*f%NIX&TBXU%5qR#?btX>rGkDP`UzO`^Er}#2 zg;cx!6xg|zxgIrjyx<&Xc6n%xx~Me^(@w2w{^AjW4wNeL+8rW2x1_L)=$y5|>(5aS zkkRAtD3!GMs_--9Pjt_M{L7V5x#!Bwuyh$AmB$o1bwM7IdcJx5jGmOkmlA@V4Zc0h z(vppf!o(lqR#Y>)@3F;99bXON8!J8F5-^>iKF<>2klPAnO(}n?MtBRykVUgvkhHK7{XH>lb%|z^7T@UH zU1qcw5lVNthtLvG3j*@!(!-u4m*KL^V>Uwn9Q(eZ#g_`;g?Kry=se9~=an&xEqL)Z zhVD9t;1XuKxpau@s|AfQRML{X>@T;c2zlEnPEM45<@#p$=L89RcRM1$48epe>!^7F?@|^~J>sK_8jn z%hflHtu?tn`He#}84|b_xx_VEmU(zEd#VJhH-?%pY9v@cn5gw7^<{|q_?ZuX*eh3r z!%Cwq(Rn{HCNED;Hmz4~CTSG(MV`F8{ACa*A2<;V&P-f?F#K|3MT0+!GNyay5pkLb zneR(ls)S;y!rw{$d-%81b=yK@(hJQ=SBO|*pust&=9;oxIxP4U)syPHX3T&xsJ5n4 ztrlP(D1KPfNF5O#MZ|0r{7S#-{Sl)^7*nLt@{p;WKc`h#Y3g*9ZNWBAyL|tOON4`r z+p~5UQ_*b?F_lXIIzaNSi@_m4=r~PhX6WJd@GOih1i+*MYDLt>i zIi#b8ArBM4;eG3DqdI-kqcBcM;}0~i<*C2IjOlFuniWf;e*Mq5=uA0_#%oEZ5wb(% z0%AFD9t+$4A`U_d3=0Zc18s`KHgO?1mCYj$D8q=V!CWq6s$R31#@*hi7fyp&>G5tJPwgx(!VJzefrb4f-(9{dG{l z9Mm7ZC>rTE-KOgXhl#VBh?q}f744;ir6)%HAhYRuoN%vgro0AjAJ+`16XTSZ{;t0} zIedb0-Y-9R$H`eIa(&8NLn@>E(z8ev1sW_gNo$==me+oT7mXosy4sgdp)y=$Oxa-@ z+AD?8^|qJeTd9koYMf`WDT1vj{iWfa;S#@opRnM8KPu7K8_hdH6GLkzq$u>!DD zub(Zl`y}Rcc`u>`0Hn?cUY1Js8!kKQNKfp8xoT*8!`taP)i7hbe<+1&MhRngj^FmiG1XmGx2C{!W>cP)nu zc;X`dxx(tnmcf6oe=0YZzWCD3!W@4@L;Z!d?f~+DvYFg>s=`0?=t32MJ!F!dcKD9c zPX-jbEh4X(_zN>|*)XaViN;EbH)CgV!365Ya)<693VHil)!|$-uyF8g-#qBOX;D^= zGhT1Hxd*^DVFL~$Mh~i0l2@O=>eG|`PG18A(i&iHAjpsHe;+x@fmC&?8-KRC1ZMhS zxHA*9l$pTn7`cKvTBfjIE~=iU{qGB$8_NIT^RbWIY>6+Fige#CVOgzsAYfunwkI%oJ3u#uELt)rEOYPDfa)& z0wfbiW=_NQ@*)@|)!6mW9_knHkcGu^6MXH{$aI{jD_5nd1Q-#+Z^SJP&>;59Ye%{S zHdLp2LcH-sE+2~8$2cq@`VAfa_G{=#%r~sQsUbW@EF9;{BMJaxz+UaJ{UaWkLB@Rc zC1t@DxOcqP`BJ2$@5^!^k0S&Se+11x;lHPcTwt=wv&-eZ0A0RLZY*-;MfV{cGLK$_ 
z6?O1@vkFLUQTpqbUMMenwonyItgrNwk7SliZee>b2mScphnzVyU8Ybg z5Jt4a$;fQA1459f0mxn^h&|GX>{m`AT(x-+@}EK6?>cM%G!-pOhv-BMd*L|CtwRhc zAfBs(&W&9ZBv3D>GIs3KE&ZLmIrJA4o>z7KE8NIXhyPVYhDjHe z6#cVj26f2vqXB^_$t>v$PX1A=0tXM<0f3RE{ayHj*o$)bNl~hx2lvl`qn;zC8Nz%9 z%*N#m@*)UJSRM~he`g{lkAFOVFAiSbe1zu+gT>g7z#B`zl)(PS5YUoUwCIuO`uD|0 zMe*0nJc3dh1)z09a3xi>o~3smA~mrz##eviO-V4f7yZ9d)0g$D&xk$%o5!_D0c=`L z5H3u1UP&xJ`adM0T>ivQch}Z5)SDR4tq68{XIb6T)2Ph(^V93Vb!Yua{$!Xm49AHF z0uthlIPrjcQUH|OUlAwA?dVrAWlMs~8+YwZ)DVClz3?vDoAPZ;v10-$(ebs6% zw8Be-bR6N&D1H=9+^{KyXFHGQ3wCP!qZmb|6&YsEf9l|a@jff7PgR-U%ID5Pr^)M*5$^sD&cBf5s<3Td_x2ED#M4U^r>(XJfi3l`0x0 zV00kFV^^N8WDa*Ay|q|N%M_O{e|!{=fVe-7tNZT{{hZ~(!wDs`%cVR20$vJz6G;~{ zX(j>9s)g_bBtZgis$%}co-}NRM-=>xZh?L^8zHJd?&aF1yi5)}ks zzH^E5K--cgVf6?tEREz2#v+xEx9A^$yG+d9pf@|Bga4}vw_CM-LUaymhGYT|G$}x{ z>P}I#@QvzvWEH$W-O$?z$~lAwP2BsBNUQ}?ZP_LTS9aM@MlQr{dvMsChrB3R{OM?< zn-Zm@6_}^dvH(M!wCHQ9obDKf8!X1s?GSTs`~jFum35K#P9(v|KHzqqYk?`kaxdsm z7eC$gVuBZ99YKoyE(PnmkHS~$4G8A~Cw=AYg=t#meuF<^RZ!P3X#hXGMOQ%zJ`bue z2{@Q&3^PDZi9k|1t*JI&3%@f2ULwb52+S7|YZQ&!HUE2UCeqZ)_QE;;u1*ue_dY~> zpx_oZRJ|Is`FS3=XGwhFKQ3LtxYd6p28xx(ZipY@kFzxBRxM;{I%`}JSSqAEen!-` zGlBG=!g3W8Cbm{;&`2?U7NmI%MZRZ@O*`bU2l7u8w3EGuIlw2uLcoljB$2%`p z?lL0IK0K%HO>Cl@)G;*O6cAIkd7}b{r9vy%`s(YTIC7ke9stsx(*vaX$F;W`ad-sO z-?kM1;Q|R8e-|T?m{l+W>>nY)PKdt&I{9~$%cCrySA-&+uAHmx(Lne^y15qpBeOEo?=~y+*DH2}=H0!yiSXk5p)%4gVuK4b^!$ zF900%0hvMjPcq6!a8?L)IMKyfRGG~py2?{Vb02?Z|VMYU_;E*}GhjJD`Y#&agF|P&s zjW{c#aXCF2b#?|A0Wa!^FgJW%*rA@l12b`SHWpzSUz+?^=&2QpkQwH|hoax@afa(A z=x(z>;x+omAMmv$pjHx7SD8A7KHwzqLB|*YWXpN(U&PEL2z~5lTEhNbY(W70CE!VN zREA;C6LEZC>DhcYMrBy)2nO3m&04!RpZU$f1G1w}(u<_lE^?Wt^j)i%mlYlfw#Lz&zHEt6Pfc;6|^dNSljeWky48<2n>DPGfK$NzE! 
zpo65o!dm41C86y-t09}burm(C)`1vFxO*}7~D>aA)H$k`{T*X;Q|5meg$vt@5N zpDOFzpQlJEbvtedYwcd^yJmBz@^%C^Oq^4tJT@D0eB!0fdx8auan9;@+Bls-*|hkG_WTx)HF$aEk9Be0p2;8&k7n`a_5 zp$o~UX(yWhGVczR!4mW$;B^ZT#25oM2Y_+*;A|uHFum7O^9BZPq0|Ogwq>z-kG68Y zMiM>1pb+!$8W2cdpG;$y10s}K6oG9Vus_CR4W@-1RlXs3qmJdP+C&^rcX@4X`7dPI zfoYPIXQ6QNH9n%L0wE^7G%GYASSnB zeVu!eV=bghFCn}PkT1EffcSGuF3Fn~?Wt*v5cK_{ec{@AfyByeA~596h4aIZ!My$( z{o~E#usl6OzMhKf67XDllzu#|2uUbyF-X}nl0b1<{1wEE>#$MD!&_X1au!2@ip@f# zKmg|XjL_Vz%j5NKHQKKJnrg^CSy9g|mwQn^QY8i}%8^q)S%4;^Ru|#4`T{Fc z86+hGmFI`W!G}=~VEr)TJcwwLfn^4J+J0;$1s3tDwg+q2D^&xz+?bmxp;2pqJDUy; z2KG%8Yf^y)JTfdf;C>(XQCQ-a+8D`?Q&EMX?tVq1*|{xglrW9M5-%*tg}ydK->Amn zkJu3V1JFEy0e?(3x7>{(fhidz9nyq>4r@3Nh$!~?L4U-2u_xCfS^)B*yxfzZr~&AK zvhy%@TF}3vdcY zK}B=4frxKGvg;`>^0O_3*Fo4pcjGaF>Uiq8T>%|q%Vp*(RD$34-nV_GtG$fHqK;CI zQvG64d>OS1R9d}&<9xz$7Op@Xl>`G^4Q@=gj;T6rMHlPqW=#J23<}(Q)D2F^D(a1^_Efai!9qA8IB05*AI9 zhsE+3(4*P2Y2CvZ1RuZkN%t#va8xAnCZS?wdXrR&aIATcuFl?LY0*XO-*_`63xEGx z>=C7t{5Tzena&N}?OF2}KShYvUF4!Ay&;C=&o*871!u4+v`W@31Q!8hmn{Z{PTAOJ z!nhV2hG%B8fc7}EJ%Kb!0Qrd!AQ-oCCtM)D+k>^7J(#x~vx6^!==gH+vH_8TF95qo z6u|X})bV}=LtcaMn7{qA^1A15u7lQ;N}qqo2yTd!8i5*x%}->@1INNSxOL7@5*OfQ ztyuj6=F=?z!;o{pzjjlj<=OUYXuVNtCLsvQ*T)c`lww#M?>w~wo9rIe`txld9AyC% zW)iU#8XmcNR}oGBGz8Gbx{ys=EJTrfuteigOE((cagyQSXr_?`B~9{<*ZR)~!fEoy zJOSI;WsYsP9-c7o7qAdg7|}a*41MBACM5^sd8MyY7ia9(UTQ_=3Q*oj~K;D!JoKX@Q) z$SN^3v@;~3qS4#4g_?tSmk+tn!LtNZ*8tSc%%%Zl8iFQxN-iL$+8xKYEjbZQob_bE)B52JvsZpo@DORQ=hX%YYZtc(8ir8f<+5kW?pV zZR^rM zSjU={Q?Uz62$lZ;z(x~&tH50(ZnnoBq0J9tT7dno3%PEkUpECIm8qr@q7ed8`2~*n zXTo;Ynl->YT=3vs;l>o*QRzxzqf=v7_+k!@u`9}?Wsb0gvh3KlU6 z+lWXOA%y-h!WXg2-YhoqKzrNjWr}xHyz}Sz^z=f@>wDjV11UO^*a`wQF5P_=2Hhp?lmDj6_e1T(AZUZ;zEZ z==E!e+=jSk?MYM-4XosN-`6hlN4I zoMP^V7RLC`H#OI2WB|KE8z(rDg?PiJ9}S=^BJ}NV&UdKyxMEms5DicRm!`2m_YuEP z{>WtyTwH9Vr{PQn@q+(Wox^tfjZ0KYxWg#oEuI{MehgLo8&-1?8uT?yf}(oZv(AI& z{rP1pACg{prE+fggj28FyuUVLwGT^#&Sl(l;4B4uP5{y|RZM zM0*0FD+_63fTNhKUe%LIHO_ljOk8N!#gPWG(xS4C<^_Wi#Mdq&8f?W{c2beq)mR2# 
z21{7!yZ;b);6yK(mVDEG@I~Lx$^J!1S}tZy*3Km1e|sa%f+%4C?@ZRpOIgDaVdeOAQ|!F6#jXAtVRr27i}Mc0&{66H@_aw55YxN>qq59AXauxLa+Wy!r=pc! zRAiPx8e+$!&$vyF8kd=hl`NJ{T+<>5kaqDGJVWiNXYCMmi{Mrnw3g);PA37X^C|%Q z@9fL+X#@h%?WR(RH$7}b@L0^fP!i(HROT-nuOVEw`16Mc2(QSMEZKmffAJ%CqlJU$ z3zxCyOhtvK$-}l0@qB}`BG9@vRznWut3Ba;{!kAbK?Ddig?a)05X?xLs%C37YR+`gd}fwKMH{l64A;bMIVm%%3AIKo~|`@g4P zk?PgW5as&bW6-JIDGl)t;X)1j16w;hlAu3DqBVTt=aY41^3A%8E2XQX81K_YxfePxe_gLNj&X>ajhvCl4A^V3Kb|=7@@t&CN z*}*2CGbs*j5w0YG3_XbG%(>}Cj?iPuzyoqkgMzTFOZfPKP! zd5R~Y)0gLz%<$pH?ffWkW9%H0e8L1CvOLyA_?<7y@VL3l&Zws7na4f;SZ%9?9Lv0t zdq3Dd#ktt$JfWqen9zlxKgv8v@I4ZyfTI>3=igp-(ljXXAFvV#F{6Rl*tC z0%u1+2EprUP5m?&Q7j(SQgOgbI(_8zr8}P+^pvA;gz}0YR_2^FI0)D$ZpBb@WVtyN zG{DS3jUe}lU?Qj+`FG9O||UjW_po}=hzFKX6* z^~=0aM`k&}*TN_s9>X8{f&DX^7p?24X70OzulT&h%~Lzc@owWqtA7k_L_Fp?DL0a9 zH_s71>jZz8bMRK!hM;bM*=s)hnB-~EeTW$L7FxMn4>hGtKORaiiBs(;5}`sEO#u;$ zck$VmsnaW&vHkd`;pA=&cnJ!CAv<{#Gkk^lk@|Lm%Ua=DW5`> zMIU(iVDIk-mJ0|`Fk8|yLz?Ke?bZF=-;fxJmTxSZOf{%Ak9>K>uHb8}wzIpnn!T|3 z?7&6Qp{ZxqEqhaGFlKn=*Vb3IXRd{#Rw^UcN3LJHJ^XsvWPjR!QEX>`@*)u7To1|z zVuIZje+SCb?;mc#{fo#hN)_5H%x5hGhuJ1f+0}_ZsK)1~peK*O^)Ox^PEyL<*77>v zOQe^tbZUo`a5?$(GrlIVt0OW=e~-z~p)i4(p#(}hJdfV@f%}h8`<>T!GL-ty+6-=e zqgcQ$p6xh3dt*?=WMua<^JDq|GRLY%EZ^P6o}Z`>d#w<25TY6~VjtuZki=3>W~GvT zW^imp?mhGmb-1WL>Q^rD-MK@@A59%5!#H3tO}1&F9vqa~BH2#Ow%72cqHC#tSZvt2 zqW_g8Pvcyf+-&1p_Qn>VhH~u;9A4gZF4DLY&QX}}vu~C>GIN#iX3!$b;m+rt=JaKL z?b~XPYX3evJyNXPa9U!awXjeK_~nJ6`b79~t^EkSL$T`VF?*>%3ax{~L*B69UVVof zSC(U(F6?ww2d6y&_%GuL!8c*Cpq+|-5xbct?4A8dxod9jegX=KxNld@C6Zyhhl zvG9dTX+*ur!OJQF>YGgc&1lO`UZ7CUe>t%~aTgXZoGs4434f~K#S)B9xoNcxpZups zHwCbqxI;8_`ROfaMVmNJI8Jkg9mpbR1dw;248T)i6_2ifKanU}3i#xwx-~H_VdBMMFpx;cnL5X8f z?6P7-<+8Y41-0u%fYPZeIP8~IE>#y=QCq|`RB9RL=eajAOf8$$=+{y?Uw#@~>E2hb zw6K%4*|d4dp}4bT4^D~f2{lH#ART>&@RJJvPvZWYb>}#202}`fy>PpttqK;-Ij0Nq z2|hL}C$TFYpS)xtelh(~Po84Krw08)TvctTJ~3X%fA^2;U}Dqll88$glpLE)-fOd( zHCMwc-rs=JKtD~SAQ~2jpcZ;ahHL3^`{~uW3$g8OxkmczK!}4`Czp3;j;lT@7gx9} 
zxSYMavzz$UvVPpynuU-@1a>K0p?uk`tmlU|O@*FS!&u_}QXhWFq0;gN^9Bw(B~-h2&ZPWWDysP^8T<16&gOv>-*3h+6d`d&b7tTT;>P8w zCB8z)$&|ryV|bb`Lpz2fxgTyr;+~1ff}>(JE~%v%4nhnP*Y*{mz*5ezax2GutUbGb z-9%b)NZ(_LSdShPXr|)8S>H32BQI77F3KNK9!C$UKDF^E{QHd@_r{&4Od?_DsP z&Hm1EKK=Bmop@0qR0H|q`tjG8f?pgDsi*Mo$A9WfROlEk(ozEv_iHM=+lP=mnMQGz@mfGpQp9U z1ohBop4(E52N%@fAFX0d>AHjDLQRBnqa^h$D&Q3Ka4h|dFrOtnhaZAWOWiExB-%ly# z&fz@FRiyP4y?&&@{;E^qckt&**xd!Bc`dFUJKngwf9~i$9$jWtvkK7Euft!hl%Inn zZbvsN=_A)jSB2h&m#!OHygZF5<>>{aQ$Ikhl!dc&XkYH4w}|f(AL6u>p7*>ms9aD! zGj%1zRC@1IrqhnK;^oJQGW)76HE0YzZPn(Dj043&9uGE+Yo1JRq)8ig{k*FaP1H zq;~`0kXJEe!GH(Cyk025bc6cSWw9HaAz#~wYF@-_4s51v28>YFG);(C%vYj8`Cf*A z=rfY{bBb>o2ItWcV%ZH!zWA2F&-(smUzYnTDwnqM zPQQK}Yx(K5x385&e$9CC!lJtL#!%vM9{*r1WTrJvdS~Lk-w2z`ue7s=v?^=chy=S6gqCZeO<7Ja_$*5TS z+Ji@foR-axIYU)`67Bq1ISKOObU-WKdGq&};o1^B7-MVe0<#E{=IEDqGv2&?tlHR@ z1OrWuKfM%9-@+nr8D9E05VZAcswn&6s|PUEmOW$9jC@~M1{NZ`M&hmi_ua^!zAy~j zc?Jj5IE#;CLU6QwhO4o{>;f#+QPS;81n!1QT|4ER!C+bOmF$P^O=tT+sNZcLv%al) z>Hg(3i?mxBxDKd7{moLskPGz9DM{avKSy6TihSMKn$N~E;PbAc5 zWevVm7uS4iK@Y-#Rqsv>%`wV0$+UiOy~+$a0QUQ9$vEWnn{?EIa0~X@XZ_v4t>%ME zpap}LPsyU%~GqW*w;M_hADV={MONqWtlzZjMZKv3|ZKpjcmdH2-*g1rELwADf52=MD{R zhI(Rmx%-p*|J-m+n=tS*)d91KZ@{bPqJnX+JhKOU(5u9Ag{ttP0Vj12MwZS!Jez0- zr!Vlx>do1e0-4-I4-qSzmVI#ITWh~wN(h+%UrK;#)RKKd@!!;dJX4Rf7C6)aPZzM8u^)CD73E?hw6Cpe=iI9SYHzrhYi@>D7I^+p3t$kf4j`%xBhL~ z6=$N}5)Erzij_Vq68^pR7|3mbrME8p&vR&bNfdmz@3S=VveIShwWNj?RP|2wZ^26S z(@S}6co!e3KnS@Gbx_f(_h1jgQ+7(#0RAUj*|BtVgQTERdAT+-aOQtMb{-8GFBKei z-oq|d?Zu1p_l4{q4#O)w-biwWI;KBwT}iE}lc)=8D~y3ybh_J?c65Op%teY)`##ql$-1k(>+8%M+EY^T72 zpr&NFp!sjG`RCePggx*xsx{}9;ZRcjg*fa<_#!Kz`g(CCWMlrq;3K!uNy#fNtS)rO zXTk|C1*=R7!8G5TngsHaRw6HKZVvhXd6oW^zLUCLipFvEB^MH`;f(-8Nxq z?>u+tXGhEdU)=couKbn^7S(_SBZ>=f1kDEi2%%H}+PE@2}5EL523h-E$N!{uHRI6wI7@q0iF4gNyah)MnY z2c950!J;<4vLxo4a>%wTQbdKIKQ|2i^#5DfI1@DVJj9`CA$xn#AXcXldgOeO?vwSN z#p!hq*5H>!_2w&Eq0#8sr%#6BL$!#?2rR#Nd{m@`lJ1?(b@;5%RaoV_eWrs(|9j;o zM0~m#)(;-N3flFKJ)I_avD0Pf%2VSf*oHsQ{_g>DIj_zG;pcGX-;=@>Q7Jy)rcApy 
zUi%ED3$9{k_8kY7gy2$fc?2khEu#Oq=+IDU%xR0)yl-Qu)1jJ-d6jeE`cz^T++?Ff zEB9f5*`CdHD&%KW4_(~pQ|Pa zb0jSTXijhRi1k$MVy`#lN$4)pl)QoY|Lg@X*T0a0drA!-7I|HuI@>aVhAR6m3}0tR zKe(+N8B2z0e5n<^%mweIB~rnQ%cQIS&l1FmqK_9j;|?J*;ker^HoV(p0prmRxaB>iH(#w4MSvnkt2_sXnD1*Ek9+hBHLzO-3$Yy z8V3}pCL?Ns)D#sML&a7Maw81Bj)m(u5`lcr!RxIvaFzJ~T)R;i(i$^c0V*Z-)+B7d z+`_lxj-7l5o83onPyzX$rp9TxH&fz-RubzXxAn;|uoE-5-5des?s#IHY{g4GCK0C%D=ieK4Kc z)m~bCntv%<@O53tZ4OAq42mXQk|mlS!2ON~>4+`z3zV&He#oLdab7!5!O!N3*NeVS zL1eIPIRR&i_aGu^`JT?MIvamK0ttR!=wS6Ui9PK2|G%rDp~_gW)tJDnL3QB}`>RAZ z6V|CdyrN)u!arXN9qRWWiLmLo348s&a7L^7guo@MYu!XX+cQ(S;~ zuMZRpaGIMn0;`G-n)3f&ci$b4b=!xHkQ7-_Mm8-YI~i9IqBK>?NJZR4atRq-THFmQ zBV}c@6h$P7tF1&vNLeLY*(0Ui<8;?^ce(HPeV)JHKA*dfp3mdA&hPmh$8mhW=ShwC zcRkwo419bJkzAH?dm%_{hnm~2tm&%P)C>F(V;|jrT6#gV6>>v|OW*LTAMw%-(FJ-# zzVX%mKfe2`F8=o5=yb`#uIs&h^OwfY$Z#|7kZ0kOHy8a@o_JPfHQ%>_G6B*I{jz(! zp!-(MXw6`aPe%K_1pe`_j77s%hfN-~$5x9}8arUzXj9^>y1&OK)gJm{KjxI)0f z^+`0JJ%HO@G(zALBNtqo^TzsAM(BJ20U7PZHhW9D*9-@rEL$XwIjtm{9^;8$-IQh~ zl%1jTI%}7ld)%s<$Xn% zdw2M>qvUVTgW#kVh;4J+azGXiPmY4L``#XC1;*0LT)Y&3fGm~#hHovVXcjKD>Y=r{ z(QG}`#+LdvG4U12s*NP?BJmCzPWw#kcSFgflJCUJ51kR~PAK0rlyE_HZ%D^|fvR0> zedGi$9}oFtWCfe-%82*$GlbhPW2!5Xh#uU)AgtYmR5vSYkaxNXOS{8iz~b}QlH z&QLqbAFw9nhZI`UH-Y zd3!95+*->3-)LToi4tQuAQ_2lHL^dcPG5=l5Ou5H5!(zH-P@#)gIOBpYikg{!+{0tG3Zp*4@ zQ{d=O-0kv(#2_4BE)}QH`$uwiEQ;&*N^6Ex1zE=boY5do(fm}+WjSryG+B#ln_X+K zBo>sHp71Q1W<5KBsd1k+w;>igP8Jd9dVi|s-@5@np@^``+rI)l%I+hTa2+pzBduJI zqo6GoSzP;=sWY^ko#m8}QZx#)^Qt8=N=o^mw0p;y8m$+G5DOAj#6|m*iy`XH?z#vqvaYHeASAZ`sAf zo8_g4JuPEF<0`!P14&hFL@iGulb)#xH+CUw9J%gG_q45yk6gsrF`!zrq!pr<3$jj? 
zm)G@XiVnS-#*)w6htut(g`eU?`7OZH`0a1x2Ye$3LPg)KXvHb^DKafg#9m`zuVtcJ z4;}yA-*Akj)4F>`m+pRQg1)$ks3QTs>vy8+O8&|rV-iL=__=Nd9prz9eNfzX7SqLrca6i%x~mYz#Fj*ORTbZ^e3bqP9_DEEZzCXoJwBQ-HYh*(9g z+OdB9JaX0LMmw$K2p%N(J~9iO>w*z6HdyR zm@#@Z)?IQ*su7+J1Dn?Al5{W}k_IV(G|rsVzv0D6tJN|lCoU1D=_>GM5cm(4iKWQr3Y2qg8|`gU zg1q!B9d8^Odz{8XL2hEPu+m`2cF8&e4qAGcFL1}SVP$>wYT)V1yyz*SCjg& z%Mv^>EI?hONdISA2=#eN-$}*g+29?}&$KQTL^Nxk`!b2w1onJmiMKCHF?cuSn#p{5 z)XoS2C7XKDT;EaG=MY70zTBtHa*L1jaji+zvpy-Pun41`MF(WvY=!neVWzh)a3 zn>_L0BSI+Ff6T2|$*@pNfx&Ft^K}-$5bOil%TlMA=qOhQ4VQeV=U z-wSEQNou^eZa8WX<5L7m3iL*u6WY$)JiuUCOn9PgIL7$4QmD*e!fg*4=Y2vl?m02H&gcfPS4WRoWGl5`E9vRc)EKLrK94}8m_*~z3d4!S z79GFK-Y|NC0wDCZYs2=RRv)ZJt8M+`yqXiAUG_QV-ZQ){!gw}{4&k+=^=BFDU zf6+}=e-m)Ec|Mgej>_7yg7bu%jthyfQt97JlvH~?yf{jO^BW?xizBx`z z`ZxmzLM2}20!{W4&cqH6!r$F#xl%6o`^9isFE$c}YTkc`ge?t;=jVi{?i`P|HG_BJ zicOe5OzqdqHB;^v0g=q!^?GvP5XP?*ceI3tz1Nt{|}$XX17>cHI{G_38gw z1iHmeI*nNcL#1#Sd?L(4Sd{gH6*=8B9I~6(?r%8i@x4tZK3vwmcN&n0o(Acm`A|ca zgOii<hoRzroHP17 ze)*kc9X)l~q@YxmS97W~%mqe{G%gCMI1k-p6LIX{7r`ZGO0O$obnJUMI^OnT4jw59 z&5(1(RV+MY=Nt$a*)&9S*%}c~E^q6ZdemDiW59k^{!x8x6_Z;xZ<;$XaZzI8l8P3L z>&)@^E={q2Db9HSig(Hn4V&%(x{5S9@K;&jt?VB_phqB&QeJ?XI2x&6F*Gu%DCSxF z>NSXk|2u+9pU0y8XpDeRKg;l+PYU;Iij&sYh!D4lJI<_XBp@aJN-eE6 zr&)8~TN(IsE{{?I7ZJh<-gG0hH7wqlr#XcPJLs|ib@rnb7gzsoRxk_KnMiA^S$l=O zyViJfvNZA(!zJcn;<2R>I&B(Jb z39lu*pIcn7-i3Nq#AYK;-w6r^czz^%0R*%uh@3}OSvdJ{0M6~t!WH21WC2Ca;;bcG zsfRr1vke#5PI=JJB(ox20S6E-3MAJ<%5^tjBA=*60Nx0;Mc^Aoz$Xr71h%oU9&|Yb?vY09cj9yslZ}XbZv~GnSgwapUcb=DSs7se)is%l&Ab^BI<^c|Wjcw4m48103Y; zRcOdQMS+O471^Ph$d4|U(egAj-W(X}*buAhn;W@!tKvfken7~EP6R^cF~$aPXEn>2 zvQ#z%ZHL!;4hczjl(TlH6v33;Tb{-yX+diTq$bk@&P4`5dIVVR=aCD~l70sBt}Ns6 zwCC-DD|q;}wyHZlS4|B#xqG0pVAz{l0GV9kIqpI~Zyt~t%0)O=EK z-0k20iUs)R&!}u42vD>b?QC-3gu?+xeLb*s*Xt3~5wT4EV7v$V-W#kwuXeQnLi)-) z_uUm|R`Sc53fSKle)t2CmsIqsA~t&q<;|jBvKEsA+?UyQ->|-yOZ{ALN$N-Ycys+E zY@h>>*UUpo2InvHkST!lN9I$VSiCPc@eC0%C)k9>l;BG+px+*m9>XTL_PmU=aY!VZ z5muwicDn6n)6?%VN{SIpo*nD^BWXo}AXoFd+fLF*B4gmyj4gk_DW-vs8$jC%Dc?$6 
zytu}4T;nB+yn1a^AK4hRQ#&hHStg#HUA}$-AWaksbkkFU3J` zVDe`fK}CQ5Pod0O>Jx+R4x5G zT&-&-zI_=)Nh|IIc7E=3>)dvP*R1{TS1z>7l3^}rYFtPa1x;>)t=;R!%=G(*gRoI> zu6{rPIk@h+Z$LAhIq|cZi^vba9M8dC+M}dPR?K&KiL%)qnvw{qRh%R%ur!h@uy;Cg z5==QF==zfPNI3Dws~WaqHHP9sMGZ-l5CRnGkl9h%sU;C*x%Qa>axB`gG15(&~n zJUoEzav0k6cNwb9o(9{kPZ-6txySqX1M?>J9`M{JON^^D^BVxOcULY1 zE6mqm^4fFl80NjjDlbDCBUezX-t1}Z<5}H2f#7(_Jl_5X1|^KE=0IS+Apn3mIj7yn zK$=>yPMnm@goImlQb3Y6+I{bMnKX@NOTzqcUzHEzo@{4Ro0WiKg1I2L{=RSR%QdwU z*7|djN|N#f<68@nJMw1{#BoW(U{CfdcJXBAC*75*IKR;C+}D-0XKG`6$Md_F=^dW- zo2Z6j&!5rr4u|S?G<^BZ#AquJWv5LdhB>L@^&K_%O6 z?p%>=cqYRs0eIu#OB%WP?G)%AOWSfUo(kAV(RaVO>j|e1!p@B5iE-D9gRqZUxbpzr zZ`An=kJ0DDY#rl=KoCKs*D}HSLe@vxNJ;c%upL#Wbt?F8JB^JwAr(2#gJ#W>!t!u& zS3zWE-1+8KN~&3mIh?FCY76DK{O|W%<*qrGbT9GcdJnK-?yfW5hdna4+&C|)duTH% zY(=8%s+s^}QVdd45{Q9-e~T1CvWb2SWzr?>zYh3(cTjW)<8BL(Oma@Z;2dir|4@~# zODq8{S$cstf9Z`f`ga(G3|a#p$RGwPJ4bY15X^VFp}1)in91f^d@xjdrug~gWzLnE zgBi}xoX0Xm^1HJ1Vc7jUJ_B_sf$ovOg>7d}m#?ZVFQRtBHkukGiFe`|E5diH=lnea zV&@euNm4C@3ee$58xW@P16y?}M_|>@!M{ilsSWct9Az#oq+}Bbn(g zxv?1}+0rt**bDR97#n)C7ghT0j?+DabuTp--kW~K65zlp1HbsI z{%~p8J{`zZ(%`<@lpE?T_WE{j)Q6{7Dsl(O&PD>86%FQUTU|C6`-1ThL~Ha^in?&~ zl`zX|#+r}MxLlp@K>zwl!SETB(zd{OChSu?xy%FM!U7HF&MaQ2QfN4ZS zi~HmT`mV|`>S{3z;OtMn`5{$RLV^O$Tn=tGs{XEraWTrr*UOIu5ZnK;71_iNW`?aw zK{i2M80jGx*Ei=uyGcnn#Z;Db{i3RFVGvrLa5L|#VHVSvm~?i z4=YX4YRjj>2T9dJ%VGC}b=gCmV5fLhB=b9}PrkWx7i8Wa*?w#-%v{sx)Fg`1kD5h1 zbj(Va@ry{*ur%EBih&~K{BW7)BQsK7l{Z^!UOMB*?uQg@m1c3nO}nFeUM0l;FE!OK zEU(lEV)M?QMGBB;Iiik2p+d6R`Ii(MyoZtd{!Ex3&KldsHSNEEaio`tzo5GhETwJ=+vTnM2Ky%M#68fhYrZkZA!G zIn7XviL^!J%Rc}&L>Rq@WgZUyOYKMPdcB(Hw-}6Xzfyv+)C63UxvJ%D`ErDUbc7oL=O`8d% z0(-DMNa+Guj{W954s6Vw27q=&xH;DnEyHiFWdDvNtknSK4Vt0e`-mA&2fpfX8%k(^ zmpTRB$n>Ym%x~I6vI`cN zVC0elU@{q6Xy}R$=SiX>2A}vzne+A(zaw#pMPd8xF!bTAMmb^+>U!yo6N5a#(G*0Z zR$!--mC7C0hKF&^`wH?Qk%Inb8nfSR>N50c@$4t!t)uK`Gk*IdaB%?((7y$@5raZ{ z{34Te$m~FP!4i;xsi@v6uWq``v$j08S9_XG36wrEC~H~!`j?tmp~Z*i=22Wy%qUQU z@sb>mH$xf(>FN?9&R>)MUWYYVHVnKmQ05RKIirfR)FwL@bqn`x4?qgzgLxNV($Y#E 
zkhedIw--}8NBvunPK>V4fE4gJ0@og0?EwoYF)aoXh&czKZ-AID*fICRcE}hPp!!_8 z?h}*(=oK<%5*I)ESe<}i%b}!tXD8q|jUmd=C~Kg)DnyrVWzpdu=e9iZ;V?9#?9J<5 zGYask1d&4ILp^X16QVNpz>I`26x4ilW3!k^Ey?s5KpGsCJ2@FH+8o3pT!4;<6>wDY z09!{4nDa%&S!(3cVaHLB>m7G|i1(03+|_)AnWyU!#0>tYC!NizQp zXys+}>;1%Rf;}+vU@^)6USZavNrF$?JQnXep38X1!k@z`Jz|gi%(zMca^+d}W_5Y) zDVY0bpnb5XWAFXT_xh3JO)d<^GKkE8%~s56h_ldiswQfo)lrM)K^O-ag@g62({r{)-@hKKw0sO&wYeblD6;aW4QhzI~IgDv>Ttw<2$XkI&Frjca| zB45;n(#g&EMfU>h{sC>?b4&(l|LY-t!NGT_-p&O-khDaA3?3ZsZ+X^=>4#9=ga-b% zy#x!72Jtpe(>3lj;LD#63Xz+Bgp#eE$2T-?3%3=3U@;}4oI!c>xG;f*8+qvWZj=NP zJAFj#R3nKx9iRAbtf5aeP-EZQVzg4(V_`R9*qVN~Zo`?`f#_q{!6~@JQDbZM+ARh# z*c~RMudfQ?^+?QF;io@)MsX(^QaU#>-Gg)4-cG43c{VXOczYsDtL&sRRd87@WdCMX`Br8Odph<`7DIS-d@F zhd4~z(hwYVgkm+*u_L2IENxBfMu5+PZ3l>Ai$-G_{01mc7I*?+Fx>^ONoo1U0;y$& zw86dKUY>l0dfpD8;b_EN*?6d1Ar@n4mS5gK3$)dn+fHXe7e2iHDZ{xSND*LM44E<6 z)`@`trfAATp(b1Q|HOq!`}q><{LL^$I}&kuYFXfE&q6>Gn`Te_Gq#-If*bSp+YJuE z(|r6>J^|^^fByzx-uEE4=RI-p^XmsoISh?=q7G95gR~mzYowrZFR#WZ*+xsPI(e>L z$tq6*zzeR+t^AB6`{sPe*D4(=g?={z3jGHwnX?9|uBwJa*bs4Jl1~H^VieL(ZL&V&i-tn=rv9bB&3|mcVuIcGmIGfhCT~l`0e8IWkUWsJw zb-qKX@be3rnL(KpPmW^(H=}h=F4&3EiEx=Z4{41#8Z6$Q6^RkN$F;b{cjgCS@&5J% zT2?zTHADp()vA)8=$~lg?tGyp7DAqb$|ykb2XX7b6bn zp0^@YV(@NemN6~%ONSUf0#l&Y94%@`82EulRt80AH`g=IG;aN_&<8Nk2XnX#i~cCi z_&WQTA}P=SIKsYGe?;QW+V|bRq}&%y--}SnMbkR;e}80&=65W@$J)x`cL1T2C#^1I zLP24JHn!4R$mNKZ~ZO5c#9kKU{G zlnmP+GeBUH>S*7Q2I2y zjo6qC1Pr5#G=&Io{QFZ&2OCU*EMZ zUR&1f{%|w|Zchv530T^3d9MrM@gyHagAls!lged&zc6YZ$C({V#;{k5fl=Dqg0!@sL7XLqcjS@+oLf{2wcphhNs|o=LWDtef9G|bE+$&ZRuPFW3nz0Z$LaaH+lel_>l#pNzwck#d24(DpkLj5HA3fW zPd1@|cYSOK9Xz0r3 z^et;u99}L!GAu_6t>)OTf)D+FQ7BL(PSX@uq+Hku-ok=tkjN3;1>;@mCXzh+0x@J0 zij`%%%D#e1D6Lf45Cu@g49b#L^fpGIeF_=;B4ry=V#F=MD}1a-bDR1KJq`D^cGYIO zMJKP0RY{w%&MazCYmvl|&RZ7bQ(?V@U(4cZp&ugX_6|=`q*&nf?HpdXH5HlJDXnTUMIT>WLIz}=6-F6T`F~~|UaM2*|1jCdoI-P_tZtNtl=A=s5DLZLrAWFFN zTuafk!22!kL@$)vQJ_)IwK^@}xI#xlQ0nhK(qlO-0&3b=9iJ>RE4YFMg5*SmJM9Q{ z=Ph-&o%m6I3~5c)AOD_)avbBu&;)%V(*az5l_%3_EaTOkKdMj8>ksg2>bRgbel-|K 
zehEMv864eSi2AbP7pfE>cw{;kW(m5?V}iJOGw@|~4)ro0me8;Kb(iHjI^ zqxf5ug`I((Hcx@aG#-$@XphU%Tlr%ATo`U!tPy)o1EW=3sB;)(KR9n6Ke)fGHR@u}8p`dKYr$0LA0 z>FMrdpU`MG;GXvB>Zoxucvf)#tkg5Tgt*0dI?2~{K|Ky&SCq_DD%j~@ytg=P1=(|; zJqx~=-U`KZ>+g$Ejok6EDhjX$3g%GQ#iuG<@B?YEo$wBfA9Je9N(1=NxLd3)$2o=! zUqz;!z$1Cs8$i|Q+~sqKFCHPjxIM2N(-L5=|M@F0PpCZUAW$))7P*UoHubh)iyz;k zxn94P=u$50Fw_lnwxwTV0-bV%>wg}jlxW!~mj%;w4_Gu=V#uPz$m~y2pR%L+rIXAe z;KI&PR zR5B0w!AUqZwf~1xV?rBSxw6gP;wDfNpB7<-TtaF~Z4D0lkM&(dZ=Nm2ZAgsY*Rv{` zJ&|2_Y52<_XfQ9ztd20mD$Bq%YcyfRM%}p0;hJY>*XcRcG}Q*VMu?1mgcAqH8V^bv9wpyT3ESP5_ktPuvJCG{YhlZEZCC_|rjTULS>P5}yq|?? ziIGnw#VhePvx~;W#Q$Qo;fl?)_L1M0;^(61$ltUt5hMe_Ecbp#Lx>!0R8&Eo+^A3< zzRP#F(J7J>{AUHpPavR?s6+sFH9?k4I@Smuy=an;ete(wb&GhgG5iL(0Op5tiD*nh zD&GAcvM@E3#rwy+Lx%_yc??-Wd&boJ;J0+-d4M{S!VDtT7YNrgnstrMh(LnYi_-9G zzwKOW zPH-^v2BEz$@#5vnxEW;|$%;7taVHAhgN2~PC}7PXh_uIK9P~G)q!CRSeH59WzuQa4 zrHpGvDy6MEQMHkoI16nH3orou6jNiN!KxChHFgL}3E$lrQ{ zj7AH;RWJCD-=WdkSqW!B8gGBjCHNn$8up(C+4jav8{K*8e+V!a_y?VgCVSN#4Ts{R zPpzilgZRYXjCTT5sJ!E5OL|gHi!Os(Iea1kfCy1`V`vj;1h(Vf;>y^iJ!wGnX=FrK z6mrcH;4Jd!=a|P_Fn!Hc7P6Zy14$DU{v$@eG1tX1@zZp!DVQxWp&kjKiA3%O1}T!M zk^AUM5n|?Jd^a8FugXBzpKMn_2=7sCpq2E_^#2mNB#gME5zCP4@D$6zJERUkMC}{qq%}q$TD;U})feD{!zVXF#O8TfZ<6kc7>zPL6|#CVAFU2E)>^Vn-Zy}u`V*j$63S^Va zzI6>0jt_$y3P-KaNB5nX*iG!<;G8#DjJ`@$(uD*O;Jciww;2WDlN% zmp4B>(8b!ffzO>RK=Z6XbpA9Is9u1Wj{xVY+N-8ChT+Xg0yorCw~6XWrfryjEJ`U* zVfKqO6eX*uWv~-U4Wej6gKyzib9|we(;*czCx4OQFS;2E(eU9Jijm5v8ve<*!6piQ zOn<#poaBBm@jhfc@m|=SJV4ZgM$bB?^m9)>H5;?_hpk%$Nn=dM_C6p$rw6Y6{Z68R zG#b<0gP+cKmP1Y~<2%w*)+ws{FRS!n{Z6C{15h5BLI?%Y+3PaE@RY}-UyEkIO4R9q zp;Yc)#erow_ODb@H$`NM(c+(!`Wb*YW9P6D(LUKo$Pw~9p{cra4sHA}$`iD*fZE~b zq)Y?Q8U}>k;lc9Vj-CH}v&R>IC4PilMJ{ma-Bu}@*M*alT*6)sj2Te&o*2b!gcsmw zN%2_LA$ot(UKs!SKuLn$+o53;2MVEb@T@OnVJ>*LSu}WZy_!j5tzdzxXmT2!cL43b z%TqjtNLXUL3QylnIzaPO=td&+{11_G2}Nr0r^Wb6YoAO~1|F=$fkDrsE>>CBOH3Lo zt9|+C5W$3|UX4a4930VMLh=Z&pJ{nQPWw2Mp7wT8!klUuHiM_zh&enJT!yy^#FXV! 
zB?pEk%HWX6q+?~gB*x9?1bo{@&fWsz5{*R%SN10=7Mbl);IvMin&hT?`qaVxSnDmTX;)woS%U}{B zOW&dZfYfk*&`?7?+^rduUU3zx4^3&XCp*s2pc%Tt@KC++;<%-xW0&1;%}fmcXw@Ro zet$3>vV~Nv{{Z%OP)EhjP73Z$*@h`ALjEZwlm{^mwzsNx4voyfPTai-*jvi8yMOHh zqC3N>3Grx9)G-W23T*$~_$=t8prJ_De~D{$Y&dPrE;^2AcYNIGH0DacaoaW_T|XZ- z*`~LbgE%c{3Hf7zvYg|Nf(`)fd~(2v4;t+uOFbs)hQH#BtP3^9L-~4c_cNZI@UrR{v<^LW?Q?NP z_o02=dFA_puYJE_GrL!KL(amX?hV$`*J=&!NDiIb)_$+iA)`I7&{bEw(c|K)TV1EF zWfXUvb`^V{^B^a{>#f)ZUxPbOaJcg`_52C<%khoh<&Ay>zux8VIiWY?94@vy=bhqk zUamCtU2>Juy4se`ajV{#;PE*POJ}@VeC^ikUcG^1zSIJ#dtL@jE$_v;&-m(J)6lM7 z0%!8zM_I?Yu2Z$6YddU;v!sL?uB#U$7d^&|IxEt92ASN_ z>=xBM#m%CuN<)V6>Q}PoV@AWtn$bfx9Uf|0SqW8{HLo(7jHzqW^=h>rI`8m)W;tv; za=tnv_^l&bcE+b#yZu})H+qM^luJ^*13kS1vpro6?Woj8?}Fc^4wI%2dVs*c0q6 z0bJ9!jsS5gAw?TdgGi*i&w8X><5n^CvhxK^?werr%3(litQ3xG@rV?~L7R~up$^rd zma6$ruTz()*{NO-R)=?My2nnx_{x*(k4H0842oRE__fa+)ZJ=OMYy1au0g6e7Q;&Ub*|_@Qh1KojR-2z43(y_vDd&D^&XP*!Hhr7=hq-R`l z=VE4mxeZB4(4X@6PlvE8HUe@z^C;C<|Yh6U1s@#U^_Nxc)?g+iyTNgYo z)G%WBTG6eBKYrcPRW*tM;=_-Na$g5xO4(3pd`Ce)k4>|9oyhDO5w1q>O^r|o=j#NlczIf82BXr~buYoDuxJCXB{ z0lQ=Ht}pLBBakpku5+3Hf+L{q2S_C^R|9;qCz0=gBdJL*h0x|FqZ7AqKSTk8=7U%h zX-&6Awb|BYYXNwHk&3T#pVA6VR0dl0@k0E-0^+r<4bAvxwEW9eOElvD@&C^?zUx~*KenlG)WnPle^#$DH_TYM H^T_`JO{aTE literal 0 HcmV?d00001 diff --git a/docs/project_architecture.md b/docs/project_architecture.md new file mode 100644 index 0000000..973abe8 --- /dev/null +++ b/docs/project_architecture.md @@ -0,0 +1,53 @@ +## Project Architecture + +This project adopts the **Service-Controller-Repository** architecture to promote modularity, scalability, and maintainability. The application is structured into clearly defined layers with specific responsibilities, allowing for clean separation of concerns and improved testability. + +![Project Architecture](images/project_architecture.png) + +### 1. Controller Layer (API Layer) + +The controller layer serves as the entry point for handling client requests. 
It is responsible for: + +* Receiving HTTP requests +* Validating request data +* Invoking corresponding service layer functions +* Returning structured HTTP responses + + +### 2. Service Layer (Business Logic Layer) + +The service layer encapsulates all business logic and acts as an intermediary between controllers and repositories. Its responsibilities include: + +* Implementing core application logic +* Enforcing business rules +* Delegating data access to the repository layer + +This layer ensures controllers remain decoupled from domain logic. + +### 3. Repository Layer (Data Access Layer) + +The repository layer abstracts all interactions with the database. It provides: + +* Structured access to persistence mechanisms +* Isolation of database operations from business logic +* Flexibility to switch databases with minimal impact on upper layers + +### 4. Data Transfer Objects (DTOs) + +DTOs are used to structure and validate data passed between layers. They provide: + +* A consistent format for API requests and responses +* Isolation between database models and external communication + + + +### 5. Model Layer (Database Representation) + +This layer defines the database schema using Python classes. Responsibilities include: + +* Mapping models to database tables +* Defining relationships, field types, and constraints +* Interfacing with the repository layer for data persistence + +The model layer remains isolated from both business logic and API handling. + diff --git a/docs/project_structure.md b/docs/project_structure.md new file mode 100644 index 0000000..3a23999 --- /dev/null +++ b/docs/project_structure.md @@ -0,0 +1,26 @@ +# Project Structure + +This document provides an overview of the project's file and folder structure. Each part of the application is organized according to the **Service-Controller-Repository** architectural pattern for better maintainability, modularity, and scalability. + +## 📁 Project Tree + +```plaintext +. 
+├── alembic/ # Database migration scripts +├── config/ # App configuration +├── controllers/ # API endpoints and HTTP route handlers (Controller Layer) +├── docs/ # Documentation files +├── dtos/ # Data Transfer Objects for request/response schemas +├── models/ # Database models and ORM classes (Model Layer) +├── repositories/ # Data access logic and database interaction (Repository Layer) +├── services/ # Business logic layer (Service Layer) +├── .env # Environment variables (e.g., database credentials) +├── .gitignore # Specifies intentionally untracked files to ignore +├── alembic.ini # Alembic configuration file +├── dockerfile # Docker image instructions to build the app container +├── globals.py # Global constants +├── main.py # FastAPI entry point (starts the app) +├── README.md # Project documentation +├── requirements.txt # Python dependencies list for pip installation + +```` From cf1ef6dd9b9c6b0091ecae7477ea1ff7fbda6987 Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 12 Jun 2025 14:37:05 +0300 Subject: [PATCH 06/43] Added license file --- LICENSE.txt | 674 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 674 insertions(+) create mode 100644 LICENSE.txt diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..f288702 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. 
By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. 
+ + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. 
The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. 
This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. 
For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. 
Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. 
+ + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. 
Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/philosophy/why-not-lgpl.html>. 
From 48b0df2663ea176d210366b6928df5cc9cede744 Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 12 Jun 2025 23:50:17 +0300 Subject: [PATCH 07/43] Added more documentation files --- README.md | 20 ++++ docs/add_new_endpoint.md | 113 ++++++++++++++++++++ docs/database_migration.md | 213 +++++++++++++++++++++++++++++++++++++ 3 files changed, 346 insertions(+) create mode 100644 docs/add_new_endpoint.md create mode 100644 docs/database_migration.md diff --git a/README.md b/README.md index 44a6efe..3273df2 100644 --- a/README.md +++ b/README.md @@ -103,3 +103,23 @@ Once running, you can access: - API: **[http://localhost:8000/ihr/api](http://localhost:8000/ihr/api)** - Interactive Docs (Swagger UI): **[http://localhost:8000/ihr/api/docs](http://localhost:8000/docs)** - Redoc Docs: **[http://localhost:8000/ihr/api/redoc](http://localhost:8000/redoc)** + +--- + +# Documentation + +The `docs/` folder contains detailed documentation for various aspects of the project. Below is a list of available documentation files and their descriptions: + +### 1. [Project Structure](docs/project_structure.md) +Provides an overview of the project's file and folder structure, organized according to the **Service-Controller-Repository** architectural pattern. + +### 2. [Project Architecture](docs/project_architecture.md) +Explains the **Service-Controller-Repository** architecture adopted by the project, highlighting the responsibilities of each layer and how they interact. + +### 3. [Database Migration](docs/database_migration.md) +Details how to manage database migrations using Alembic, including TimescaleDB-specific features like hypertables and compression policies. + +### 4. [Adding a New Endpoint](docs/add_new_endpoint.md) +A step-by-step guide on how to add a new endpoint to the application. 
+ + diff --git a/docs/add_new_endpoint.md b/docs/add_new_endpoint.md new file mode 100644 index 0000000..5b2aa19 --- /dev/null +++ b/docs/add_new_endpoint.md @@ -0,0 +1,113 @@ +# Adding a New Endpoint to the IHR FastAPI Application + +This guide explains how to add a new endpoint to the application, ensuring it adheres to the **Service-Controller-Repository** architecture and follows practices such as pagination, ordering, and wrapping responses in the `GenericResponseDTO`. + +--- + +## Steps to Add a New Endpoint + +### 1. **Define the Controller** +Create a new controller file in the `controllers/` directory or modify an existing one. Use `APIRouter` to define the endpoint and ensure the response is wrapped in `GenericResponseDTO`. + +--- + +### 2. **Implement the Service** +Create a service file in the `services/` directory or modify an existing one. The service should interact with the repository and map database models to DTOs. + +--- + +### 3. **Create the Repository** +Add a repository file in the `repositories/` directory or modify an existing one. Ensure it handles pagination and ordering using `offset` and `limit`. + +--- + +### 4. **Define the Model** +Add a new model in the `models/` directory or modify an existing one. If you need indexing or hypertable functionality, include the `__indexes__` and `__hypertable__` attributes. + +#### `__indexes__` Attribute +Defines custom indexes for the table. Example: +```python +__indexes__ = [ + { + 'name': 'new_entity_field1_idx', + 'columns': ['field1'] + }, +] +``` + +#### `__hypertable__` Attribute +Defines TimescaleDB hypertable metadata. 
Example:
+```python
+__hypertable__ = {
+    'time_column': 'timestamp_field', # Time column for hypertable
+    'chunk_time_interval': '1 day', # Chunk interval for partitioning
+    'compress': True, # Enable compression
+    'compress_segmentby': 'field1', # Segment by column for compression
+    'compress_orderby': 'timestamp_field', # Order by column for compression
+    'compress_policy': True, # Enable compression policy
+    'compress_after': '7 days' # Compress data older than 7 days
+}
+```
+
+Example model:
+```python
+# filepath: models/new_entity_model.py
+from sqlalchemy import Column, String, TIMESTAMP
+from config.database import Base
+
+class NewEntity(Base):
+    __tablename__ = "new_entity"
+
+    __indexes__ = [
+        {
+            'name': 'new_entity_field1_idx',
+            'columns': ['field1']
+        },
+    ]
+
+    __hypertable__ = {
+        'time_column': 'timestamp_field',
+        'chunk_time_interval': '1 day',
+        'compress': True,
+        'compress_segmentby': 'field1',
+        'compress_orderby': 'timestamp_field',
+        'compress_policy': True,
+        'compress_after': '7 days'
+    }
+
+    field1 = Column(String, primary_key=True)
+    field2 = Column(String, nullable=False)
+    timestamp_field = Column(TIMESTAMP, nullable=False)
+```
+
+---
+
+### 5. **Create the DTO**
+Add a DTO in the `dtos/` directory to define the structure of the response.
+
+Example:
+```python
+# filepath: dtos/new_entity_dto.py
+from pydantic import BaseModel
+
+class NewEntityDTO(BaseModel):
+    field1: str
+    field2: str
+
+    class Config:
+        from_attributes = True
+```
+---
+
+## Key Notes
+1. **Pagination and Ordering**: Ensure the repository uses `offset` and `limit` for pagination and supports ordering by columns.
+2. **GenericResponseDTO**: Wrap all responses in `GenericResponseDTO` to maintain consistency.
+3. **Indexes**: Use the `__indexes__` attribute in models to define indexes.
+4. **Hypertables**: Use the `__hypertable__` attribute in models for TimescaleDB-specific features. The hypertables will only be generated for newly generated tables. 
Fields:
+   - `time_column`: Column used for time-based partitioning.
+   - `chunk_time_interval`: Interval for partitioning data.
+   - `compress`: Enable compression.
+   - `compress_segmentby`: Column for segmenting compressed data.
+   - `compress_orderby`: Column for ordering compressed data.
+   - `compress_policy`: Enable automatic compression policy.
+   - `compress_after`: Time after which data is compressed.
diff --git a/docs/database_migration.md b/docs/database_migration.md
new file mode 100644
index 0000000..7bfb08a
--- /dev/null
+++ b/docs/database_migration.md
@@ -0,0 +1,213 @@
+# Database Migrations with Alembic and TimescaleDB
+
+## 1. Creating a New Migration
+
+Generate a new migration file based on changes detected in your SQLAlchemy models:
+
+```bash
+alembic revision --autogenerate -m "Describe your change"
+```
+
+* Alembic will generate a migration script reflecting structural changes.
+* On first execution, an `alembic_version` table will be created to track migration history.
+
+---
+
+## 2. Previewing Migration SQL (Without Execution)
+
+To inspect the SQL that Alembic would generate, without applying changes:
+
+```bash
+alembic upgrade head --sql
+```
+
+* Displays the SQL to upgrade to the latest revision.
+
+```bash
+alembic upgrade <revision> --sql
+```
+
+* Shows the SQL required to upgrade to a specific revision.
+
+```bash
+alembic upgrade <start_revision>:<end_revision> --sql
+```
+
+* Displays SQL for the changes between two specific revisions.
+
+---
+
+## 3. Applying Migrations
+
+To apply all unapplied migrations:
+
+```bash
+alembic upgrade head
+```
+
+* Executes the latest migration and updates the database schema accordingly.
+
+---
+
+## 4. Downgrading the Schema
+
+To revert the schema to a previous state:
+
+```bash
+alembic downgrade <revision>
+```
+
+* Rolls back the schema to the specified migration revision.
+
+---
+
+## 5. 
Safely Modifying Existing Tables + +When adding new columns to tables with existing data: + +* Make the column **nullable**, or +* Provide a **default value** during the migration + +--- + +## 6. Preventing Unintended Drops + +Control what Alembic includes in autogenerated migrations using the `include_object` hook in `/alembic/env.py`: + +```python +def include_object(object, name, type_, reflected, compare_to): + if reflected and compare_to is None: + if type_ in ("table", "index"): + return False + return True +``` + +This prevents Alembic from generating DROP operations for: + +* Tables and indexes that exist in the database +* But are not present in your SQLAlchemy models +--- + +# TimescaleDB Customizations + +TimescaleDB adds powerful features for time-series workloads, such as **hypertables**, **compression**, and **retention policies**. These capabilities are not directly supported by SQLAlchemy or Alembic, so customization is required. + +All TimescaleDB-specific logic is implemented in **`/alembic/env.py`** through several helper functions and hooks. + +--- + +## Example SQLAlchemy Model + +Below is an example SQLAlchemy model that uses TimescaleDB metadata: + +```python +class HegemonyCone(Base): + + __indexes__ = [ + { + 'name': 'ihr_hegemonycone_asn_id_timebin_idx', + 'columns': ['asn_id', 'timebin DESC'] + }, + ] + + __hypertable__ = { + 'time_column': 'timebin', + 'chunk_time_interval': '2 day', + 'compress': True, + 'compress_segmentby': 'asn_id,af', + 'compress_orderby': 'timebin', + 'compress_policy': True, + 'compress_after': '7 days' + } + + ... +``` + +This metadata drives the logic for custom hypertable creation, compression, and index generation. + +--- + +## Key Functions in `/alembic/env.py` + +### 1. 
`process_revision_directives` + +A hook that allows you to modify Alembic’s migration script before it's written: + +```python +def process_revision_directives(context, revision, directives): + if directives[0].upgrade_ops is not None: + process_ops( + context, directives[0].upgrade_ops, directives[0].downgrade_ops + ) +``` + +* Intercepts autogenerated operations. +* Passes them to `process_ops` for enhancement. + +--- + +### 2. `process_ops` + +This is the orchestrator for customizing TimescaleDB migrations: + +```python +def process_ops(context, upgrade_ops, downgrade_ops): + ... +``` + +* Iterates over all `CreateTableOp` operations. +* If a table has `__hypertable__` metadata: + + * Adds SQL to create a **hypertable** + * Applies **compression settings** and **compression policy** +* If the model has `__indexes__`: + + * Adds **TimescaleDB-optimized indexes** +* Only applies hypertable logic when the table is first created. It does **not** support converting an existing table into a hypertable. + +--- + +### 3. `create_hypertable_ops` + +Handles hypertable creation and optional compression logic: + +```python +def create_hypertable_ops(table_name, hypertable_meta, is_existing=False): + ... +``` + +* Reads from the model’s `__hypertable__` metadata. +* Generates SQL for: + + * `SELECT create_hypertable(...)` + * `ALTER TABLE ... SET(...)` for compression + * `SELECT add_compression_policy(...)` + +--- + +### 4. `create_index_ops` + +Creates custom indexes defined in the model’s `__indexes__` attribute: + +```python +def create_index_ops(table_name, indexes_meta): + ... +``` + +* Parses index definitions +* Generates `CREATE INDEX IF NOT EXISTS ...` +* Adds reversible `DROP INDEX` for downgrade + +--- + +### 5. `check_index_exists` + +Prevents duplicate index creation by checking the PostgreSQL catalog: + +```python +def check_index_exists(context, table_name, index_name): + ... +``` + +* Queries `pg_indexes` to determine if an index already exists. 
+ From dd4324c204a0d419ceddaf3e32006978cfb7cb9b Mon Sep 17 00:00:00 2001 From: ibraam Date: Fri, 13 Jun 2025 12:04:18 +0300 Subject: [PATCH 08/43] Added .env file, removed root path and modified the documentation files --- .env | 1 + .gitignore | 2 -- README.md | 20 +++++++++---------- alembic/README | 38 +++++++++++++++++++++++++++++++++++- alembic/env.py | 1 + config/database.py | 4 +++- docs/database_migration.md | 1 + dtos/generic_response_dto.py | 2 ++ main.py | 2 +- 9 files changed, 56 insertions(+), 15 deletions(-) create mode 100644 .env diff --git a/.env b/.env new file mode 100644 index 0000000..04f0806 --- /dev/null +++ b/.env @@ -0,0 +1 @@ +DATABASE_URL=postgresql://postgres:123password456@localhost:5435/ihr #Database connection string diff --git a/.gitignore b/.gitignore index 41b92af..1562c4f 100644 --- a/.gitignore +++ b/.gitignore @@ -35,7 +35,6 @@ MANIFEST # Installer logs pip-log.txt pip-delete-this-directory.txt -notes.txt # Unit test / coverage reports htmlcov/ @@ -129,7 +128,6 @@ celerybeat.pid *.sage.py # Environments -.env .venv env/ venv/ diff --git a/README.md b/README.md index 3273df2..4a71796 100644 --- a/README.md +++ b/README.md @@ -8,14 +8,14 @@ This project is a FastAPI-based backend for the IHR system. It is designed for f ### 1. Clone the Repository -### 2. Create a `.env` File +### 2. Modify the `.env` File -In the project root directory, create a `.env` file to define environment-specific settings, including the database connection. +In the project root directory, modify the `.env` file to define your specific database connection string. -Example `.env` content: +`.env` content: ```env -DATABASE_URL=postgresql://postgres:123password456@localhost:5435/ihr-fastapi +DATABASE_URL=postgresql://:@:/ ``` > Make sure PostgreSQL is running and the database exists before continuing. 
@@ -63,9 +63,9 @@ uvicorn main:app --host 0.0.0.0 --port 8000 --reload ### **Step 4: Access the API** Once running, you can access: -- API: **[http://localhost:8000/ihr/api](http://localhost:8000/ihr/api)** -- Interactive Docs (Swagger UI): **[http://localhost:8000/ihr/api/docs](http://localhost:8000/docs)** -- Redoc Docs: **[http://localhost:8000/ihr/api/redoc](http://localhost:8000/redoc)** +- API: **[http://localhost:8000](http://localhost:8000)** +- Interactive Docs (Swagger UI): **[http://localhost:8000/docs](http://localhost:8000/docs)** +- Redoc Docs: **[http://localhost:8000/redoc](http://localhost:8000/redoc)** --- @@ -100,9 +100,9 @@ docker logs -f ### Step 4: Access the API Once running, you can access: -- API: **[http://localhost:8000/ihr/api](http://localhost:8000/ihr/api)** -- Interactive Docs (Swagger UI): **[http://localhost:8000/ihr/api/docs](http://localhost:8000/docs)** -- Redoc Docs: **[http://localhost:8000/ihr/api/redoc](http://localhost:8000/redoc)** +- API: **[http://localhost:8000](http://localhost:8000)** +- Interactive Docs (Swagger UI): **[http://localhost:8000/docs](http://localhost:8000/docs)** +- Redoc Docs: **[http://localhost:8000/redoc](http://localhost:8000/redoc)** --- diff --git a/alembic/README b/alembic/README index 98e4f9c..0d50729 100644 --- a/alembic/README +++ b/alembic/README @@ -1 +1,37 @@ -Generic single-database configuration. \ No newline at end of file +## Alembic Directory + +The `/alembic` folder and its contents were automatically created by running: + +```bash +alembic init alembic +``` +This directory contains all the configuration and script files necessary to manage the project’s database schema migrations. + +--- + +### What's Inside the `/alembic` Directory + +* **`env.py`** + The core configuration file for Alembic. It sets up the database connection, imports the models, and controls how migrations are run. + +* **`script.py.mako`** + A Mako template used to generate new migration files. 
When you run commands like `alembic revision --autogenerate`, Alembic uses this template to structure the migration script. + +* **`versions/`** + A folder that stores all versioned migration scripts. Each file here represents a change to your database schema and includes both upgrade and downgrade instructions. + +--- + +### About `script.py.mako` + +The `script.py.mako` file is a template that defines the default structure of migration scripts. It contains placeholders that Alembic fills in during script generation. + +It typically includes: + +* `revision`: A unique ID for the migration. +* `down_revision`: The ID of the previous migration, ensuring migration order is preserved. +* `upgrade()`: A function where changes to the schema are defined. +* `downgrade()`: A function to undo those changes if needed. + +Alembic uses this template to maintain consistency and structure across all migration scripts. + diff --git a/alembic/env.py b/alembic/env.py index ac65105..0bd9c2b 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -35,6 +35,7 @@ model_classes = [] # Load all model classes inheriting from Base (excluding __init__.py) +# This is done so that we can later access __hypertable__ and __indexes__ attributes defined in the models and use them in generating the SQL commands for file in models_path.glob("*.py"): if file.name != "__init__.py": module_name = f"models.{file.stem}" diff --git a/config/database.py b/config/database.py index d1f4384..6e57db2 100644 --- a/config/database.py +++ b/config/database.py @@ -12,7 +12,9 @@ engine = create_engine(DATABASE_URL) # Create a session factory SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) -# Declare a base class for ORM models +# Base is the base class for all the SQLAlchemy ORM models. +# It tells SQLAlchemy that a model maps to a real table. +# Without inheriting from Base, the class won’t be recognized by SQLAlchemy’s ORM. 
Base = declarative_base() # Dependency to get a DB session for FastAPI routes (used in controllers) diff --git a/docs/database_migration.md b/docs/database_migration.md index 7bfb08a..cc79da2 100644 --- a/docs/database_migration.md +++ b/docs/database_migration.md @@ -128,6 +128,7 @@ This metadata drives the logic for custom hypertable creation, compression, and --- ## Key Functions in `/alembic/env.py` +All functions in `/alembic/env.py` are invoked automatically by alembic during the automatic generation of the migration file (we don't call them manually). ### 1. `process_revision_directives` diff --git a/dtos/generic_response_dto.py b/dtos/generic_response_dto.py index 3070280..c116491 100644 --- a/dtos/generic_response_dto.py +++ b/dtos/generic_response_dto.py @@ -3,6 +3,8 @@ from urllib.parse import urlencode, urlunparse from pydantic import BaseModel +# T is a generic type variable that will be replaced with a specific DTO type (e.g., CountryDTO) +# when GenericResponseDTO is used (e.g GenericResponseDTO[CountryDTO] used in CountryController). 
T = TypeVar("T") # The generic response format returned by all endpoints diff --git a/main.py b/main.py index cb2edfd..4ebf27a 100644 --- a/main.py +++ b/main.py @@ -4,7 +4,7 @@ from controllers import __path__ as controllers_path # Adjusted for `ihr` structure # The base URL of the app -app = FastAPI(root_path="/ihr/api") +app = FastAPI() # Automatically import and register all routers inside "ihr/controllers" for _, module_name, _ in pkgutil.iter_modules(controllers_path): From f3af036b872388dfd4664d8eeac917a5a56358ec Mon Sep 17 00:00:00 2001 From: ibraam Date: Sat, 14 Jun 2025 13:51:27 +0300 Subject: [PATCH 09/43] Removed .env file --- .env | 1 - .gitignore | 1 + README.md | 5 ++--- alembic/env.py | 5 ++++- config/database.py | 5 ++++- 5 files changed, 11 insertions(+), 6 deletions(-) delete mode 100644 .env diff --git a/.env b/.env deleted file mode 100644 index 04f0806..0000000 --- a/.env +++ /dev/null @@ -1 +0,0 @@ -DATABASE_URL=postgresql://postgres:123password456@localhost:5435/ihr #Database connection string diff --git a/.gitignore b/.gitignore index 1562c4f..7b004e5 100644 --- a/.gitignore +++ b/.gitignore @@ -128,6 +128,7 @@ celerybeat.pid *.sage.py # Environments +.env .venv env/ venv/ diff --git a/README.md b/README.md index 4a71796..f03ceaf 100644 --- a/README.md +++ b/README.md @@ -8,9 +8,9 @@ This project is a FastAPI-based backend for the IHR system. It is designed for f ### 1. Clone the Repository -### 2. Modify the `.env` File +### 2. Create a `.env` File -In the project root directory, modify the `.env` file to define your specific database connection string. +In the project root directory, create a new `.env` file to define your specific database connection string. `.env` content: @@ -18,7 +18,6 @@ In the project root directory, modify the `.env` file to define your specific da DATABASE_URL=postgresql://:@:/ ``` -> Make sure PostgreSQL is running and the database exists before continuing. 
--- diff --git a/alembic/env.py b/alembic/env.py index 0bd9c2b..50ac4f9 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -16,7 +16,10 @@ config = context.config # Load environment variables from .env file -load_dotenv() +try: + load_dotenv() +except: + pass # Override Alembic DB URL (found in alembic.ini) with the DB URL found in the .env file DATABASE_URL = os.getenv("DATABASE_URL") if DATABASE_URL: diff --git a/config/database.py b/config/database.py index 6e57db2..7f4b88e 100644 --- a/config/database.py +++ b/config/database.py @@ -4,7 +4,10 @@ from dotenv import load_dotenv # Load environment variables from .env file -load_dotenv() +try: + load_dotenv() +except: + pass # Read the database URL from the environment variable DATABASE_URL = os.getenv("DATABASE_URL") From f70ef0391f953f19ac353793c0ed43c9a323ed9b Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 16 Jun 2025 12:54:10 +0300 Subject: [PATCH 10/43] Added database models --- .../6f24edb78b64_initial_migration2.py | 166 ++++++++++++++++++ models/{asn_model.py => asn.py} | 3 - models/atlas_delay.py | 79 +++++++++ models/atlas_delay_alarms.py | 49 ++++++ models/atlas_location.py | 30 ++++ models/{country_model.py => country.py} | 0 models/delay.py | 39 ++++ models/delay_alarms.py | 83 +++++++++ models/delay_alarms_msms.py | 21 +++ models/disco_events.py | 82 +++++++++ models/disco_probes.py | 43 +++++ models/forwarding.py | 39 ++++ models/forwarding_alarms.py | 47 +++++ models/forwarding_alarms_msms.py | 19 ++ models/hegemony.py | 63 +++++++ models/hegemony_alarms.py | 54 ++++++ models/hegemony_country.py | 61 +++++++ models/hegemony_prefix.py | 89 ++++++++++ models/metis_atlas_deployment.py | 51 ++++++ models/metis_atlas_selection.py | 48 +++++ models/tr_hegemony.py | 57 ++++++ models/tr_hegemony_identifier.py | 17 ++ repositories/country_repository.py | 2 +- 23 files changed, 1138 insertions(+), 4 deletions(-) create mode 100644 alembic/versions/6f24edb78b64_initial_migration2.py rename 
models/{asn_model.py => asn.py} (87%) create mode 100644 models/atlas_delay.py create mode 100644 models/atlas_delay_alarms.py create mode 100644 models/atlas_location.py rename models/{country_model.py => country.py} (100%) create mode 100644 models/delay.py create mode 100644 models/delay_alarms.py create mode 100644 models/delay_alarms_msms.py create mode 100644 models/disco_events.py create mode 100644 models/disco_probes.py create mode 100644 models/forwarding.py create mode 100644 models/forwarding_alarms.py create mode 100644 models/forwarding_alarms_msms.py create mode 100644 models/hegemony.py create mode 100644 models/hegemony_alarms.py create mode 100644 models/hegemony_country.py create mode 100644 models/hegemony_prefix.py create mode 100644 models/metis_atlas_deployment.py create mode 100644 models/metis_atlas_selection.py create mode 100644 models/tr_hegemony.py create mode 100644 models/tr_hegemony_identifier.py diff --git a/alembic/versions/6f24edb78b64_initial_migration2.py b/alembic/versions/6f24edb78b64_initial_migration2.py new file mode 100644 index 0000000..314b531 --- /dev/null +++ b/alembic/versions/6f24edb78b64_initial_migration2.py @@ -0,0 +1,166 @@ +"""initial migration2 + +Revision ID: 6f24edb78b64 +Revises: +Create Date: 2025-06-15 19:59:45.170363 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = '6f24edb78b64' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('ihr_asn', + sa.Column('number', sa.BigInteger(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('tartiflette', sa.Boolean(), nullable=False), + sa.Column('disco', sa.Boolean(), nullable=False), + sa.Column('ashash', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('number') + ) + op.create_table('ihr_atlas_location', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('type', sa.String(length=4), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('ihr_country', + sa.Column('code', sa.String(length=4), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('tartiflette', sa.Boolean(), nullable=False), + sa.Column('disco', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('code') + ) + op.create_table('ihr_disco_events', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('mongoid', sa.String(length=24), nullable=False), + sa.Column('streamtype', sa.String(length=10), nullable=False), + sa.Column('streamname', sa.String(length=128), nullable=False), + sa.Column('starttime', sa.DateTime(), nullable=False), + sa.Column('endtime', sa.DateTime(), nullable=False), + sa.Column('avglevel', sa.Float(), nullable=False), + sa.Column('nbdiscoprobes', sa.Integer(), nullable=False), + sa.Column('totalprobes', sa.Integer(), nullable=False), + sa.Column('ongoing', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('ihr_tr_hegemony_identifier', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('type', sa.String(length=4), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + 
op.create_table('ihr_delay_alarms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('ip', sa.String(length=64), nullable=False), + sa.Column('link', sa.String(length=128), nullable=False), + sa.Column('medianrtt', sa.Float(), nullable=False), + sa.Column('diffmedian', sa.Float(), nullable=False), + sa.Column('deviation', sa.Float(), nullable=False), + sa.Column('nbprobes', sa.Integer(), nullable=False), + sa.Column('msm_prb_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_delay_alarms', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_forwarding_alarms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('ip', sa.String(length=64), nullable=False), + sa.Column('correlation', sa.Float(), nullable=False), + sa.Column('responsibility', sa.Float(), nullable=False), + sa.Column('pktdiff', sa.Float(), nullable=False), + sa.Column('previoushop', sa.String(length=64), nullable=False), + sa.Column('msm_prb_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_forwardingalarms_asn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_forwarding_alarms', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_hegemonycone', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('conesize', 
sa.Integer(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_ihr_hegemonycone_asn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_hegemonycone', by_range('timebin', INTERVAL '2 day'));") + op.execute("ALTER TABLE ihr_hegemonycone SET (timescaledb.compress, timescaledb.compress_segmentby = 'asn_id,af', timescaledb.compress_orderby = 'timebin');") + op.execute("SELECT add_compression_policy('ihr_hegemonycone', INTERVAL '7 days');") + op.create_table('ihr_metis_atlas_selection', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('metric', sa.String(length=16), nullable=False), + sa.Column('rank', sa.Integer(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('mean', sa.Float(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_metis_atlas_selection_asn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_metis_atlas_selection', by_range('timebin', INTERVAL '7 day'));") + op.create_table('ihr_tr_hegemony', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('hege', sa.Float(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('nbsamples', sa.Integer(), nullable=False), + sa.Column('dependency_id', sa.BigInteger(), nullable=False), + sa.Column('origin_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['dependency_id'], ['ihr_tr_hegemony_identifier.id'], name='fk_tr_hegemony_dependency_id', ondelete='CASCADE'), + 
sa.ForeignKeyConstraint(['origin_id'], ['ihr_tr_hegemony_identifier.id'], name='fk_tr_hegemony_origin_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_tr_hegemony', by_range('timebin', INTERVAL '2 day'));") + op.execute('CREATE INDEX IF NOT EXISTS ihr_delay_alarms_asn_id_timebin_idx ON ihr_delay_alarms (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_events_mongoid_3a488192 ON ihr_disco_events (mongoid);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_events_streamtype_streamname_st_bda16df6_idx ON ihr_disco_events (streamtype, streamname, starttime, endtime);') + op.execute('CREATE INDEX IF NOT EXISTS "ihr_forwarding_alarms_asn_id_timebin_idx" ON ihr_forwarding_alarms (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemonycone_asn_id_timebin_idx ON ihr_hegemonycone (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_metis_atlas_selection_metric_rank_timebin_idx ON ihr_metis_atlas_selection (metric, rank, timebin);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_tr_hegemony_dependency_id_timebin_idx ON ihr_tr_hegemony (dependency_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_tr_hegemony_origin_id_timebin_idx ON ihr_tr_hegemony (origin_id, timebin DESC);') + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.execute('ALTER TABLE ihr_hegemonycone SET (timescaledb.compress = false);') + op.execute("SELECT remove_compression_policy('ihr_hegemonycone', if_exists => TRUE);") + op.execute('DROP INDEX IF EXISTS ihr_delay_alarms_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_disco_events_mongoid_3a488192;') + op.execute('DROP INDEX IF EXISTS ihr_disco_events_streamtype_streamname_st_bda16df6_idx;') + op.execute('DROP INDEX IF EXISTS "ihr_forwarding_alarms_asn_id_timebin_idx";') + op.execute('DROP INDEX IF EXISTS ihr_hegemonycone_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_metis_atlas_selection_metric_rank_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_dependency_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_origin_id_timebin_idx;') + op.drop_table('ihr_tr_hegemony') + op.drop_table('ihr_metis_atlas_selection') + op.drop_table('ihr_hegemonycone') + op.drop_table('ihr_forwarding_alarms') + op.drop_table('ihr_delay_alarms') + op.drop_table('ihr_tr_hegemony_identifier') + op.drop_table('ihr_disco_events') + op.drop_table('ihr_country') + op.drop_table('ihr_atlas_location') + op.drop_table('ihr_asn') + # ### end Alembic commands ### diff --git a/models/asn_model.py b/models/asn.py similarity index 87% rename from models/asn_model.py rename to models/asn.py index 39d35fb..962864e 100644 --- a/models/asn_model.py +++ b/models/asn.py @@ -11,7 +11,4 @@ class ASN(Base): disco = Column(Boolean, default=False, nullable=False, doc='True if participate in network disconnection analysis.') ashash = Column(Boolean, default=False, nullable=False, doc='True if participate in AS dependency analysis.') - # Relationship to HegemonyCone - hegemony_cones = relationship('HegemonyCone', back_populates='asn_relation') - diff --git a/models/atlas_delay.py b/models/atlas_delay.py new file mode 100644 index 0000000..b534958 --- /dev/null +++ b/models/atlas_delay.py @@ -0,0 +1,79 @@ +from sqlalchemy import ( + Column, 
BigInteger, Float, Integer, ForeignKey, PrimaryKeyConstraint +) +from sqlalchemy.dialects.postgresql import TIMESTAMP +from sqlalchemy.orm import relationship +from config.database import Base + + +class AtlasDelay(Base): + __tablename__ = 'ihr_atlas_delay' + + __table_args__ = ( + PrimaryKeyConstraint('id', 'timebin'), + ) + + __hypertable__ = { + 'time_column': 'timebin', + 'chunk_time_interval': '2 day', + 'compress': True, + 'compress_segmentby': 'startpoint_id,endpoint_id', + 'compress_orderby': 'timebin', + 'compress_policy': True, + 'compress_after': '7 days' + } + + __indexes__ = [ + { + 'name': 'ihr_atlas_delay_endpoint_id_timebin_idx', + 'columns': ['endpoint_id', 'timebin DESC'], + }, + { + 'name': 'ihr_atlas_delay_startpoint_id_endpoint_id_timebin_idx', + 'columns': ['startpoint_id', 'endpoint_id', 'timebin DESC'], + }, + { + 'name': 'ihr_atlas_delay_startpoint_id_timebin_idx', + 'columns': ['startpoint_id', 'timebin DESC'], + }, + ] + + id = Column(BigInteger, autoincrement=True) + + timebin = Column(TIMESTAMP(timezone=True), nullable=False, + doc='Timestamp of reported value.') + + median = Column(Float, default=0.0, nullable=False, + doc='Estimated median RTT. RTT values are directly extracted from traceroute (a.k.a. realrtts) and estimated via differential RTTs.') + + nbtracks = Column(Integer, default=0, nullable=False, + doc='Number of RTT samples used to compute median RTT (either real or differential RTT).') + + nbprobes = Column(Integer, default=0, nullable=False, + doc='Number of Atlas probes used to compute median RTT.') + + entropy = Column(Float, default=0.0, nullable=False, + doc="Entropy of RTT samples with regards to probes' ASN. 
Values close to zero mean that Atlas probes used for these measures are located in the same AS, values close to one means that probes are equally spread across multiple ASes.") + + hop = Column(Integer, default=0, nullable=False, + doc='Median number of AS hops between the start and end locations.') + + nbrealrtts = Column(Integer, default=0, nullable=False, + doc='Number of RTT samples directly obtained from traceroutes (as opposed to differential RTTs).') + + startpoint_id = Column(BigInteger, + ForeignKey( + 'ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_startpoint'), + nullable=False, + doc='Starting location for the delay estimation.') + + endpoint_id = Column(BigInteger, + ForeignKey( + 'ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_endpoint'), + nullable=False, + doc='Ending location for the delay estimation.') + + startpoint = relationship('AtlasLocation', foreign_keys=[ + startpoint_id], backref='location_startpoint') + endpoint = relationship('AtlasLocation', foreign_keys=[ + endpoint_id], backref='location_endpoint') diff --git a/models/atlas_delay_alarms.py b/models/atlas_delay_alarms.py new file mode 100644 index 0000000..b65b2ee --- /dev/null +++ b/models/atlas_delay_alarms.py @@ -0,0 +1,49 @@ +from sqlalchemy import Column, BigInteger, Float, ForeignKey, PrimaryKeyConstraint +from sqlalchemy.dialects.postgresql import TIMESTAMP +from sqlalchemy.orm import relationship +from config.database import Base + + +class AtlasDelayAlarms(Base): + __tablename__ = 'ihr_atlas_delay_alarms' + + __table_args__ = ( + PrimaryKeyConstraint('id', 'timebin'), + ) + + __hypertable__ = { + 'time_column': 'timebin', + 'chunk_time_interval': '2 day', + } + + __indexes__ = [ + { + 'name': 'ihr_atlas_delay_alarms_startpoint_id_timebin_idx', + 'columns': ['startpoint_id', 'timebin DESC'], + }, + { + 'name': 'ihr_atlas_delay_alarms_endpoint_id_timebin_idx', + 'columns': ['endpoint_id', 'timebin DESC'], + }, + ] + + id = Column(BigInteger, 
autoincrement=True) + + timebin = Column(TIMESTAMP(timezone=True), nullable=False, + doc='Timestamp of reported alarm.') + + deviation = Column(Float, default=0.0, nullable=False, + doc='Significance of the AS Hegemony change.') + + startpoint_id = Column(BigInteger, + ForeignKey('ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_startpoint'), + nullable=False, + doc='Starting location reported as anomalous.') + + endpoint_id = Column(BigInteger, + ForeignKey('ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_endpoint'), + nullable=False, + doc='Ending location reported as anomalous.') + + startpoint = relationship('AtlasLocation', foreign_keys=[startpoint_id], backref='anomalous_startpoint') + endpoint = relationship('AtlasLocation', foreign_keys=[endpoint_id], backref='anomalous_endpoint') diff --git a/models/atlas_location.py b/models/atlas_location.py new file mode 100644 index 0000000..88723f3 --- /dev/null +++ b/models/atlas_location.py @@ -0,0 +1,30 @@ +from sqlalchemy import Column, Integer, String,BigInteger +from config.database import Base + +class AtlasLocation(Base): + __tablename__ = 'ihr_atlas_location' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + name = Column( + String(255), + nullable=False, + doc=( + "Location identifier. The meaning of values dependend on the location type: " + "
  • type=AS: ASN
  • type=CT: city name, region name, country code
  • " + "
  • type=PB: Atlas Probe ID
  • type=IP: IP version (4 or 6)
" + ) + ) + type = Column( + String(4), + nullable=False, + doc=( + "Type of location. Possible values are: " + "
  • AS: Autonomous System
  • CT: City
  • PB: Atlas Probe
  • " + "
  • IP: Whole IP space
" + ) + ) + af = Column( + Integer, + nullable=False, + doc="Address Family (IP version), values are either 4 or 6." + ) diff --git a/models/country_model.py b/models/country.py similarity index 100% rename from models/country_model.py rename to models/country.py diff --git a/models/delay.py b/models/delay.py new file mode 100644 index 0000000..55f9443 --- /dev/null +++ b/models/delay.py @@ -0,0 +1,39 @@ +from sqlalchemy import Column, BigInteger, Float, Integer, ForeignKey, PrimaryKeyConstraint +from sqlalchemy.dialects.postgresql import TIMESTAMP +from sqlalchemy.orm import relationship +from config.database import Base + + +class Delay(Base): + __tablename__ = 'ihr_delay' + + __table_args__ = ( + PrimaryKeyConstraint('id', 'timebin'), + ) + + __hypertable__ = { + 'time_column': 'timebin', + 'chunk_time_interval': '2 day', + } + + __indexes__ = [ + { + 'name': 'ihr_delay_asn_id_timebin_idx', + 'columns': ['asn_id', 'timebin DESC'], + }, + ] + + id = Column(BigInteger, autoincrement=True) + + timebin = Column(TIMESTAMP(timezone=True), nullable=False, + doc='Timestamp of reported value.') + + magnitude = Column(Float, default=0.0, nullable=False, + doc='Cumulated link delay deviation. 
Values close to zero represent usual delays for the network, whereas higher values stand for significant links congestion in the monitored network.') + + asn_id = Column(BigInteger, + ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_delay_asn_id'), + nullable=False, + doc='ASN or IXP ID of the monitored network (see number in /network/).') + + asn = relationship('ASN', foreign_keys=[asn_id]) diff --git a/models/delay_alarms.py b/models/delay_alarms.py new file mode 100644 index 0000000..056203a --- /dev/null +++ b/models/delay_alarms.py @@ -0,0 +1,83 @@ +from sqlalchemy import ( + Column, Integer, String, Float, DateTime, ForeignKey, BigInteger, PrimaryKeyConstraint +) +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import relationship +from config.database import Base +from sqlalchemy.dialects.postgresql import TIMESTAMP + + +class DelayAlarms(Base): + __tablename__ = 'ihr_delay_alarms' + + + __table_args__ = ( + PrimaryKeyConstraint('id', 'timebin'), + ) + + __indexes__ = [ + { + 'name': 'ihr_delay_alarms_asn_id_timebin_idx', + 'columns': ['asn_id', 'timebin DESC'] + } + ] + + __hypertable__ = { + 'time_column': 'timebin', + 'chunk_time_interval': '2 day' + } + + id = Column(BigInteger, autoincrement=True) + timebin = Column( + TIMESTAMP(timezone=True), + nullable=False, + doc='Timestamp of reported alarm.' + ) + ip = Column( + String(64), + nullable=False, + ) + link = Column( + String(128), + nullable=False, + doc='Pair of IP addresses corresponding to the reported link.' + ) + medianrtt = Column( + Float, + nullable=False, + default=0.0, + doc='Median differential RTT observed during the alarm.' + ) + diffmedian = Column( + Float, + nullable=False, + default=0.0, + doc='Difference between the link usual median RTT and the median RTT observed during the alarm.' 
+ ) + deviation = Column( + Float, + nullable=False, + default=0.0, + doc='Distance between observed delays and the past usual values normalized by median absolute deviation.' + ) + nbprobes = Column( + Integer, + nullable=False, + default=0, + doc='Number of Atlas probes monitoring this link at the reported time window.' + ) + msm_prb_ids = Column( + JSONB, + nullable=True, + default=None, + doc='List of Atlas measurement IDs and probe IDs used to compute this alarm.' + ) + asn_id = Column( + BigInteger, + ForeignKey('ihr_asn.number', ondelete='CASCADE'), + nullable=False, + doc='ASN or IXPID of the reported network.' + ) + + asn_relation = relationship('ASN', backref='delay_alarms') + diff --git a/models/delay_alarms_msms.py b/models/delay_alarms_msms.py new file mode 100644 index 0000000..ec6f35f --- /dev/null +++ b/models/delay_alarms_msms.py @@ -0,0 +1,21 @@ +from sqlalchemy import Column, BigInteger, Integer, ForeignKey, PrimaryKeyConstraint +from sqlalchemy.orm import relationship +from config.database import Base + + +class DelayAlarmsMsms(Base): + __tablename__ = 'ihr_delay_alarms_msms' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + + msmid = Column(BigInteger, default=0, nullable=False) + + probeid = Column(Integer, default=0, nullable=False) + + alarm_id = Column(BigInteger, + ForeignKey('ihr_delay_alarms.id', ondelete='CASCADE', + name='fk_delay_alarms_msms_alarm_id'), + nullable=False) + + alarm = relationship('DelayAlarms', foreign_keys=[ + alarm_id], backref='msmid_entries') diff --git a/models/disco_events.py b/models/disco_events.py new file mode 100644 index 0000000..06e9e41 --- /dev/null +++ b/models/disco_events.py @@ -0,0 +1,82 @@ +from sqlalchemy import ( + Column, Integer, String, Float, DateTime, Boolean, BigInteger +) +from sqlalchemy.orm import declarative_base +from config.database import Base + + +class DiscoEvents(Base): + __tablename__ = 'ihr_disco_events' + + __indexes__ = [{ + 'name': 
'ihr_disco_events_mongoid_3a488192', + 'columns': ['mongoid'] + }, { + 'name': 'ihr_disco_events_streamtype_streamname_st_bda16df6_idx', + 'columns': ['streamtype', 'streamname', 'starttime', 'endtime'] + }] + + + id = Column(BigInteger, primary_key=True, autoincrement=True) + mongoid = Column( + String(24), + nullable=False, + default='000000000000000000000000', + ) + streamtype = Column( + String(10), + nullable=False, + doc=( + "Granularity of the detected event. The possible values are asn, country, admin1, and admin2. " + "Admin1 represents a wider area than admin2, the exact definition might change from one country to another. " + "For example 'California, US' is an admin1 stream and 'San Francisco County, California, US' is an admin2 stream." + ) + ) + streamname = Column( + String(128), + nullable=False, + doc='Name of the topological (ASN) or geographical area where the network disconnection happened.' + ) + starttime = Column( + DateTime, + nullable=False, + doc='Estimated start time of the network disconnection.' + ) + endtime = Column( + DateTime, + nullable=False, + doc=( + 'Estimated end time of the network disconnection. ' + 'Equal to starttime if the end of the event is unknown.' + ) + ) + avglevel = Column( + Float, + nullable=False, + default=0.0, + doc=( + 'Score representing the coordination of disconnected probes. ' + 'Higher values stand for a large number of Atlas probes that disconnected in a very short time frame. ' + 'Events with an avglevel lower than 10 are likely to be false positives detection.' + ) + ) + nbdiscoprobes = Column( + Integer, + nullable=False, + default=0, + doc='Number of Atlas probes that disconnected around the reported start time.' + ) + totalprobes = Column( + Integer, + nullable=False, + default=0, + doc='Total number of Atlas probes active in the reported stream (ASN, Country, or geographical area).' 
class DiscoProbes(Base):
    """Individual Atlas probe disconnection attached to a DiscoEvents row."""

    __tablename__ = 'ihr_disco_probes'

    # Surrogate primary key.
    id = Column(BigInteger, autoincrement=True, primary_key=True)

    probe_id = Column(
        Integer, nullable=False,
        doc='Atlas probe ID of disconnected probe.')

    starttime = Column(
        TIMESTAMP(timezone=True), nullable=False,
        doc='Probe disconnection time.')

    # Nullable: reconnection may never be observed for this probe.
    endtime = Column(
        TIMESTAMP(timezone=True), nullable=True,
        doc='Reconnection time of the probe, this may not be reported if other probes have reconnected earlier.')

    level = Column(
        Float, nullable=False, default=0.0,
        doc='Disconnection level when the probe disconnected.')

    # Note: default is the string 'None', not the Python None.
    ipv4 = Column(
        String(64), nullable=False, default='None',
        doc='Public IP address of the Atlas probe.')

    prefixv4 = Column(
        String(70), nullable=False, default='None',
        doc='IP prefix corresponding to the probe.')

    lat = Column(
        Float, nullable=False, default=0.0,
        doc='Latitude of the probe during the network detection as reported by RIPE Atlas.')

    lon = Column(
        Float, nullable=False, default=0.0,
        doc='Longitude of the probe during the network detection as reported by RIPE Atlas.')

    # Parent event; rows are removed together with the event (CASCADE).
    event_id = Column(
        BigInteger,
        ForeignKey(
            'ihr_disco_events.id',
            ondelete='CASCADE',
            name='fk_disco_probes_event_id',
        ),
        nullable=False,
        doc='ID of the network disconnection event where this probe is reported.')

    # Reverse accessor DiscoEvents.discoprobes is created by the backref.
    event = relationship(
        'DiscoEvents',
        foreign_keys=[event_id],
        backref='discoprobes',
    )
class Forwarding(Base):
    """Per-network forwarding anomaly magnitude, stored as a TimescaleDB
    hypertable keyed on (id, timebin)."""

    __tablename__ = 'ihr_forwarding'

    # Composite PK is required because hypertable partitioning needs the
    # time column inside the primary key.
    __table_args__ = (
        PrimaryKeyConstraint('id', 'timebin'),
    )

    # Consumed by the project's migration tooling (TimescaleDB setup).
    __hypertable__ = {
        'time_column': 'timebin',
        'chunk_time_interval': '2 day',
    }

    # Consumed by the project's migration tooling.
    __indexes__ = [
        {
            'name': 'ihr_forwarding_asn_id_timebin_idx',
            'columns': ['asn_id', 'timebin DESC'],
        },
    ]

    id = Column(BigInteger, autoincrement=True)

    timebin = Column(
        TIMESTAMP(timezone=True), nullable=False,
        doc='Timestamp of reported value.')

    magnitude = Column(
        Float, nullable=False, default=0.0,
        doc='Cumulated link delay deviation. Values close to zero represent usual delays for the network, whereas higher values stand for significant links congestion in the monitored network.')

    # Monitored network; rows are removed with the ASN (CASCADE).
    asn_id = Column(
        BigInteger,
        ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_forwarding_asn_id'),
        nullable=False,
        doc='ASN or IXP ID of the monitored network (see number in /network/).')

    asn = relationship('ASN', foreign_keys=[asn_id])
class ForwardingAlarms(Base):
    """Forwarding-pattern anomaly alarm, stored as a TimescaleDB hypertable
    keyed on (id, timebin)."""

    __tablename__ = 'ihr_forwarding_alarms'

    __table_args__ = (
        PrimaryKeyConstraint('id', 'timebin'),
    )

    # FIX: the index name was wrapped in literal double quotes
    # ('"ihr_forwarding_alarms_asn_id_timebin_idx"'), which embeds the quotes
    # into the identifier and breaks the naming convention every other model
    # follows. Plain name, like the sibling models.
    __indexes__ = [
        {
            'name': 'ihr_forwarding_alarms_asn_id_timebin_idx',
            'columns': ['asn_id', 'timebin DESC']
        }
    ]

    __hypertable__ = {
        'time_column': 'timebin',
        'chunk_time_interval': '2 day'
    }

    id = Column(BigInteger, autoincrement=True)
    timebin = Column(TIMESTAMP(timezone=True), nullable=False,
                     doc='Timestamp of reported alarm.')
    ip = Column(String(64), nullable=False,
                doc='Reported IP address, seen an unusually high or low number of times in Atlas traceroutes.')

    correlation = Column(Float, default=0.0, nullable=False,
                         doc='Correlation coefficient between the usual forwarding pattern and the forwarding pattern observed during the alarm. Values range between 0 and -1. Lowest values represent the most anomalous patterns.')
    responsibility = Column(Float, default=0.0, nullable=False,
                            doc='Responsibility score of the reported IP in the forwarding pattern change.')
    pktdiff = Column(Float, default=0.0, nullable=False,
                     doc='The difference between the number of times the reported IP is seen in traceroutes compare to its usual appearance.')
    previoushop = Column(String(64), nullable=False,
                         doc='Last observed IP hop on the usual path.')

    msm_prb_ids = Column(JSONB, nullable=True, default=None,
                         doc='List of Atlas measurement and probe IDs used to compute this alarm.')

    asn_id = Column(BigInteger, ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_forwardingalarms_asn_id'),
                    nullable=False, doc='ASN or IXPID of the reported network.')

    # NOTE(review): back_populates requires a matching 'forwarding_alarms'
    # relationship on the ASN model — confirm it exists in models/asn.py.
    asn_relation = relationship('ASN', back_populates='forwarding_alarms')
class ForwardingAlarmsMsms(Base):
    """Join table linking a forwarding alarm to the Atlas measurement/probe
    pairs that contributed to it (mirrors DelayAlarmsMsms)."""

    __tablename__ = 'ihr_forwarding_alarms_msms'

    # Surrogate primary key.
    id = Column(BigInteger, primary_key=True, autoincrement=True)

    # Atlas measurement ID (0 when not set).
    msmid = Column(BigInteger, default=0, nullable=False)

    # Atlas probe ID (0 when not set).
    probeid = Column(Integer, default=0, nullable=False)

    # FIX: name the FK constraint, consistent with DelayAlarmsMsms
    # ('fk_delay_alarms_msms_alarm_id').
    alarm_id = Column(BigInteger,
                      ForeignKey('ihr_forwarding_alarms.id', ondelete='CASCADE',
                                 name='fk_forwarding_alarms_msms_alarm_id'),
                      nullable=False)

    # FIX: the original used back_populates='msms', but ForwardingAlarms
    # declares no 'msms' relationship, so mapper configuration would fail.
    # Use backref (as DelayAlarmsMsms does), which creates the reverse
    # ForwardingAlarms.msms accessor automatically.
    alarm = relationship('ForwardingAlarms', foreign_keys=[alarm_id],
                         backref='msms')
class Hegemony(Base):
    """AS Hegemony score of a transit network for a given origin ASN,
    stored as a compressed TimescaleDB hypertable keyed on (id, timebin)."""

    __tablename__ = 'ihr_hegemony'

    # Hypertable partitioning requires the time column in the primary key.
    __table_args__ = (
        PrimaryKeyConstraint('id', 'timebin'),
    )

    # Consumed by the project's migration tooling (TimescaleDB setup,
    # including native compression after 7 days).
    __hypertable__ = {
        'time_column': 'timebin',
        'chunk_time_interval': '2 day',
        'compress': True,
        'compress_segmentby': 'af,originasn_id,asn_id',
        'compress_orderby': 'timebin',
        'compress_policy': True,
        'compress_after': '7 days'
    }

    # Consumed by the project's migration tooling.
    __indexes__ = [
        {
            'name': 'ihr_hegemony_asn_id_timebin_idx',
            'columns': ['asn_id', 'timebin DESC'],
        },
        {
            'name': 'ihr_hegemony_originasn_id_timebin_idx',
            'columns': ['originasn_id', 'timebin DESC'],
        },
        {
            'name': 'ihr_hegemony_asn_id_originasn_id_timebin_idx',
            'columns': ['asn_id', 'originasn_id', 'timebin DESC'],
        },
    ]

    id = Column(BigInteger, autoincrement=True)

    timebin = Column(
        TIMESTAMP(timezone=True), nullable=False,
        doc='Timestamp of reported value.')

    hege = Column(
        Float, nullable=False, default=0.0,
        doc='AS Hegemony is the estimated fraction of paths towards the originasn. '
            'The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies.')

    af = Column(
        Integer, nullable=False, default=0,
        doc='Address Family (IP version), values are either 4 or 6.')

    # Both FKs point at ihr_asn; rows are removed with the ASN (CASCADE).
    asn_id = Column(
        BigInteger,
        ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_asn_id'),
        nullable=False,
        doc='Dependency. Transit network commonly seen in BGP paths towards originasn.')

    originasn_id = Column(
        BigInteger,
        ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_originasn_id'),
        nullable=False,
        doc='Dependent network, it can be any public ASN. Retrieve all dependencies of a network by setting only this parameter and a timebin.')

    # Two FKs to the same table: foreign_keys disambiguates each join.
    asn = relationship('ASN', foreign_keys=[asn_id])
    originasn = relationship('ASN', foreign_keys=[originasn_id])
class HegemonyAlarms(Base):
    """Alarm raised on a significant AS Hegemony change, stored as a
    TimescaleDB hypertable keyed on (id, timebin)."""

    __tablename__ = 'ihr_hegemony_alarms'

    # Hypertable partitioning requires the time column in the primary key.
    __table_args__ = (
        PrimaryKeyConstraint('id', 'timebin'),
    )

    # Consumed by the project's migration tooling (TimescaleDB setup).
    __hypertable__ = {
        'time_column': 'timebin',
        'chunk_time_interval': '2 day',
    }

    # Consumed by the project's migration tooling.
    __indexes__ = [
        {
            'name': 'ihr_hegemony_alarms_asn_id_timebin_idx',
            'columns': ['asn_id', 'timebin DESC']
        },
        {
            'name': 'ihr_hegemony_alarms_originasn_id_timebin_idx',
            'columns': ['originasn_id', 'timebin DESC']
        }
    ]

    id = Column(BigInteger, autoincrement=True)

    timebin = Column(
        TIMESTAMP(timezone=True), nullable=False,
        doc='Timestamp of reported alarm.')

    deviation = Column(
        Float, nullable=False, default=0.0,
        doc='Significance of the AS Hegemony change.')

    af = Column(
        Integer, nullable=False,
        doc='Address Family (IP version), values are either 4 or 6.')

    # Both FKs point at ihr_asn; rows are removed with the ASN (CASCADE).
    asn_id = Column(
        BigInteger,
        ForeignKey('ihr_asn.number', ondelete='CASCADE',
                   name='fk_hegemony_alarms_asn_id'),
        nullable=False,
        doc='ASN of the anomalous dependency (transit network).')

    originasn_id = Column(
        BigInteger,
        ForeignKey('ihr_asn.number', ondelete='CASCADE',
                   name='fk_hegemony_alarms_originasn_id'),
        nullable=False,
        doc='ASN of the reported dependent network.')

    # Two FKs to the same table: foreign_keys disambiguates each join.
    asn = relationship('ASN', foreign_keys=[asn_id])
    originasn = relationship('ASN', foreign_keys=[originasn_id])
class HegemonyCountry(Base):
    """AS Hegemony of a network for a whole country, stored as a
    TimescaleDB hypertable keyed on (id, timebin)."""

    __tablename__ = 'ihr_hegemony_country'

    # Hypertable partitioning requires the time column in the primary key.
    __table_args__ = (
        PrimaryKeyConstraint('id', 'timebin'),
    )

    # Consumed by the project's migration tooling (TimescaleDB setup).
    __hypertable__ = {
        'time_column': 'timebin',
        'chunk_time_interval': '2 day',
    }

    # Consumed by the project's migration tooling.
    __indexes__ = [
        {
            'name': 'ihr_hegemony_country_asn_id_timebin_idx',
            'columns': ['asn_id', 'timebin DESC']
        },
        {
            'name': 'ihr_hegemony_country_country_id_timebin_idx',
            'columns': ['country_id', 'timebin DESC']
        }
    ]

    id = Column(BigInteger, autoincrement=True)

    timebin = Column(
        TIMESTAMP(timezone=True), nullable=False,
        doc='Timestamp of reported value.')

    hege = Column(
        Float, nullable=False, default=0.0,
        doc='AS Hegemony is the estimated fraction of paths towards the monitored country. '
            'The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies.')

    af = Column(
        Integer, nullable=False, default=0,
        doc='Address Family (IP version), values are either 4 or 6.')

    weight = Column(
        Float, nullable=False, default=0.0,
        doc='Absolute weight given to the ASN for the AS Hegemony calculation.')

    # Note: default is the string 'None', not the Python None.
    weightscheme = Column(
        String(16), nullable=False, default='None',
        doc='Weighting scheme used for the AS Hegemony calculation.')

    transitonly = Column(
        Boolean, nullable=False, default=False,
        doc='If True, then origin ASNs of BGP path are ignored (focus only on transit networks).')

    asn_id = Column(
        BigInteger,
        ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_country_asn_id'),
        nullable=False,
        doc='Dependency. Network commonly seen in BGP paths towards monitored country.')

    country_id = Column(
        String(4),
        ForeignKey('ihr_country.code', ondelete='CASCADE', name='fk_hegemony_country_country_id'),
        nullable=False,
        doc='Monitored country. Retrieve all dependencies of a country by setting only this parameter and a timebin.')

    asn = relationship('ASN')
    country = relationship('Country')
class HegemonyPrefix(Base):
    """AS Hegemony of a network for a single BGP prefix, together with
    route-origin validation state (RPKI/IRR) and registration status.
    Stored as a TimescaleDB hypertable keyed on (id, timebin)."""

    __tablename__ = 'ihr_hegemony_prefix'

    # Hypertable partitioning requires the time column in the primary key.
    __table_args__ = (
        PrimaryKeyConstraint('id', 'timebin'),
    )

    # Consumed by the project's migration tooling (TimescaleDB setup).
    __hypertable__ = {
        'time_column': 'timebin',
        'chunk_time_interval': '2 day',
    }

    # Consumed by the project's migration tooling.
    __indexes__ = [
        {
            'name': 'ihr_hegemony_prefix_prefix_timebin_idx',
            'columns': ['prefix', 'timebin DESC']
        },
        {
            'name': 'ihr_hegemony_prefix_asn_id_timebin_idx',
            'columns': ['asn_id', 'timebin DESC']
        },
        {
            'name': 'ihr_hegemony_prefix_originasn_id_timebin_idx',
            'columns': ['originasn_id', 'timebin DESC']
        },
        {
            'name': 'ihr_hegemony_prefix_country_id_timebin_idx',
            'columns': ['country_id', 'timebin DESC']
        }
    ]

    id = Column(BigInteger, autoincrement=True)

    timebin = Column(TIMESTAMP(timezone=True), nullable=False,
                     doc='Timestamp of reported value.')

    prefix = Column(String(64), nullable=False,
                    doc='Monitored prefix (IPv4 or IPv6).')

    hege = Column(Float, default=0.0, nullable=False,
                  doc='AS Hegemony is the estimated fraction of paths towards the monitored prefix. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies.')

    af = Column(Integer, default=0, nullable=False,
                doc='Address Family (IP version), values are either 4 or 6.')

    visibility = Column(Float, default=0.0, nullable=False,
                        doc='Percentage of BGP peers that see this prefix.')

    rpki_status = Column(String(32), nullable=False,
                         doc='Route origin validation state for the monitored prefix and origin AS using RPKI.')

    irr_status = Column(String(32), nullable=False,
                        doc='Route origin validation state for the monitored prefix and origin AS using IRR.')

    delegated_prefix_status = Column(String(32), nullable=False,
                                     doc="Status of the monitored prefix in the RIR's delegated stats. Status other than 'assigned' are usually considered as bogons.")

    delegated_asn_status = Column(String(32), nullable=False,
                                  doc="Status of the origin ASN in the RIR's delegated stats. Status other than 'assigned' are usually considered as bogons.")

    descr = Column(String(64), nullable=False,
                   doc='Prefix description from IRR (maximum 64 characters).')

    moas = Column(Boolean, default=False, nullable=False,
                  doc='True if the prefix is originated by multiple ASNs.')

    # Both ASN FKs cascade-delete with the ASN row.
    asn_id = Column(BigInteger,
                    ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_prefix_asn_id'),
                    nullable=False,
                    doc='Dependency. Network commonly seen in BGP paths towards monitored prefix.')
    originasn_id = Column(BigInteger,
                          ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_prefix_originasn_id'),
                          nullable=False,
                          doc='Network seen as originating the monitored prefix.')

    country_id = Column(String(4),
                        ForeignKey('ihr_country.code', ondelete='CASCADE', name='fk_hegemony_prefix_country_id'),
                        nullable=False,
                        doc="Country for the monitored prefix identified by Maxmind's Geolite2 geolocation database.")

    # NOTE(review): back_populates requires matching 'prefix_asn' /
    # 'prefix_originasn' relationships on the ASN model — confirm they exist
    # in models/asn.py, otherwise mapper configuration will fail.
    asn = relationship('ASN', foreign_keys=[asn_id], back_populates='prefix_asn')
    originasn = relationship('ASN', foreign_keys=[originasn_id], back_populates='prefix_originasn')
    country = relationship('Country')
class MetisAtlasDeployment(Base):
    """Metis ranking of candidate ASes for new Atlas probe deployment,
    computed over 24 weeks of data. TimescaleDB hypertable keyed on
    (id, timebin)."""

    __tablename__ = 'ihr_metis_atlas_deployment'

    # Hypertable partitioning requires the time column in the primary key.
    __table_args__ = (
        PrimaryKeyConstraint('id', 'timebin'),
    )

    # Consumed by the project's migration tooling (TimescaleDB setup).
    __hypertable__ = {
        'time_column': 'timebin',
        'chunk_time_interval': '7 day',
    }

    # Consumed by the project's migration tooling.
    __indexes__ = [
        {
            'name': 'ihr_metis_atlas_deployment_metric_rank_timebin_idx',
            'columns': ['metric', 'rank', 'timebin']
        }
    ]

    id = Column(BigInteger, autoincrement=True)

    timebin = Column(
        TIMESTAMP(timezone=True), nullable=False,
        doc='Time when the ranking is computed. The ranking uses 24 weeks of data, hence 2022-05-23T00:00 means the ranking using data from 2021-12-06T00:00 to 2022-05-23T00:00.')

    metric = Column(
        String(16), nullable=False,
        doc="Distance metric used to compute diversity, possible values are: 'as_path_length', 'ip_hops', 'rtt'")

    rank = Column(
        Integer, nullable=False,
        doc='Selecting all ASes with rank less than or equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric.')

    af = Column(
        Integer, nullable=False,
        doc='Address Family (IP version), values are either 4 or 6.')

    mean = Column(
        Float, nullable=False, default=0.0,
        doc='The mean distance value (e.g., AS-path length) we get when using all ASes up to this rank. This decreases with increasing rank, since lower ranks represent closer ASes.')

    nbsamples = Column(
        Integer, nullable=False, default=0,
        doc='The number of probe ASes for which we have traceroutes to this AS in the time interval. We currently only include candidates that were reached by at least 50% of probe ASes, hence these values are always large.')

    asn_id = Column(
        BigInteger,
        ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_metis_atlas_deployment_asn_id'),
        nullable=False,
        doc="Atlas probes' Autonomous System Number.")

    asn = relationship('ASN')
class MetisAtlasSelection(Base):
    """Metis ranking for selecting a diverse set of existing Atlas probes,
    computed over four weeks of data. TimescaleDB hypertable keyed on
    (id, timebin)."""

    __tablename__ = 'ihr_metis_atlas_selection'

    # Hypertable partitioning requires the time column in the primary key.
    __table_args__ = (
        PrimaryKeyConstraint('id', 'timebin'),
    )

    # Consumed by the project's migration tooling (TimescaleDB setup).
    __hypertable__ = {
        'time_column': 'timebin',
        'chunk_time_interval': '7 day',
    }

    # Consumed by the project's migration tooling.
    __indexes__ = [
        {
            'name': 'ihr_metis_atlas_selection_metric_rank_timebin_idx',
            'columns': ['metric', 'rank', 'timebin']
        }
    ]

    id = Column(BigInteger, autoincrement=True)

    timebin = Column(
        TIMESTAMP(timezone=True), nullable=False,
        doc='Time when the ranking is computed. The ranking uses four weeks of data, hence 2022-03-28T00:00 means the ranking using data from 2022-02-28T00:00 to 2022-03-28T00:00.')

    metric = Column(
        String(16), nullable=False,
        doc="Distance metric used to compute diversity, possible values are: 'as_path_length', 'ip_hops', 'rtt'")

    rank = Column(
        Integer, nullable=False,
        doc='Selecting all ASes with rank less than or equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric.')

    af = Column(
        Integer, nullable=False,
        doc='Address Family (IP version), values are either 4 or 6.')

    mean = Column(
        Float, nullable=False, default=0.0,
        doc='The mean distance value (e.g., AS-path length) we get when using all ASes up to this rank. This decreases with increasing rank, since lower ranks represent closer ASes.')

    asn_id = Column(
        BigInteger,
        ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_metis_atlas_selection_asn_id'),
        nullable=False,
        doc="Atlas probes' Autonomous System Number.")

    asn = relationship('ASN')
class TRHegemony(Base):
    """Traceroute-based AS Hegemony between two identifiers (AS, IXP, IXP
    member, or member IP). TimescaleDB hypertable keyed on (id, timebin)."""

    __tablename__ = 'ihr_tr_hegemony'

    # Hypertable partitioning requires the time column in the primary key.
    __table_args__ = (
        PrimaryKeyConstraint('id', 'timebin'),
    )

    # Consumed by the project's migration tooling (TimescaleDB setup).
    __hypertable__ = {
        'time_column': 'timebin',
        'chunk_time_interval': '2 day',
    }

    # Consumed by the project's migration tooling.
    __indexes__ = [
        {
            'name': 'ihr_tr_hegemony_dependency_id_timebin_idx',
            'columns': ['dependency_id', 'timebin DESC']
        },
        {
            'name': 'ihr_tr_hegemony_origin_id_timebin_idx',
            'columns': ['origin_id', 'timebin DESC']
        }
    ]

    id = Column(BigInteger, autoincrement=True)

    timebin = Column(TIMESTAMP(timezone=True), nullable=False,
                     doc='Timestamp of reported value. The computation uses four weeks of data, hence 2022-03-28T00:00 means the values are based on data from 2022-02-28T00:00 to 2022-03-28T00:00.')

    hege = Column(Float, default=0.0, nullable=False,
                  doc='AS Hegemony is the estimated fraction of paths towards the origin. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies.')

    af = Column(Integer, default=0, nullable=False,
                doc='Address family (IP version), values are either 4 or 6.')

    nbsamples = Column(Integer, default=0, nullable=False,
                       doc='The number of probe ASes for which we have traceroutes to the origin in the time interval. We only include AS Hegemony values that are based on traceroutes from at least ten probe ASes.')

    dependency_id = Column(BigInteger,
                           ForeignKey('ihr_tr_hegemony_identifier.id', ondelete='CASCADE',
                                      name='fk_tr_hegemony_dependency_id'),
                           nullable=False,
                           doc='Dependency. Transit network or IXP commonly seen in traceroutes towards the origin.')

    origin_id = Column(BigInteger,
                       ForeignKey('ihr_tr_hegemony_identifier.id', ondelete='CASCADE',
                                  name='fk_tr_hegemony_origin_id'),
                       nullable=False,
                       doc='Dependent network, it can be any public ASN. Retrieve all dependencies of a network by setting only this parameter and a timebin.')

    # Two FKs to the same table: foreign_keys disambiguates each join.
    dependency = relationship('TRHegemonyIdentifier', foreign_keys=[dependency_id])
    # FIX: the original used back_populates='local_graph', but
    # TRHegemonyIdentifier declares no 'local_graph' relationship, so mapper
    # configuration would fail. backref creates the reverse
    # TRHegemonyIdentifier.local_graph accessor automatically.
    origin = relationship('TRHegemonyIdentifier', foreign_keys=[origin_id],
                          backref='local_graph')
class TRHegemonyIdentifier(Base):
    """Identifier (AS, IXP, IXP member, or member IP) referenced by the
    traceroute-based hegemony tables."""

    __tablename__ = 'ihr_tr_hegemony_identifier'

    id = Column(BigInteger, autoincrement=True, primary_key=True)

    # NOTE(review): the original doc strings were line-wrapped in the source
    # dump; reconstructed here as explicit '\n'-separated bullet lists —
    # confirm against the original file.
    name = Column(String(255), nullable=False,
                  doc='Value of the identifier. The meaning depends on the identifier type:\n'
                      '  • type=AS: ASN\n'
                      '  • type=IX: PeeringDB IX ID\n'
                      '  • type=MB: IXP member (format: ix_id;asn)\n'
                      '  • type=IP: Interface IP of an IXP member\n')

    type = Column(String(4), nullable=False,
                  doc='Type of the identifier. Possible values are:\n'
                      '  • AS: Autonomous System\n'
                      '  • IX: IXP\n'
                      '  • MB: IXP member\n'
                      '  • IP: IXP member IP\n')

    af = Column(Integer, nullable=False,
                doc='Address family (IP version), values are either 4 or 6.')
String, Float, DateTime, Boolean, BigInteger + Column, Integer, String, Float, Boolean, BigInteger ) -from sqlalchemy.orm import declarative_base from config.database import Base +from sqlalchemy.dialects.postgresql import TIMESTAMP class DiscoEvents(Base): @@ -38,12 +38,12 @@ class DiscoEvents(Base): doc='Name of the topological (ASN) or geographical area where the network disconnection happened.' ) starttime = Column( - DateTime, + TIMESTAMP(timezone=True), nullable=False, doc='Estimated start time of the network disconnection.' ) endtime = Column( - DateTime, + TIMESTAMP(timezone=True), nullable=False, doc=( 'Estimated end time of the network disconnection. ' diff --git a/models/disco_probes.py b/models/disco_probes.py index 15a8b92..78620ac 100644 --- a/models/disco_probes.py +++ b/models/disco_probes.py @@ -15,7 +15,7 @@ class DiscoProbes(Base): starttime = Column(TIMESTAMP(timezone=True), nullable=False, doc='Probe disconnection time.') - endtime = Column(TIMESTAMP(timezone=True), nullable=True, + endtime = Column(TIMESTAMP(timezone=True), nullable=False, doc='Reconnection time of the probe, this may not be reported if other probes have reconnected earlier.') level = Column(Float, default=0.0, nullable=False, diff --git a/models/forwarding_alarms.py b/models/forwarding_alarms.py index 6a0a2a7..5532339 100644 --- a/models/forwarding_alarms.py +++ b/models/forwarding_alarms.py @@ -13,7 +13,7 @@ class ForwardingAlarms(Base): __indexes__ = [ { - 'name': '"ihr_forwarding_alarms_asn_id_timebin_idx"', + 'name': 'ihr_forwarding_alarms_asn_id_timebin_idx', 'columns': ['asn_id', 'timebin DESC'] } ] diff --git a/models/hegemonycone.py b/models/hegemonycone.py index 108f062..cde3078 100644 --- a/models/hegemonycone.py +++ b/models/hegemonycone.py @@ -1,4 +1,4 @@ -from sqlalchemy import Column, BigInteger, Integer, DateTime, ForeignKey, PrimaryKeyConstraint +from sqlalchemy import Column, BigInteger, Integer, ForeignKey, PrimaryKeyConstraint from sqlalchemy.orm import 
def upgrade() -> None:
    """Upgrade schema.

    Creates the initial IHR tables, converts the time-series tables into
    TimescaleDB hypertables, enables compression on ihr_hegemonycone, and
    creates the secondary indexes. Plain lookup tables (ihr_asn,
    ihr_country, ...) are created first so the hypertables' foreign keys
    resolve.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- plain reference tables ---
    op.create_table('ihr_asn',
    sa.Column('number', sa.BigInteger(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('tartiflette', sa.Boolean(), nullable=False),
    sa.Column('disco', sa.Boolean(), nullable=False),
    sa.Column('ashash', sa.Boolean(), nullable=False),
    sa.PrimaryKeyConstraint('number')
    )
    op.create_table('ihr_atlas_location',
    sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('type', sa.String(length=4), nullable=False),
    sa.Column('af', sa.Integer(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('ihr_country',
    sa.Column('code', sa.String(length=4), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('tartiflette', sa.Boolean(), nullable=False),
    sa.Column('disco', sa.Boolean(), nullable=False),
    sa.PrimaryKeyConstraint('code')
    )
    op.create_table('ihr_disco_events',
    sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
    sa.Column('mongoid', sa.String(length=24), nullable=False),
    sa.Column('streamtype', sa.String(length=10), nullable=False),
    sa.Column('streamname', sa.String(length=128), nullable=False),
    sa.Column('starttime', sa.DateTime(), nullable=False),
    sa.Column('endtime', sa.DateTime(), nullable=False),
    sa.Column('avglevel', sa.Float(), nullable=False),
    sa.Column('nbdiscoprobes', sa.Integer(), nullable=False),
    sa.Column('totalprobes', sa.Integer(), nullable=False),
    sa.Column('ongoing', sa.Boolean(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('ihr_tr_hegemony_identifier',
    sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('type', sa.String(length=4), nullable=False),
    sa.Column('af', sa.Integer(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # --- time-series tables; each is turned into a hypertable right after
    # creation, with (id, timebin) composite PKs as TimescaleDB requires ---
    op.create_table('ihr_delay_alarms',
    sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
    sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False),
    sa.Column('ip', sa.String(length=64), nullable=False),
    sa.Column('link', sa.String(length=128), nullable=False),
    sa.Column('medianrtt', sa.Float(), nullable=False),
    sa.Column('diffmedian', sa.Float(), nullable=False),
    sa.Column('deviation', sa.Float(), nullable=False),
    sa.Column('nbprobes', sa.Integer(), nullable=False),
    sa.Column('msm_prb_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('asn_id', sa.BigInteger(), nullable=False),
    sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', 'timebin')
    )
    op.execute("SELECT create_hypertable('ihr_delay_alarms', by_range('timebin', INTERVAL '2 day'));")
    op.create_table('ihr_forwarding_alarms',
    sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
    sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False),
    sa.Column('ip', sa.String(length=64), nullable=False),
    sa.Column('correlation', sa.Float(), nullable=False),
    sa.Column('responsibility', sa.Float(), nullable=False),
    sa.Column('pktdiff', sa.Float(), nullable=False),
    sa.Column('previoushop', sa.String(length=64), nullable=False),
    sa.Column('msm_prb_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('asn_id', sa.BigInteger(), nullable=False),
    sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_forwardingalarms_asn_id', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', 'timebin')
    )
    op.execute("SELECT create_hypertable('ihr_forwarding_alarms', by_range('timebin', INTERVAL '2 day'));")
    op.create_table('ihr_hegemonycone',
    sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
    sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False),
    sa.Column('conesize', sa.Integer(), nullable=False),
    sa.Column('af', sa.Integer(), nullable=False),
    sa.Column('asn_id', sa.BigInteger(), nullable=False),
    sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_ihr_hegemonycone_asn_id', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', 'timebin')
    )
    op.execute("SELECT create_hypertable('ihr_hegemonycone', by_range('timebin', INTERVAL '2 day'));")
    # Native compression for the cone table only: segment by (asn_id, af),
    # compress chunks older than 7 days.
    op.execute("ALTER TABLE ihr_hegemonycone SET (timescaledb.compress, timescaledb.compress_segmentby = 'asn_id,af', timescaledb.compress_orderby = 'timebin');")
    op.execute("SELECT add_compression_policy('ihr_hegemonycone', INTERVAL '7 days');")
    op.create_table('ihr_metis_atlas_selection',
    sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
    sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False),
    sa.Column('metric', sa.String(length=16), nullable=False),
    sa.Column('rank', sa.Integer(), nullable=False),
    sa.Column('af', sa.Integer(), nullable=False),
    sa.Column('mean', sa.Float(), nullable=False),
    sa.Column('asn_id', sa.BigInteger(), nullable=False),
    sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_metis_atlas_selection_asn_id', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', 'timebin')
    )
    op.execute("SELECT create_hypertable('ihr_metis_atlas_selection', by_range('timebin', INTERVAL '7 day'));")
    op.create_table('ihr_tr_hegemony',
    sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
    sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False),
    sa.Column('hege', sa.Float(), nullable=False),
    sa.Column('af', sa.Integer(), nullable=False),
    sa.Column('nbsamples', sa.Integer(), nullable=False),
    sa.Column('dependency_id', sa.BigInteger(), nullable=False),
    sa.Column('origin_id', sa.BigInteger(), nullable=False),
    sa.ForeignKeyConstraint(['dependency_id'], ['ihr_tr_hegemony_identifier.id'], name='fk_tr_hegemony_dependency_id', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['origin_id'], ['ihr_tr_hegemony_identifier.id'], name='fk_tr_hegemony_origin_id', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', 'timebin')
    )
    op.execute("SELECT create_hypertable('ihr_tr_hegemony', by_range('timebin', INTERVAL '2 day'));")
    # --- secondary indexes, created last so all tables exist ---
    op.execute('CREATE INDEX IF NOT EXISTS ihr_delay_alarms_asn_id_timebin_idx ON ihr_delay_alarms (asn_id, timebin DESC);')
    op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_events_mongoid_3a488192 ON ihr_disco_events (mongoid);')
    op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_events_streamtype_streamname_st_bda16df6_idx ON ihr_disco_events (streamtype, streamname, starttime, endtime);')
    op.execute('CREATE INDEX IF NOT EXISTS "ihr_forwarding_alarms_asn_id_timebin_idx" ON ihr_forwarding_alarms (asn_id, timebin DESC);')
    op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemonycone_asn_id_timebin_idx ON ihr_hegemonycone (asn_id, timebin DESC);')
    op.execute('CREATE INDEX IF NOT EXISTS ihr_metis_atlas_selection_metric_rank_timebin_idx ON ihr_metis_atlas_selection (metric, rank, timebin);')
    op.execute('CREATE INDEX IF NOT EXISTS ihr_tr_hegemony_dependency_id_timebin_idx ON ihr_tr_hegemony (dependency_id, timebin DESC);')
    op.execute('CREATE INDEX IF NOT EXISTS ihr_tr_hegemony_origin_id_timebin_idx ON ihr_tr_hegemony (origin_id, timebin DESC);')
    # ### end Alembic commands ###
### - op.execute('ALTER TABLE ihr_hegemonycone SET (timescaledb.compress = false);') - op.execute("SELECT remove_compression_policy('ihr_hegemonycone', if_exists => TRUE);") - op.execute('DROP INDEX IF EXISTS ihr_delay_alarms_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_disco_events_mongoid_3a488192;') - op.execute('DROP INDEX IF EXISTS ihr_disco_events_streamtype_streamname_st_bda16df6_idx;') - op.execute('DROP INDEX IF EXISTS "ihr_forwarding_alarms_asn_id_timebin_idx";') - op.execute('DROP INDEX IF EXISTS ihr_hegemonycone_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_metis_atlas_selection_metric_rank_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_dependency_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_origin_id_timebin_idx;') - op.drop_table('ihr_tr_hegemony') - op.drop_table('ihr_metis_atlas_selection') - op.drop_table('ihr_hegemonycone') - op.drop_table('ihr_forwarding_alarms') - op.drop_table('ihr_delay_alarms') - op.drop_table('ihr_tr_hegemony_identifier') - op.drop_table('ihr_disco_events') - op.drop_table('ihr_country') - op.drop_table('ihr_atlas_location') - op.drop_table('ihr_asn') - # ### end Alembic commands ### diff --git a/alembic/versions/880ae10f65b5_initial_migration.py b/alembic/versions/880ae10f65b5_initial_migration.py new file mode 100644 index 0000000..952a9bc --- /dev/null +++ b/alembic/versions/880ae10f65b5_initial_migration.py @@ -0,0 +1,373 @@ +"""initial migration + +Revision ID: 880ae10f65b5 +Revises: +Create Date: 2025-06-19 15:39:01.419361 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision: str = '880ae10f65b5' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('ihr_asn', + sa.Column('number', sa.BigInteger(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('tartiflette', sa.Boolean(), nullable=False), + sa.Column('disco', sa.Boolean(), nullable=False), + sa.Column('ashash', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('number') + ) + op.create_table('ihr_atlas_location', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('type', sa.String(length=4), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('ihr_country', + sa.Column('code', sa.String(length=4), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('tartiflette', sa.Boolean(), nullable=False), + sa.Column('disco', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('code') + ) + op.create_table('ihr_disco_events', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('mongoid', sa.String(length=24), nullable=False), + sa.Column('streamtype', sa.String(length=10), nullable=False), + sa.Column('streamname', sa.String(length=128), nullable=False), + sa.Column('starttime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('endtime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('avglevel', sa.Float(), nullable=False), + sa.Column('nbdiscoprobes', sa.Integer(), nullable=False), + sa.Column('totalprobes', sa.Integer(), nullable=False), + sa.Column('ongoing', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + 
op.create_table('ihr_tr_hegemony_identifier', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('type', sa.String(length=4), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('ihr_atlas_delay', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('median', sa.Float(), nullable=False), + sa.Column('nbtracks', sa.Integer(), nullable=False), + sa.Column('nbprobes', sa.Integer(), nullable=False), + sa.Column('entropy', sa.Float(), nullable=False), + sa.Column('hop', sa.Integer(), nullable=False), + sa.Column('nbrealrtts', sa.Integer(), nullable=False), + sa.Column('startpoint_id', sa.BigInteger(), nullable=False), + sa.Column('endpoint_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['endpoint_id'], ['ihr_atlas_location.id'], name='fk_atlas_delay_endpoint', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['startpoint_id'], ['ihr_atlas_location.id'], name='fk_atlas_delay_startpoint', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_atlas_delay', by_range('timebin', INTERVAL '2 day'));") + op.execute("ALTER TABLE ihr_atlas_delay SET (timescaledb.compress, timescaledb.compress_segmentby = 'startpoint_id,endpoint_id', timescaledb.compress_orderby = 'timebin');") + op.execute("SELECT add_compression_policy('ihr_atlas_delay', INTERVAL '7 days');") + op.create_table('ihr_atlas_delay_alarms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('deviation', sa.Float(), nullable=False), + sa.Column('startpoint_id', sa.BigInteger(), nullable=False), + sa.Column('endpoint_id', sa.BigInteger(), nullable=False), + 
sa.ForeignKeyConstraint(['endpoint_id'], ['ihr_atlas_location.id'], name='fk_atlas_delay_endpoint', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['startpoint_id'], ['ihr_atlas_location.id'], name='fk_atlas_delay_startpoint', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_atlas_delay_alarms', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_delay', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('magnitude', sa.Float(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_delay_asn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_delay', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_delay_alarms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('ip', sa.String(length=64), nullable=False), + sa.Column('link', sa.String(length=128), nullable=False), + sa.Column('medianrtt', sa.Float(), nullable=False), + sa.Column('diffmedian', sa.Float(), nullable=False), + sa.Column('deviation', sa.Float(), nullable=False), + sa.Column('nbprobes', sa.Integer(), nullable=False), + sa.Column('msm_prb_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_delay_alarms', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_disco_probes', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('probe_id', sa.Integer(), nullable=False), + 
sa.Column('starttime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('endtime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('level', sa.Float(), nullable=False), + sa.Column('ipv4', sa.String(length=64), nullable=False), + sa.Column('prefixv4', sa.String(length=70), nullable=False), + sa.Column('lat', sa.Float(), nullable=False), + sa.Column('lon', sa.Float(), nullable=False), + sa.Column('event_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['event_id'], ['ihr_disco_events.id'], name='fk_disco_probes_event_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('ihr_forwarding', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('magnitude', sa.Float(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_forwarding_asn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_forwarding', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_forwarding_alarms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('ip', sa.String(length=64), nullable=False), + sa.Column('correlation', sa.Float(), nullable=False), + sa.Column('responsibility', sa.Float(), nullable=False), + sa.Column('pktdiff', sa.Float(), nullable=False), + sa.Column('previoushop', sa.String(length=64), nullable=False), + sa.Column('msm_prb_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_forwardingalarms_asn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT 
create_hypertable('ihr_forwarding_alarms', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_hegemony', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('hege', sa.Float(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.Column('originasn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_hegemony_asn_id', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['originasn_id'], ['ihr_asn.number'], name='fk_hegemony_originasn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_hegemony', by_range('timebin', INTERVAL '2 day'));") + op.execute("ALTER TABLE ihr_hegemony SET (timescaledb.compress, timescaledb.compress_segmentby = 'af,originasn_id,asn_id', timescaledb.compress_orderby = 'timebin');") + op.execute("SELECT add_compression_policy('ihr_hegemony', INTERVAL '7 days');") + op.create_table('ihr_hegemony_alarms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('deviation', sa.Float(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.Column('originasn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_hegemony_alarms_asn_id', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['originasn_id'], ['ihr_asn.number'], name='fk_hegemony_alarms_originasn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_hegemony_alarms', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_hegemony_country', + sa.Column('id', sa.BigInteger(), 
autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('hege', sa.Float(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('weight', sa.Float(), nullable=False), + sa.Column('weightscheme', sa.String(length=16), nullable=False), + sa.Column('transitonly', sa.Boolean(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.Column('country_id', sa.String(length=4), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_hegemony_country_asn_id', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['country_id'], ['ihr_country.code'], name='fk_hegemony_country_country_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_hegemony_country', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_hegemony_prefix', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('prefix', sa.String(length=64), nullable=False), + sa.Column('hege', sa.Float(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('visibility', sa.Float(), nullable=False), + sa.Column('rpki_status', sa.String(length=32), nullable=False), + sa.Column('irr_status', sa.String(length=32), nullable=False), + sa.Column('delegated_prefix_status', sa.String(length=32), nullable=False), + sa.Column('delegated_asn_status', sa.String(length=32), nullable=False), + sa.Column('descr', sa.String(length=64), nullable=False), + sa.Column('moas', sa.Boolean(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.Column('originasn_id', sa.BigInteger(), nullable=False), + sa.Column('country_id', sa.String(length=4), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_hegemony_prefix_asn_id', ondelete='CASCADE'), + 
sa.ForeignKeyConstraint(['country_id'], ['ihr_country.code'], name='fk_hegemony_prefix_country_id', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['originasn_id'], ['ihr_asn.number'], name='fk_hegemony_prefix_originasn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_hegemony_prefix', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_hegemonycone', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('conesize', sa.Integer(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_ihr_hegemonycone_asn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_hegemonycone', by_range('timebin', INTERVAL '2 day'));") + op.execute("ALTER TABLE ihr_hegemonycone SET (timescaledb.compress, timescaledb.compress_segmentby = 'asn_id,af', timescaledb.compress_orderby = 'timebin');") + op.execute("SELECT add_compression_policy('ihr_hegemonycone', INTERVAL '7 days');") + op.create_table('ihr_metis_atlas_deployment', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('metric', sa.String(length=16), nullable=False), + sa.Column('rank', sa.Integer(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('mean', sa.Float(), nullable=False), + sa.Column('nbsamples', sa.Integer(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_metis_atlas_deployment_asn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT 
create_hypertable('ihr_metis_atlas_deployment', by_range('timebin', INTERVAL '7 day'));") + op.create_table('ihr_metis_atlas_selection', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('metric', sa.String(length=16), nullable=False), + sa.Column('rank', sa.Integer(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('mean', sa.Float(), nullable=False), + sa.Column('asn_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_metis_atlas_selection_asn_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_metis_atlas_selection', by_range('timebin', INTERVAL '7 day'));") + op.create_table('ihr_tr_hegemony', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('hege', sa.Float(), nullable=False), + sa.Column('af', sa.Integer(), nullable=False), + sa.Column('nbsamples', sa.Integer(), nullable=False), + sa.Column('dependency_id', sa.BigInteger(), nullable=False), + sa.Column('origin_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['dependency_id'], ['ihr_tr_hegemony_identifier.id'], name='fk_tr_hegemony_dependency_id', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['origin_id'], ['ihr_tr_hegemony_identifier.id'], name='fk_tr_hegemony_origin_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', 'timebin') + ) + op.execute("SELECT create_hypertable('ihr_tr_hegemony', by_range('timebin', INTERVAL '2 day'));") + op.create_table('ihr_delay_alarms_msms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('msmid', sa.BigInteger(), nullable=False), + sa.Column('probeid', sa.Integer(), nullable=False), + sa.Column('alarm_id', sa.BigInteger(), nullable=False), + 
sa.ForeignKeyConstraint(['alarm_id'], ['ihr_delay_alarms.id'], name='fk_delay_alarms_msms_alarm_id', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('ihr_forwarding_alarms_msms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('msmid', sa.BigInteger(), nullable=False), + sa.Column('probeid', sa.Integer(), nullable=False), + sa.Column('alarm_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['alarm_id'], ['ihr_forwarding_alarms.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_endpoint_id_timebin_idx ON ihr_atlas_delay (endpoint_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_startpoint_id_endpoint_id_timebin_idx ON ihr_atlas_delay (startpoint_id, endpoint_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_startpoint_id_timebin_idx ON ihr_atlas_delay (startpoint_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_alarms_startpoint_id_timebin_idx ON ihr_atlas_delay_alarms (startpoint_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_alarms_endpoint_id_timebin_idx ON ihr_atlas_delay_alarms (endpoint_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_delay_asn_id_timebin_idx ON ihr_delay (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_delay_alarms_asn_id_timebin_idx ON ihr_delay_alarms (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_events_mongoid_3a488192 ON ihr_disco_events (mongoid);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_events_streamtype_streamname_st_bda16df6_idx ON ihr_disco_events (streamtype, streamname, starttime, endtime);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_forwarding_asn_id_timebin_idx ON ihr_forwarding (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_forwarding_alarms_asn_id_timebin_idx ON 
ihr_forwarding_alarms (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_asn_id_timebin_idx ON ihr_hegemony (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_originasn_id_timebin_idx ON ihr_hegemony (originasn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_asn_id_originasn_id_timebin_idx ON ihr_hegemony (asn_id, originasn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemonycone_asn_id_timebin_idx ON ihr_hegemonycone (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_alarms_asn_id_timebin_idx ON ihr_hegemony_alarms (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_alarms_originasn_id_timebin_idx ON ihr_hegemony_alarms (originasn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_country_asn_id_timebin_idx ON ihr_hegemony_country (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_country_country_id_timebin_idx ON ihr_hegemony_country (country_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_prefix_prefix_timebin_idx ON ihr_hegemony_prefix (prefix, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_prefix_asn_id_timebin_idx ON ihr_hegemony_prefix (asn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_prefix_originasn_id_timebin_idx ON ihr_hegemony_prefix (originasn_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_prefix_country_id_timebin_idx ON ihr_hegemony_prefix (country_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_metis_atlas_deployment_metric_rank_timebin_idx ON ihr_metis_atlas_deployment (metric, rank, timebin);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_metis_atlas_selection_metric_rank_timebin_idx ON ihr_metis_atlas_selection (metric, rank, timebin);') + op.execute('CREATE INDEX IF NOT EXISTS 
ihr_tr_hegemony_dependency_id_timebin_idx ON ihr_tr_hegemony (dependency_id, timebin DESC);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_tr_hegemony_origin_id_timebin_idx ON ihr_tr_hegemony (origin_id, timebin DESC);') + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.execute('ALTER TABLE ihr_atlas_delay SET (timescaledb.compress = false);') + op.execute("SELECT remove_compression_policy('ihr_atlas_delay', if_exists => TRUE);") + op.execute('ALTER TABLE ihr_hegemony SET (timescaledb.compress = false);') + op.execute("SELECT remove_compression_policy('ihr_hegemony', if_exists => TRUE);") + op.execute('ALTER TABLE ihr_hegemonycone SET (timescaledb.compress = false);') + op.execute("SELECT remove_compression_policy('ihr_hegemonycone', if_exists => TRUE);") + op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_endpoint_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_startpoint_id_endpoint_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_startpoint_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_alarms_startpoint_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_alarms_endpoint_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_delay_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_delay_alarms_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_disco_events_mongoid_3a488192;') + op.execute('DROP INDEX IF EXISTS ihr_disco_events_streamtype_streamname_st_bda16df6_idx;') + op.execute('DROP INDEX IF EXISTS ihr_forwarding_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_forwarding_alarms_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_originasn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_asn_id_originasn_id_timebin_idx;') + 
op.execute('DROP INDEX IF EXISTS ihr_hegemonycone_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_alarms_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_alarms_originasn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_country_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_country_country_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_prefix_prefix_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_prefix_asn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_prefix_originasn_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_hegemony_prefix_country_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_metis_atlas_deployment_metric_rank_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_metis_atlas_selection_metric_rank_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_dependency_id_timebin_idx;') + op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_origin_id_timebin_idx;') + op.drop_table('ihr_forwarding_alarms_msms') + op.drop_table('ihr_delay_alarms_msms') + op.drop_table('ihr_tr_hegemony') + op.drop_table('ihr_metis_atlas_selection') + op.drop_table('ihr_metis_atlas_deployment') + op.drop_table('ihr_hegemonycone') + op.drop_table('ihr_hegemony_prefix') + op.drop_table('ihr_hegemony_country') + op.drop_table('ihr_hegemony_alarms') + op.drop_table('ihr_hegemony') + op.drop_table('ihr_forwarding_alarms') + op.drop_table('ihr_forwarding') + op.drop_table('ihr_disco_probes') + op.drop_table('ihr_delay_alarms') + op.drop_table('ihr_delay') + op.drop_table('ihr_atlas_delay_alarms') + op.drop_table('ihr_atlas_delay') + op.drop_table('ihr_tr_hegemony_identifier') + op.drop_table('ihr_disco_events') + op.drop_table('ihr_country') + op.drop_table('ihr_atlas_location') + op.drop_table('ihr_asn') + # ### end Alembic commands ### diff --git a/controllers/hegemony_cone_controller.py 
b/controllers/hegemony_cone_controller.py new file mode 100644 index 0000000..2cd94e7 --- /dev/null +++ b/controllers/hegemony_cone_controller.py @@ -0,0 +1,95 @@ +from fastapi import APIRouter, Depends, Query, Request, Response, HTTPException +from datetime import datetime, timedelta +from sqlalchemy.orm import Session +from services.hegemony_cone_service import HegemonyConeService +from dtos.generic_response_dto import GenericResponseDTO, build_url +from dtos.hegemony_cone_dto import HegemonyConeDTO +from config.database import get_db +from typing import Optional, List +from globals import page_size + +router = APIRouter(prefix="/hegemony/cones", tags=["Hegemony Cones"]) + + +class HegemonyConeController: + service = HegemonyConeService() + + @staticmethod + @router.get("/", response_model=GenericResponseDTO[HegemonyConeDTO]) + async def get_hegemony_cones( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Get results for exact timestamp"), + timebin_gte: Optional[datetime] = Query( + None, description="Get results after or equal to this timestamp"), + timebin_lte: Optional[datetime] = Query( + None, description="Get results before or equal to this timestamp"), + asn: Optional[str] = Query( + None, description="Autonomous System Number (ASN). Can be a single value or a list of comma separated values."), + af: Optional[int] = Query( + None, description="Address Family (IP version) either 4 or 6"), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results") + ) -> GenericResponseDTO[HegemonyConeDTO]: + """ + The number of networks that depend on a given network. This is similar to CAIDA's customer cone size. +
+ networks). + """ + # Check if at least one time parameter exists + if not any([timebin, timebin_gte, timebin_lte]): + raise HTTPException( + status_code=400, + detail="No timebin parameter. Please provide a timebin value or a range of values with timebin__lte and timebin__gte." + ) + + # If timebin is not provided, both timebin_gte and timebin_lte must be provided + if not timebin and not (timebin_gte and timebin_lte): + raise HTTPException( + status_code=400, + detail="Invalid timebin range. Please provide both timebin__lte and timebin__gte." + ) + + # If exact timebin is provided, it overrides the range parameters + if timebin: + timebin_gte = timebin + timebin_lte = timebin + + # Validate date range (max 7 days) + if timebin_gte and timebin_lte: + delta = timebin_lte - timebin_gte + if delta > timedelta(days=7): + raise HTTPException( + status_code=400, + detail="The given timebin range is too large. Should be less than 7 days." + ) + + # Convert comma-separated ASNs to list + asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None + + cones, total_count = HegemonyConeController.service.get_hegemony_cones( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_list, + af=af, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=cones + ) diff --git a/dtos/hegemony_cone_dto.py b/dtos/hegemony_cone_dto.py new file mode 100644 index 0000000..d92089a --- /dev/null +++ b/dtos/hegemony_cone_dto.py @@ -0,0 +1,12 @@ +from pydantic import BaseModel +from datetime import datetime + + +class HegemonyConeDTO(BaseModel): + timebin: datetime + asn: int + conesize: int + af: int + + class Config: + from_attributes = True diff --git a/models/asn.py b/models/asn.py index 
c845c75..114ee05 100644 --- a/models/asn.py +++ b/models/asn.py @@ -1,14 +1,21 @@ -from sqlalchemy import Column, BigInteger, Integer, String, Boolean +from sqlalchemy import Column, BigInteger, String, Boolean from sqlalchemy.orm import relationship from config.database import Base + class ASN(Base): __tablename__ = 'ihr_asn' - number = Column(BigInteger, primary_key=True, doc='Autonomous System Number (ASN) or IXP ID. Note that IXP ID are negative to avoid collision.') - name = Column(String(255), nullable=False, doc='Name registered for the network.') - tartiflette = Column(Boolean, default=False, nullable=False, doc='True if participate in link delay and forwarding anomaly analysis.') - disco = Column(Boolean, default=False, nullable=False, doc='True if participate in network disconnection analysis.') - ashash = Column(Boolean, default=False, nullable=False, doc='True if participate in AS dependency analysis.') - + number = Column(BigInteger, primary_key=True, + doc='Autonomous System Number (ASN) or IXP ID. 
Note that IXP ID are negative to avoid collision.') + name = Column(String(255), nullable=False, + doc='Name registered for the network.') + tartiflette = Column(Boolean, default=False, nullable=False, + doc='True if participate in link delay and forwarding anomaly analysis.') + disco = Column(Boolean, default=False, nullable=False, + doc='True if participate in network disconnection analysis.') + ashash = Column(Boolean, default=False, nullable=False, + doc='True if participate in AS dependency analysis.') + hegemony_cones = relationship( + 'HegemonyCone', back_populates='asn_relation') diff --git a/models/hegemonycone.py b/models/hegemonycone.py index cde3078..253d1ec 100644 --- a/models/hegemonycone.py +++ b/models/hegemonycone.py @@ -2,6 +2,7 @@ from sqlalchemy.orm import relationship from config.database import Base from sqlalchemy.dialects.postgresql import TIMESTAMP +from models.asn import ASN class HegemonyCone(Base): @@ -31,12 +32,14 @@ class HegemonyCone(Base): id = Column(BigInteger, autoincrement=True) timebin = Column(TIMESTAMP(timezone=True), nullable=False, doc='Timestamp with time zone.') - conesize = Column(Integer, default=0, nullable=False, doc="Number of dependent networks, namely, networks that are reached through the asn.") af = Column(Integer, default=0, nullable=False, doc='Address Family (IP version), values are either 4 or 6.') - asn_id = Column(BigInteger, ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_ihr_hegemonycone_asn_id'), - nullable=False, doc='Autonomous System Number (ASN).') + asn = Column('asn_id', BigInteger, + ForeignKey('ihr_asn.number', ondelete='CASCADE', + name='fk_ihr_hegemonycone_asn_id'), + nullable=False, + doc='Autonomous System Number (ASN).') asn_relation = relationship('ASN', back_populates='hegemony_cones') diff --git a/repositories/hegemony_cone_repository.py b/repositories/hegemony_cone_repository.py new file mode 100644 index 0000000..5c6fd4a --- /dev/null +++ 
b/repositories/hegemony_cone_repository.py @@ -0,0 +1,43 @@ +from datetime import datetime, timedelta +from sqlalchemy.orm import Session +from models.hegemonycone import HegemonyCone +from typing import Optional, List, Tuple +from globals import page_size + + +class HegemonyConeRepository: + def get_all( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + af: Optional[int] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyCone], int]: + query = db.query(HegemonyCone) + + # Apply filters + if timebin_gte: + query = query.filter(HegemonyCone.timebin >= timebin_gte) + if timebin_lte: + query = query.filter(HegemonyCone.timebin <= timebin_lte) + if asn_ids: + query = query.filter(HegemonyCone.asn.in_(asn_ids)) + if af: + query = query.filter(HegemonyCone.af == af) + + total_count = query.count() + + # Apply ordering + if order_by and hasattr(HegemonyCone, order_by): + query = query.order_by(getattr(HegemonyCone, order_by)) + else: + query = query.order_by(HegemonyCone.timebin) + + # Apply pagination + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/hegemony_cone_service.py b/services/hegemony_cone_service.py new file mode 100644 index 0000000..4ac278d --- /dev/null +++ b/services/hegemony_cone_service.py @@ -0,0 +1,40 @@ +from sqlalchemy.orm import Session +from repositories.hegemony_cone_repository import HegemonyConeRepository +from dtos.hegemony_cone_dto import HegemonyConeDTO +from typing import Optional, List, Tuple +from datetime import datetime + + +class HegemonyConeService: + def __init__(self): + self.repository = HegemonyConeRepository() + + def get_hegemony_cones( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + af: Optional[int] = None, + 
page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyConeDTO], int]: + """ + Get hegemony cone data with time-based filtering. + """ + cones, total_count = self.repository.get_all( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_ids, + af=af, + page=page, + order_by=order_by + ) + + return [HegemonyConeDTO( + timebin=cone.timebin, + asn=cone.asn, + conesize=cone.conesize, + af=cone.af + ) for cone in cones], total_count From f668e60f05a65fcc2aae4c4ce44797bb5072db9c Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 19 Jun 2025 16:39:27 +0300 Subject: [PATCH 13/43] Fixed foreign keys errors in models --- ...n.py => d16f00c0df13_initial_migration.py} | 40 +++++++++---------- models/delay_alarms_msms.py | 8 +--- models/forwarding_alarms_msms.py | 6 +-- 3 files changed, 21 insertions(+), 33 deletions(-) rename alembic/versions/{880ae10f65b5_initial_migration.py => d16f00c0df13_initial_migration.py} (98%) diff --git a/alembic/versions/880ae10f65b5_initial_migration.py b/alembic/versions/d16f00c0df13_initial_migration.py similarity index 98% rename from alembic/versions/880ae10f65b5_initial_migration.py rename to alembic/versions/d16f00c0df13_initial_migration.py index 952a9bc..5839974 100644 --- a/alembic/versions/880ae10f65b5_initial_migration.py +++ b/alembic/versions/d16f00c0df13_initial_migration.py @@ -1,8 +1,8 @@ """initial migration -Revision ID: 880ae10f65b5 +Revision ID: d16f00c0df13 Revises: -Create Date: 2025-06-19 15:39:01.419361 +Create Date: 2025-06-19 16:37:25.431083 """ from typing import Sequence, Union @@ -12,7 +12,7 @@ from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. 
-revision: str = '880ae10f65b5' +revision: str = 'd16f00c0df13' down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None @@ -43,6 +43,13 @@ def upgrade() -> None: sa.Column('disco', sa.Boolean(), nullable=False), sa.PrimaryKeyConstraint('code') ) + op.create_table('ihr_delay_alarms_msms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('msmid', sa.BigInteger(), nullable=False), + sa.Column('probeid', sa.Integer(), nullable=False), + sa.Column('alarm_id', sa.BigInteger(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) op.create_table('ihr_disco_events', sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), sa.Column('mongoid', sa.String(length=24), nullable=False), @@ -56,6 +63,13 @@ def upgrade() -> None: sa.Column('ongoing', sa.Boolean(), nullable=False), sa.PrimaryKeyConstraint('id') ) + op.create_table('ihr_forwarding_alarms_msms', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('msmid', sa.BigInteger(), nullable=False), + sa.Column('probeid', sa.Integer(), nullable=False), + sa.Column('alarm_id', sa.BigInteger(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) op.create_table('ihr_tr_hegemony_identifier', sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), sa.Column('name', sa.String(length=255), nullable=False), @@ -266,22 +280,6 @@ def upgrade() -> None: sa.PrimaryKeyConstraint('id', 'timebin') ) op.execute("SELECT create_hypertable('ihr_tr_hegemony', by_range('timebin', INTERVAL '2 day'));") - op.create_table('ihr_delay_alarms_msms', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('msmid', sa.BigInteger(), nullable=False), - sa.Column('probeid', sa.Integer(), nullable=False), - sa.Column('alarm_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['alarm_id'], ['ihr_delay_alarms.id'], 
name='fk_delay_alarms_msms_alarm_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('ihr_forwarding_alarms_msms', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('msmid', sa.BigInteger(), nullable=False), - sa.Column('probeid', sa.Integer(), nullable=False), - sa.Column('alarm_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['alarm_id'], ['ihr_forwarding_alarms.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_endpoint_id_timebin_idx ON ihr_atlas_delay (endpoint_id, timebin DESC);') op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_startpoint_id_endpoint_id_timebin_idx ON ihr_atlas_delay (startpoint_id, endpoint_id, timebin DESC);') op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_startpoint_id_timebin_idx ON ihr_atlas_delay (startpoint_id, timebin DESC);') @@ -348,8 +346,6 @@ def downgrade() -> None: op.execute('DROP INDEX IF EXISTS ihr_metis_atlas_selection_metric_rank_timebin_idx;') op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_dependency_id_timebin_idx;') op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_origin_id_timebin_idx;') - op.drop_table('ihr_forwarding_alarms_msms') - op.drop_table('ihr_delay_alarms_msms') op.drop_table('ihr_tr_hegemony') op.drop_table('ihr_metis_atlas_selection') op.drop_table('ihr_metis_atlas_deployment') @@ -366,7 +362,9 @@ def downgrade() -> None: op.drop_table('ihr_atlas_delay_alarms') op.drop_table('ihr_atlas_delay') op.drop_table('ihr_tr_hegemony_identifier') + op.drop_table('ihr_forwarding_alarms_msms') op.drop_table('ihr_disco_events') + op.drop_table('ihr_delay_alarms_msms') op.drop_table('ihr_country') op.drop_table('ihr_atlas_location') op.drop_table('ihr_asn') diff --git a/models/delay_alarms_msms.py b/models/delay_alarms_msms.py index ec6f35f..cdbbd72 100644 --- a/models/delay_alarms_msms.py +++ b/models/delay_alarms_msms.py @@ -12,10 +12,4 @@ class 
DelayAlarmsMsms(Base): probeid = Column(Integer, default=0, nullable=False) - alarm_id = Column(BigInteger, - ForeignKey('ihr_delay_alarms.id', ondelete='CASCADE', - name='fk_delay_alarms_msms_alarm_id'), - nullable=False) - - alarm = relationship('DelayAlarms', foreign_keys=[ - alarm_id], backref='msmid_entries') + alarm_id = Column(BigInteger, nullable=False) diff --git a/models/forwarding_alarms_msms.py b/models/forwarding_alarms_msms.py index 2998d43..a23da6c 100644 --- a/models/forwarding_alarms_msms.py +++ b/models/forwarding_alarms_msms.py @@ -12,8 +12,4 @@ class ForwardingAlarmsMsms(Base): probeid = Column(Integer, default=0, nullable=False) - alarm_id = Column(BigInteger, - ForeignKey('ihr_forwarding_alarms.id', ondelete='CASCADE'), - nullable=False) - - alarm = relationship('ForwardingAlarms', back_populates='msms') \ No newline at end of file + alarm_id = Column(BigInteger, nullable=False) From 5ff2a66c5a04a8f9470ea79ff4fec3b5dc1683db Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 19 Jun 2025 19:38:29 +0300 Subject: [PATCH 14/43] Added missing indexes to models --- .../versions/852def42fccc_second_migration.py | 214 ++++++++++ .../d16f00c0df13_initial_migration.py | 371 ------------------ models/atlas_location.py | 11 +- models/delay_alarms_msms.py | 5 + models/disco_probes.py | 7 +- models/forwarding_alarms_msms.py | 5 + 6 files changed, 238 insertions(+), 375 deletions(-) create mode 100644 alembic/versions/852def42fccc_second_migration.py delete mode 100644 alembic/versions/d16f00c0df13_initial_migration.py diff --git a/alembic/versions/852def42fccc_second_migration.py b/alembic/versions/852def42fccc_second_migration.py new file mode 100644 index 0000000..39b3011 --- /dev/null +++ b/alembic/versions/852def42fccc_second_migration.py @@ -0,0 +1,214 @@ +"""second migration + +Revision ID: 852def42fccc +Revises: +Create Date: 2025-06-19 19:32:34.889779 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# 
revision identifiers, used by Alembic. +revision: str = '852def42fccc' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('ihr_atlas_delay', 'startpoint_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('ihr_atlas_delay', 'endpoint_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.create_foreign_key('fk_atlas_delay_endpoint', 'ihr_atlas_delay', 'ihr_atlas_location', ['endpoint_id'], ['id'], ondelete='CASCADE') + op.create_foreign_key('fk_atlas_delay_startpoint', 'ihr_atlas_delay', 'ihr_atlas_location', ['startpoint_id'], ['id'], ondelete='CASCADE') + op.alter_column('ihr_atlas_delay_alarms', 'startpoint_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('ihr_atlas_delay_alarms', 'endpoint_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.create_foreign_key('fk_atlas_delay_endpoint', 'ihr_atlas_delay_alarms', 'ihr_atlas_location', ['endpoint_id'], ['id'], ondelete='CASCADE') + op.create_foreign_key('fk_atlas_delay_startpoint', 'ihr_atlas_delay_alarms', 'ihr_atlas_location', ['startpoint_id'], ['id'], ondelete='CASCADE') + op.alter_column('ihr_atlas_location', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + op.create_foreign_key('fk_delay_asn_id', 'ihr_delay', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') + op.create_foreign_key(None, 'ihr_delay_alarms', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') + op.alter_column('ihr_delay_alarms_msms', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + 
op.alter_column('ihr_delay_alarms_msms', 'alarm_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('ihr_disco_events', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + op.alter_column('ihr_disco_probes', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + op.alter_column('ihr_disco_probes', 'event_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.drop_constraint('ihr_disco_probes_event_id_64ec2998_fk_ihr_disco_events_id', 'ihr_disco_probes', type_='foreignkey') + op.create_foreign_key('fk_disco_probes_event_id', 'ihr_disco_probes', 'ihr_disco_events', ['event_id'], ['id'], ondelete='CASCADE') + op.create_foreign_key('fk_forwarding_asn_id', 'ihr_forwarding', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') + op.create_foreign_key('fk_forwardingalarms_asn_id', 'ihr_forwarding_alarms', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') + op.alter_column('ihr_forwarding_alarms_msms', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + op.alter_column('ihr_forwarding_alarms_msms', 'alarm_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.create_foreign_key('fk_hegemony_originasn_id', 'ihr_hegemony', 'ihr_asn', ['originasn_id'], ['number'], ondelete='CASCADE') + op.create_foreign_key('fk_hegemony_asn_id', 'ihr_hegemony', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') + op.create_foreign_key('fk_hegemony_alarms_asn_id', 'ihr_hegemony_alarms', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') + op.create_foreign_key('fk_hegemony_alarms_originasn_id', 'ihr_hegemony_alarms', 'ihr_asn', ['originasn_id'], ['number'], ondelete='CASCADE') + op.create_foreign_key('fk_hegemony_country_asn_id', 'ihr_hegemony_country', 'ihr_asn', ['asn_id'], 
['number'], ondelete='CASCADE') + op.create_foreign_key('fk_hegemony_country_country_id', 'ihr_hegemony_country', 'ihr_country', ['country_id'], ['code'], ondelete='CASCADE') + op.create_foreign_key('fk_hegemony_prefix_originasn_id', 'ihr_hegemony_prefix', 'ihr_asn', ['originasn_id'], ['number'], ondelete='CASCADE') + op.create_foreign_key('fk_hegemony_prefix_asn_id', 'ihr_hegemony_prefix', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') + op.create_foreign_key('fk_hegemony_prefix_country_id', 'ihr_hegemony_prefix', 'ihr_country', ['country_id'], ['code'], ondelete='CASCADE') + op.create_foreign_key('fk_metis_atlas_deployment_asn_id', 'ihr_metis_atlas_deployment', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') + op.create_foreign_key('fk_metis_atlas_selection_asn_id', 'ihr_metis_atlas_selection', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') + op.alter_column('ihr_tr_hegemony', 'dependency_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('ihr_tr_hegemony', 'origin_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.create_foreign_key('fk_tr_hegemony_dependency_id', 'ihr_tr_hegemony', 'ihr_tr_hegemony_identifier', ['dependency_id'], ['id'], ondelete='CASCADE') + op.create_foreign_key('fk_tr_hegemony_origin_id', 'ihr_tr_hegemony', 'ihr_tr_hegemony_identifier', ['origin_id'], ['id'], ondelete='CASCADE') + op.alter_column('ihr_tr_hegemony_identifier', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + op.execute('CREATE INDEX IF NOT EXISTS ihr_delay_alarms_msms_alarm_id ON ihr_delay_alarms_msms (alarm_id);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_probes_event_id ON ihr_disco_probes (event_id);') + op.execute('CREATE INDEX IF NOT EXISTS ihr_forwarding_alarms_msms_alarm_id ON ihr_forwarding_alarms_msms (alarm_id);') + # ### end Alembic commands ### + + +def downgrade() -> None: + 
"""Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.execute('DROP INDEX IF EXISTS ihr_delay_alarms_msms_alarm_id;') + op.execute('DROP INDEX IF EXISTS ihr_disco_probes_event_id;') + op.execute('DROP INDEX IF EXISTS ihr_forwarding_alarms_msms_alarm_id;') + op.alter_column('ihr_tr_hegemony_identifier', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True) + op.drop_constraint('fk_tr_hegemony_origin_id', 'ihr_tr_hegemony', type_='foreignkey') + op.drop_constraint('fk_tr_hegemony_dependency_id', 'ihr_tr_hegemony', type_='foreignkey') + op.alter_column('ihr_tr_hegemony', 'origin_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('ihr_tr_hegemony', 'dependency_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.drop_constraint('fk_metis_atlas_selection_asn_id', 'ihr_metis_atlas_selection', type_='foreignkey') + op.drop_constraint('fk_metis_atlas_deployment_asn_id', 'ihr_metis_atlas_deployment', type_='foreignkey') + op.drop_constraint('fk_hegemony_prefix_country_id', 'ihr_hegemony_prefix', type_='foreignkey') + op.drop_constraint('fk_hegemony_prefix_asn_id', 'ihr_hegemony_prefix', type_='foreignkey') + op.drop_constraint('fk_hegemony_prefix_originasn_id', 'ihr_hegemony_prefix', type_='foreignkey') + op.drop_constraint('fk_hegemony_country_country_id', 'ihr_hegemony_country', type_='foreignkey') + op.drop_constraint('fk_hegemony_country_asn_id', 'ihr_hegemony_country', type_='foreignkey') + op.drop_constraint('fk_hegemony_alarms_originasn_id', 'ihr_hegemony_alarms', type_='foreignkey') + op.drop_constraint('fk_hegemony_alarms_asn_id', 'ihr_hegemony_alarms', type_='foreignkey') + op.drop_constraint('fk_hegemony_asn_id', 'ihr_hegemony', type_='foreignkey') + op.drop_constraint('fk_hegemony_originasn_id', 'ihr_hegemony', type_='foreignkey') + op.alter_column('ihr_forwarding_alarms_msms', 
'alarm_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('ihr_forwarding_alarms_msms', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True) + op.drop_constraint('fk_forwardingalarms_asn_id', 'ihr_forwarding_alarms', type_='foreignkey') + op.drop_constraint('fk_forwarding_asn_id', 'ihr_forwarding', type_='foreignkey') + op.drop_constraint('fk_disco_probes_event_id', 'ihr_disco_probes', type_='foreignkey') + op.create_foreign_key('ihr_disco_probes_event_id_64ec2998_fk_ihr_disco_events_id', 'ihr_disco_probes', 'ihr_disco_events', ['event_id'], ['id'], initially='DEFERRED', deferrable=True) + op.alter_column('ihr_disco_probes', 'event_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('ihr_disco_probes', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True) + op.alter_column('ihr_disco_events', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True) + op.alter_column('ihr_delay_alarms_msms', 'alarm_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('ihr_delay_alarms_msms', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True) + op.drop_constraint(None, 'ihr_delay_alarms', type_='foreignkey') + op.drop_constraint('fk_delay_asn_id', 'ihr_delay', type_='foreignkey') + op.alter_column('ihr_atlas_location', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True) + op.drop_constraint('fk_atlas_delay_startpoint', 'ihr_atlas_delay_alarms', type_='foreignkey') + op.drop_constraint('fk_atlas_delay_endpoint', 'ihr_atlas_delay_alarms', type_='foreignkey') + op.alter_column('ihr_atlas_delay_alarms', 'endpoint_id', + existing_type=sa.BigInteger(), + 
type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('ihr_atlas_delay_alarms', 'startpoint_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.drop_constraint('fk_atlas_delay_startpoint', 'ihr_atlas_delay', type_='foreignkey') + op.drop_constraint('fk_atlas_delay_endpoint', 'ihr_atlas_delay', type_='foreignkey') + op.alter_column('ihr_atlas_delay', 'endpoint_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('ihr_atlas_delay', 'startpoint_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + # ### end Alembic commands ### diff --git a/alembic/versions/d16f00c0df13_initial_migration.py b/alembic/versions/d16f00c0df13_initial_migration.py deleted file mode 100644 index 5839974..0000000 --- a/alembic/versions/d16f00c0df13_initial_migration.py +++ /dev/null @@ -1,371 +0,0 @@ -"""initial migration - -Revision ID: d16f00c0df13 -Revises: -Create Date: 2025-06-19 16:37:25.431083 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = 'd16f00c0df13' -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('ihr_asn', - sa.Column('number', sa.BigInteger(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('tartiflette', sa.Boolean(), nullable=False), - sa.Column('disco', sa.Boolean(), nullable=False), - sa.Column('ashash', sa.Boolean(), nullable=False), - sa.PrimaryKeyConstraint('number') - ) - op.create_table('ihr_atlas_location', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('type', sa.String(length=4), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('ihr_country', - sa.Column('code', sa.String(length=4), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('tartiflette', sa.Boolean(), nullable=False), - sa.Column('disco', sa.Boolean(), nullable=False), - sa.PrimaryKeyConstraint('code') - ) - op.create_table('ihr_delay_alarms_msms', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('msmid', sa.BigInteger(), nullable=False), - sa.Column('probeid', sa.Integer(), nullable=False), - sa.Column('alarm_id', sa.BigInteger(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('ihr_disco_events', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('mongoid', sa.String(length=24), nullable=False), - sa.Column('streamtype', sa.String(length=10), nullable=False), - sa.Column('streamname', sa.String(length=128), nullable=False), - sa.Column('starttime', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('endtime', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('avglevel', sa.Float(), nullable=False), - sa.Column('nbdiscoprobes', sa.Integer(), nullable=False), - sa.Column('totalprobes', sa.Integer(), nullable=False), - sa.Column('ongoing', sa.Boolean(), nullable=False), - sa.PrimaryKeyConstraint('id') 
- ) - op.create_table('ihr_forwarding_alarms_msms', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('msmid', sa.BigInteger(), nullable=False), - sa.Column('probeid', sa.Integer(), nullable=False), - sa.Column('alarm_id', sa.BigInteger(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('ihr_tr_hegemony_identifier', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('type', sa.String(length=4), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('ihr_atlas_delay', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('median', sa.Float(), nullable=False), - sa.Column('nbtracks', sa.Integer(), nullable=False), - sa.Column('nbprobes', sa.Integer(), nullable=False), - sa.Column('entropy', sa.Float(), nullable=False), - sa.Column('hop', sa.Integer(), nullable=False), - sa.Column('nbrealrtts', sa.Integer(), nullable=False), - sa.Column('startpoint_id', sa.BigInteger(), nullable=False), - sa.Column('endpoint_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['endpoint_id'], ['ihr_atlas_location.id'], name='fk_atlas_delay_endpoint', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['startpoint_id'], ['ihr_atlas_location.id'], name='fk_atlas_delay_startpoint', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_atlas_delay', by_range('timebin', INTERVAL '2 day'));") - op.execute("ALTER TABLE ihr_atlas_delay SET (timescaledb.compress, timescaledb.compress_segmentby = 'startpoint_id,endpoint_id', timescaledb.compress_orderby = 'timebin');") - op.execute("SELECT add_compression_policy('ihr_atlas_delay', INTERVAL '7 days');") - op.create_table('ihr_atlas_delay_alarms', - sa.Column('id', 
sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('deviation', sa.Float(), nullable=False), - sa.Column('startpoint_id', sa.BigInteger(), nullable=False), - sa.Column('endpoint_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['endpoint_id'], ['ihr_atlas_location.id'], name='fk_atlas_delay_endpoint', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['startpoint_id'], ['ihr_atlas_location.id'], name='fk_atlas_delay_startpoint', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_atlas_delay_alarms', by_range('timebin', INTERVAL '2 day'));") - op.create_table('ihr_delay', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('magnitude', sa.Float(), nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_delay_asn_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_delay', by_range('timebin', INTERVAL '2 day'));") - op.create_table('ihr_delay_alarms', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('ip', sa.String(length=64), nullable=False), - sa.Column('link', sa.String(length=128), nullable=False), - sa.Column('medianrtt', sa.Float(), nullable=False), - sa.Column('diffmedian', sa.Float(), nullable=False), - sa.Column('deviation', sa.Float(), nullable=False), - sa.Column('nbprobes', sa.Integer(), nullable=False), - sa.Column('msm_prb_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], ondelete='CASCADE'), - 
sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_delay_alarms', by_range('timebin', INTERVAL '2 day'));") - op.create_table('ihr_disco_probes', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('probe_id', sa.Integer(), nullable=False), - sa.Column('starttime', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('endtime', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('level', sa.Float(), nullable=False), - sa.Column('ipv4', sa.String(length=64), nullable=False), - sa.Column('prefixv4', sa.String(length=70), nullable=False), - sa.Column('lat', sa.Float(), nullable=False), - sa.Column('lon', sa.Float(), nullable=False), - sa.Column('event_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['event_id'], ['ihr_disco_events.id'], name='fk_disco_probes_event_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('ihr_forwarding', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('magnitude', sa.Float(), nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_forwarding_asn_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_forwarding', by_range('timebin', INTERVAL '2 day'));") - op.create_table('ihr_forwarding_alarms', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('ip', sa.String(length=64), nullable=False), - sa.Column('correlation', sa.Float(), nullable=False), - sa.Column('responsibility', sa.Float(), nullable=False), - sa.Column('pktdiff', sa.Float(), nullable=False), - sa.Column('previoushop', sa.String(length=64), nullable=False), - 
sa.Column('msm_prb_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_forwardingalarms_asn_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_forwarding_alarms', by_range('timebin', INTERVAL '2 day'));") - op.create_table('ihr_hegemony', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('hege', sa.Float(), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.Column('originasn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_hegemony_asn_id', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['originasn_id'], ['ihr_asn.number'], name='fk_hegemony_originasn_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_hegemony', by_range('timebin', INTERVAL '2 day'));") - op.execute("ALTER TABLE ihr_hegemony SET (timescaledb.compress, timescaledb.compress_segmentby = 'af,originasn_id,asn_id', timescaledb.compress_orderby = 'timebin');") - op.execute("SELECT add_compression_policy('ihr_hegemony', INTERVAL '7 days');") - op.create_table('ihr_hegemony_alarms', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('deviation', sa.Float(), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.Column('originasn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_hegemony_alarms_asn_id', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['originasn_id'], 
['ihr_asn.number'], name='fk_hegemony_alarms_originasn_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_hegemony_alarms', by_range('timebin', INTERVAL '2 day'));") - op.create_table('ihr_hegemony_country', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('hege', sa.Float(), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.Column('weight', sa.Float(), nullable=False), - sa.Column('weightscheme', sa.String(length=16), nullable=False), - sa.Column('transitonly', sa.Boolean(), nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.Column('country_id', sa.String(length=4), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_hegemony_country_asn_id', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['country_id'], ['ihr_country.code'], name='fk_hegemony_country_country_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_hegemony_country', by_range('timebin', INTERVAL '2 day'));") - op.create_table('ihr_hegemony_prefix', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('prefix', sa.String(length=64), nullable=False), - sa.Column('hege', sa.Float(), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.Column('visibility', sa.Float(), nullable=False), - sa.Column('rpki_status', sa.String(length=32), nullable=False), - sa.Column('irr_status', sa.String(length=32), nullable=False), - sa.Column('delegated_prefix_status', sa.String(length=32), nullable=False), - sa.Column('delegated_asn_status', sa.String(length=32), nullable=False), - sa.Column('descr', sa.String(length=64), nullable=False), - sa.Column('moas', sa.Boolean(), 
nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.Column('originasn_id', sa.BigInteger(), nullable=False), - sa.Column('country_id', sa.String(length=4), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_hegemony_prefix_asn_id', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['country_id'], ['ihr_country.code'], name='fk_hegemony_prefix_country_id', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['originasn_id'], ['ihr_asn.number'], name='fk_hegemony_prefix_originasn_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_hegemony_prefix', by_range('timebin', INTERVAL '2 day'));") - op.create_table('ihr_hegemonycone', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('conesize', sa.Integer(), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_ihr_hegemonycone_asn_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_hegemonycone', by_range('timebin', INTERVAL '2 day'));") - op.execute("ALTER TABLE ihr_hegemonycone SET (timescaledb.compress, timescaledb.compress_segmentby = 'asn_id,af', timescaledb.compress_orderby = 'timebin');") - op.execute("SELECT add_compression_policy('ihr_hegemonycone', INTERVAL '7 days');") - op.create_table('ihr_metis_atlas_deployment', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('metric', sa.String(length=16), nullable=False), - sa.Column('rank', sa.Integer(), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.Column('mean', sa.Float(), nullable=False), - sa.Column('nbsamples', 
sa.Integer(), nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_metis_atlas_deployment_asn_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_metis_atlas_deployment', by_range('timebin', INTERVAL '7 day'));") - op.create_table('ihr_metis_atlas_selection', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('metric', sa.String(length=16), nullable=False), - sa.Column('rank', sa.Integer(), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.Column('mean', sa.Float(), nullable=False), - sa.Column('asn_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['asn_id'], ['ihr_asn.number'], name='fk_metis_atlas_selection_asn_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_metis_atlas_selection', by_range('timebin', INTERVAL '7 day'));") - op.create_table('ihr_tr_hegemony', - sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), - sa.Column('timebin', postgresql.TIMESTAMP(timezone=True), nullable=False), - sa.Column('hege', sa.Float(), nullable=False), - sa.Column('af', sa.Integer(), nullable=False), - sa.Column('nbsamples', sa.Integer(), nullable=False), - sa.Column('dependency_id', sa.BigInteger(), nullable=False), - sa.Column('origin_id', sa.BigInteger(), nullable=False), - sa.ForeignKeyConstraint(['dependency_id'], ['ihr_tr_hegemony_identifier.id'], name='fk_tr_hegemony_dependency_id', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['origin_id'], ['ihr_tr_hegemony_identifier.id'], name='fk_tr_hegemony_origin_id', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'timebin') - ) - op.execute("SELECT create_hypertable('ihr_tr_hegemony', by_range('timebin', INTERVAL '2 day'));") - 
op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_endpoint_id_timebin_idx ON ihr_atlas_delay (endpoint_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_startpoint_id_endpoint_id_timebin_idx ON ihr_atlas_delay (startpoint_id, endpoint_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_startpoint_id_timebin_idx ON ihr_atlas_delay (startpoint_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_alarms_startpoint_id_timebin_idx ON ihr_atlas_delay_alarms (startpoint_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_atlas_delay_alarms_endpoint_id_timebin_idx ON ihr_atlas_delay_alarms (endpoint_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_delay_asn_id_timebin_idx ON ihr_delay (asn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_delay_alarms_asn_id_timebin_idx ON ihr_delay_alarms (asn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_events_mongoid_3a488192 ON ihr_disco_events (mongoid);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_events_streamtype_streamname_st_bda16df6_idx ON ihr_disco_events (streamtype, streamname, starttime, endtime);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_forwarding_asn_id_timebin_idx ON ihr_forwarding (asn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_forwarding_alarms_asn_id_timebin_idx ON ihr_forwarding_alarms (asn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_asn_id_timebin_idx ON ihr_hegemony (asn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_originasn_id_timebin_idx ON ihr_hegemony (originasn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_asn_id_originasn_id_timebin_idx ON ihr_hegemony (asn_id, originasn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemonycone_asn_id_timebin_idx ON ihr_hegemonycone (asn_id, timebin DESC);') - 
op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_alarms_asn_id_timebin_idx ON ihr_hegemony_alarms (asn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_alarms_originasn_id_timebin_idx ON ihr_hegemony_alarms (originasn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_country_asn_id_timebin_idx ON ihr_hegemony_country (asn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_country_country_id_timebin_idx ON ihr_hegemony_country (country_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_prefix_prefix_timebin_idx ON ihr_hegemony_prefix (prefix, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_prefix_asn_id_timebin_idx ON ihr_hegemony_prefix (asn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_prefix_originasn_id_timebin_idx ON ihr_hegemony_prefix (originasn_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_hegemony_prefix_country_id_timebin_idx ON ihr_hegemony_prefix (country_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_metis_atlas_deployment_metric_rank_timebin_idx ON ihr_metis_atlas_deployment (metric, rank, timebin);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_metis_atlas_selection_metric_rank_timebin_idx ON ihr_metis_atlas_selection (metric, rank, timebin);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_tr_hegemony_dependency_id_timebin_idx ON ihr_tr_hegemony (dependency_id, timebin DESC);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_tr_hegemony_origin_id_timebin_idx ON ihr_tr_hegemony (origin_id, timebin DESC);') - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.execute('ALTER TABLE ihr_atlas_delay SET (timescaledb.compress = false);') - op.execute("SELECT remove_compression_policy('ihr_atlas_delay', if_exists => TRUE);") - op.execute('ALTER TABLE ihr_hegemony SET (timescaledb.compress = false);') - op.execute("SELECT remove_compression_policy('ihr_hegemony', if_exists => TRUE);") - op.execute('ALTER TABLE ihr_hegemonycone SET (timescaledb.compress = false);') - op.execute("SELECT remove_compression_policy('ihr_hegemonycone', if_exists => TRUE);") - op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_endpoint_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_startpoint_id_endpoint_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_startpoint_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_alarms_startpoint_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_atlas_delay_alarms_endpoint_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_delay_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_delay_alarms_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_disco_events_mongoid_3a488192;') - op.execute('DROP INDEX IF EXISTS ihr_disco_events_streamtype_streamname_st_bda16df6_idx;') - op.execute('DROP INDEX IF EXISTS ihr_forwarding_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_forwarding_alarms_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_originasn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_asn_id_originasn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemonycone_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_alarms_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_alarms_originasn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_country_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS 
ihr_hegemony_country_country_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_prefix_prefix_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_prefix_asn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_prefix_originasn_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_hegemony_prefix_country_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_metis_atlas_deployment_metric_rank_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_metis_atlas_selection_metric_rank_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_dependency_id_timebin_idx;') - op.execute('DROP INDEX IF EXISTS ihr_tr_hegemony_origin_id_timebin_idx;') - op.drop_table('ihr_tr_hegemony') - op.drop_table('ihr_metis_atlas_selection') - op.drop_table('ihr_metis_atlas_deployment') - op.drop_table('ihr_hegemonycone') - op.drop_table('ihr_hegemony_prefix') - op.drop_table('ihr_hegemony_country') - op.drop_table('ihr_hegemony_alarms') - op.drop_table('ihr_hegemony') - op.drop_table('ihr_forwarding_alarms') - op.drop_table('ihr_forwarding') - op.drop_table('ihr_disco_probes') - op.drop_table('ihr_delay_alarms') - op.drop_table('ihr_delay') - op.drop_table('ihr_atlas_delay_alarms') - op.drop_table('ihr_atlas_delay') - op.drop_table('ihr_tr_hegemony_identifier') - op.drop_table('ihr_forwarding_alarms_msms') - op.drop_table('ihr_disco_events') - op.drop_table('ihr_delay_alarms_msms') - op.drop_table('ihr_country') - op.drop_table('ihr_atlas_location') - op.drop_table('ihr_asn') - # ### end Alembic commands ### diff --git a/models/atlas_location.py b/models/atlas_location.py index 88723f3..7452f43 100644 --- a/models/atlas_location.py +++ b/models/atlas_location.py @@ -1,9 +1,14 @@ -from sqlalchemy import Column, Integer, String,BigInteger +from sqlalchemy import Column, Integer, String, BigInteger from config.database import Base -class AtlasLocation(Base): - __tablename__ = 'ihr_atlas_location' +class AtlasLocation(Base): + __tablename__ = 
'ihr_atlas_location' + __indexes__ = [ + { + 'name': 'ihr_atlas_location_af_name_type_idx', + 'columns': ['af', 'name','type'], + },] id = Column(BigInteger, primary_key=True, autoincrement=True) name = Column( String(255), diff --git a/models/delay_alarms_msms.py b/models/delay_alarms_msms.py index cdbbd72..cf640e6 100644 --- a/models/delay_alarms_msms.py +++ b/models/delay_alarms_msms.py @@ -6,6 +6,11 @@ class DelayAlarmsMsms(Base): __tablename__ = 'ihr_delay_alarms_msms' + __indexes__=[{ + 'name':'ihr_delay_alarms_msms_alarm_id', + 'columns':['alarm_id'] + }] + id = Column(BigInteger, primary_key=True, autoincrement=True) msmid = Column(BigInteger, default=0, nullable=False) diff --git a/models/disco_probes.py b/models/disco_probes.py index 78620ac..de7c576 100644 --- a/models/disco_probes.py +++ b/models/disco_probes.py @@ -1,4 +1,4 @@ -from sqlalchemy import Column, BigInteger, Float, ForeignKey, String,Integer +from sqlalchemy import Column, BigInteger, Float, ForeignKey, String, Integer from sqlalchemy.dialects.postgresql import TIMESTAMP from sqlalchemy.orm import relationship from config.database import Base @@ -7,6 +7,11 @@ class DiscoProbes(Base): __tablename__ = 'ihr_disco_probes' + __indexes__ = [{ + 'name': 'ihr_disco_probes_event_id', + 'columns': ['event_id'] + }] + id = Column(BigInteger, primary_key=True, autoincrement=True) probe_id = Column(Integer, nullable=False, diff --git a/models/forwarding_alarms_msms.py b/models/forwarding_alarms_msms.py index a23da6c..52bbb3b 100644 --- a/models/forwarding_alarms_msms.py +++ b/models/forwarding_alarms_msms.py @@ -6,6 +6,11 @@ class ForwardingAlarmsMsms(Base): __tablename__ = 'ihr_forwarding_alarms_msms' + __indexes__ = [{ + 'name': 'ihr_forwarding_alarms_msms_alarm_id', + 'columns': ['alarm_id'] + }] + id = Column(BigInteger, primary_key=True, autoincrement=True) msmid = Column(BigInteger, default=0, nullable=False) From 975d2ee2f9d4c04fbcce8c7a86c36b97266a8e1c Mon Sep 17 00:00:00 2001 From: ibraam Date: 
Mon, 23 Jun 2025 11:22:09 +0300 Subject: [PATCH 15/43] Untracked migration files --- .../versions/852def42fccc_second_migration.py | 214 ------------------ 1 file changed, 214 deletions(-) delete mode 100644 alembic/versions/852def42fccc_second_migration.py diff --git a/alembic/versions/852def42fccc_second_migration.py b/alembic/versions/852def42fccc_second_migration.py deleted file mode 100644 index 39b3011..0000000 --- a/alembic/versions/852def42fccc_second_migration.py +++ /dev/null @@ -1,214 +0,0 @@ -"""second migration - -Revision ID: 852def42fccc -Revises: -Create Date: 2025-06-19 19:32:34.889779 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = '852def42fccc' -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.alter_column('ihr_atlas_delay', 'startpoint_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('ihr_atlas_delay', 'endpoint_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.create_foreign_key('fk_atlas_delay_endpoint', 'ihr_atlas_delay', 'ihr_atlas_location', ['endpoint_id'], ['id'], ondelete='CASCADE') - op.create_foreign_key('fk_atlas_delay_startpoint', 'ihr_atlas_delay', 'ihr_atlas_location', ['startpoint_id'], ['id'], ondelete='CASCADE') - op.alter_column('ihr_atlas_delay_alarms', 'startpoint_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('ihr_atlas_delay_alarms', 'endpoint_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.create_foreign_key('fk_atlas_delay_endpoint', 'ihr_atlas_delay_alarms', 'ihr_atlas_location', ['endpoint_id'], ['id'], ondelete='CASCADE') - op.create_foreign_key('fk_atlas_delay_startpoint', 'ihr_atlas_delay_alarms', 'ihr_atlas_location', ['startpoint_id'], ['id'], ondelete='CASCADE') - op.alter_column('ihr_atlas_location', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.create_foreign_key('fk_delay_asn_id', 'ihr_delay', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.create_foreign_key(None, 'ihr_delay_alarms', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.alter_column('ihr_delay_alarms_msms', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('ihr_delay_alarms_msms', 'alarm_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('ihr_disco_events', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('ihr_disco_probes', 'id', - 
existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('ihr_disco_probes', 'event_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.drop_constraint('ihr_disco_probes_event_id_64ec2998_fk_ihr_disco_events_id', 'ihr_disco_probes', type_='foreignkey') - op.create_foreign_key('fk_disco_probes_event_id', 'ihr_disco_probes', 'ihr_disco_events', ['event_id'], ['id'], ondelete='CASCADE') - op.create_foreign_key('fk_forwarding_asn_id', 'ihr_forwarding', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.create_foreign_key('fk_forwardingalarms_asn_id', 'ihr_forwarding_alarms', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.alter_column('ihr_forwarding_alarms_msms', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('ihr_forwarding_alarms_msms', 'alarm_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.create_foreign_key('fk_hegemony_originasn_id', 'ihr_hegemony', 'ihr_asn', ['originasn_id'], ['number'], ondelete='CASCADE') - op.create_foreign_key('fk_hegemony_asn_id', 'ihr_hegemony', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.create_foreign_key('fk_hegemony_alarms_asn_id', 'ihr_hegemony_alarms', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.create_foreign_key('fk_hegemony_alarms_originasn_id', 'ihr_hegemony_alarms', 'ihr_asn', ['originasn_id'], ['number'], ondelete='CASCADE') - op.create_foreign_key('fk_hegemony_country_asn_id', 'ihr_hegemony_country', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.create_foreign_key('fk_hegemony_country_country_id', 'ihr_hegemony_country', 'ihr_country', ['country_id'], ['code'], ondelete='CASCADE') - op.create_foreign_key('fk_hegemony_prefix_originasn_id', 'ihr_hegemony_prefix', 'ihr_asn', ['originasn_id'], ['number'], ondelete='CASCADE') - 
op.create_foreign_key('fk_hegemony_prefix_asn_id', 'ihr_hegemony_prefix', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.create_foreign_key('fk_hegemony_prefix_country_id', 'ihr_hegemony_prefix', 'ihr_country', ['country_id'], ['code'], ondelete='CASCADE') - op.create_foreign_key('fk_metis_atlas_deployment_asn_id', 'ihr_metis_atlas_deployment', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.create_foreign_key('fk_metis_atlas_selection_asn_id', 'ihr_metis_atlas_selection', 'ihr_asn', ['asn_id'], ['number'], ondelete='CASCADE') - op.alter_column('ihr_tr_hegemony', 'dependency_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('ihr_tr_hegemony', 'origin_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.create_foreign_key('fk_tr_hegemony_dependency_id', 'ihr_tr_hegemony', 'ihr_tr_hegemony_identifier', ['dependency_id'], ['id'], ondelete='CASCADE') - op.create_foreign_key('fk_tr_hegemony_origin_id', 'ihr_tr_hegemony', 'ihr_tr_hegemony_identifier', ['origin_id'], ['id'], ondelete='CASCADE') - op.alter_column('ihr_tr_hegemony_identifier', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.execute('CREATE INDEX IF NOT EXISTS ihr_delay_alarms_msms_alarm_id ON ihr_delay_alarms_msms (alarm_id);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_disco_probes_event_id ON ihr_disco_probes (event_id);') - op.execute('CREATE INDEX IF NOT EXISTS ihr_forwarding_alarms_msms_alarm_id ON ihr_forwarding_alarms_msms (alarm_id);') - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.execute('DROP INDEX IF EXISTS ihr_delay_alarms_msms_alarm_id;') - op.execute('DROP INDEX IF EXISTS ihr_disco_probes_event_id;') - op.execute('DROP INDEX IF EXISTS ihr_forwarding_alarms_msms_alarm_id;') - op.alter_column('ihr_tr_hegemony_identifier', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_constraint('fk_tr_hegemony_origin_id', 'ihr_tr_hegemony', type_='foreignkey') - op.drop_constraint('fk_tr_hegemony_dependency_id', 'ihr_tr_hegemony', type_='foreignkey') - op.alter_column('ihr_tr_hegemony', 'origin_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('ihr_tr_hegemony', 'dependency_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.drop_constraint('fk_metis_atlas_selection_asn_id', 'ihr_metis_atlas_selection', type_='foreignkey') - op.drop_constraint('fk_metis_atlas_deployment_asn_id', 'ihr_metis_atlas_deployment', type_='foreignkey') - op.drop_constraint('fk_hegemony_prefix_country_id', 'ihr_hegemony_prefix', type_='foreignkey') - op.drop_constraint('fk_hegemony_prefix_asn_id', 'ihr_hegemony_prefix', type_='foreignkey') - op.drop_constraint('fk_hegemony_prefix_originasn_id', 'ihr_hegemony_prefix', type_='foreignkey') - op.drop_constraint('fk_hegemony_country_country_id', 'ihr_hegemony_country', type_='foreignkey') - op.drop_constraint('fk_hegemony_country_asn_id', 'ihr_hegemony_country', type_='foreignkey') - op.drop_constraint('fk_hegemony_alarms_originasn_id', 'ihr_hegemony_alarms', type_='foreignkey') - op.drop_constraint('fk_hegemony_alarms_asn_id', 'ihr_hegemony_alarms', type_='foreignkey') - op.drop_constraint('fk_hegemony_asn_id', 'ihr_hegemony', type_='foreignkey') - op.drop_constraint('fk_hegemony_originasn_id', 'ihr_hegemony', type_='foreignkey') - op.alter_column('ihr_forwarding_alarms_msms', 'alarm_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - 
existing_nullable=False) - op.alter_column('ihr_forwarding_alarms_msms', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_constraint('fk_forwardingalarms_asn_id', 'ihr_forwarding_alarms', type_='foreignkey') - op.drop_constraint('fk_forwarding_asn_id', 'ihr_forwarding', type_='foreignkey') - op.drop_constraint('fk_disco_probes_event_id', 'ihr_disco_probes', type_='foreignkey') - op.create_foreign_key('ihr_disco_probes_event_id_64ec2998_fk_ihr_disco_events_id', 'ihr_disco_probes', 'ihr_disco_events', ['event_id'], ['id'], initially='DEFERRED', deferrable=True) - op.alter_column('ihr_disco_probes', 'event_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('ihr_disco_probes', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.alter_column('ihr_disco_events', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.alter_column('ihr_delay_alarms_msms', 'alarm_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('ihr_delay_alarms_msms', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'ihr_delay_alarms', type_='foreignkey') - op.drop_constraint('fk_delay_asn_id', 'ihr_delay', type_='foreignkey') - op.alter_column('ihr_atlas_location', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_constraint('fk_atlas_delay_startpoint', 'ihr_atlas_delay_alarms', type_='foreignkey') - op.drop_constraint('fk_atlas_delay_endpoint', 'ihr_atlas_delay_alarms', type_='foreignkey') - op.alter_column('ihr_atlas_delay_alarms', 'endpoint_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - 
op.alter_column('ihr_atlas_delay_alarms', 'startpoint_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.drop_constraint('fk_atlas_delay_startpoint', 'ihr_atlas_delay', type_='foreignkey') - op.drop_constraint('fk_atlas_delay_endpoint', 'ihr_atlas_delay', type_='foreignkey') - op.alter_column('ihr_atlas_delay', 'endpoint_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('ihr_atlas_delay', 'startpoint_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - # ### end Alembic commands ### From 6ad10dd7ede3c1a641f926b3637419ec4de78a2b Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 23 Jun 2025 14:36:33 +0300 Subject: [PATCH 16/43] Renamed to hegemony_cone.py and added alembic/versions to gitignore --- .gitignore | 4 +++- models/{hegemonycone.py => hegemony_cone.py} | 0 repositories/hegemony_cone_repository.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) rename models/{hegemonycone.py => hegemony_cone.py} (100%) diff --git a/.gitignore b/.gitignore index 7b004e5..92a6143 100644 --- a/.gitignore +++ b/.gitignore @@ -191,4 +191,6 @@ cython_debug/ # exclude from AI features like autocomplete and code analysis. 
Recommended for sensitive data # refer to https://docs.cursor.com/context/ignore-files .cursorignore -.cursorindexingignore \ No newline at end of file +.cursorindexingignore + +alembic/versions/ \ No newline at end of file diff --git a/models/hegemonycone.py b/models/hegemony_cone.py similarity index 100% rename from models/hegemonycone.py rename to models/hegemony_cone.py diff --git a/repositories/hegemony_cone_repository.py b/repositories/hegemony_cone_repository.py index 5c6fd4a..8e2b067 100644 --- a/repositories/hegemony_cone_repository.py +++ b/repositories/hegemony_cone_repository.py @@ -1,6 +1,6 @@ from datetime import datetime, timedelta from sqlalchemy.orm import Session -from models.hegemonycone import HegemonyCone +from models.hegemony_cone import HegemonyCone from typing import Optional, List, Tuple from globals import page_size From 7e9266741504583e6b861ce5075684c7910af552 Mon Sep 17 00:00:00 2001 From: ibraam Date: Wed, 25 Jun 2025 12:58:21 +0300 Subject: [PATCH 17/43] Returned some id(primary key) of tables back from BigInteger to Integer --- models/atlas_delay.py | 4 ++-- models/atlas_delay_alarms.py | 6 +++--- models/atlas_location.py | 2 +- models/delay_alarms_msms.py | 4 ++-- models/disco_events.py | 2 +- models/disco_probes.py | 4 ++-- models/forwarding_alarms_msms.py | 4 ++-- models/tr_hegemony.py | 4 ++-- models/tr_hegemony_identifier.py | 2 +- 9 files changed, 16 insertions(+), 16 deletions(-) diff --git a/models/atlas_delay.py b/models/atlas_delay.py index b534958..798aaec 100644 --- a/models/atlas_delay.py +++ b/models/atlas_delay.py @@ -61,13 +61,13 @@ class AtlasDelay(Base): nbrealrtts = Column(Integer, default=0, nullable=False, doc='Number of RTT samples directly obtained from traceroutes (as opposed to differential RTTs).') - startpoint_id = Column(BigInteger, + startpoint_id = Column(Integer, ForeignKey( 'ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_startpoint'), nullable=False, doc='Starting location for the 
delay estimation.') - endpoint_id = Column(BigInteger, + endpoint_id = Column(Integer, ForeignKey( 'ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_endpoint'), nullable=False, diff --git a/models/atlas_delay_alarms.py b/models/atlas_delay_alarms.py index b65b2ee..d0396ff 100644 --- a/models/atlas_delay_alarms.py +++ b/models/atlas_delay_alarms.py @@ -1,4 +1,4 @@ -from sqlalchemy import Column, BigInteger, Float, ForeignKey, PrimaryKeyConstraint +from sqlalchemy import Column, BigInteger, Float, ForeignKey, PrimaryKeyConstraint,Integer from sqlalchemy.dialects.postgresql import TIMESTAMP from sqlalchemy.orm import relationship from config.database import Base @@ -35,12 +35,12 @@ class AtlasDelayAlarms(Base): deviation = Column(Float, default=0.0, nullable=False, doc='Significance of the AS Hegemony change.') - startpoint_id = Column(BigInteger, + startpoint_id = Column(Integer, ForeignKey('ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_startpoint'), nullable=False, doc='Starting location reported as anomalous.') - endpoint_id = Column(BigInteger, + endpoint_id = Column(Integer, ForeignKey('ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_endpoint'), nullable=False, doc='Ending location reported as anomalous.') diff --git a/models/atlas_location.py b/models/atlas_location.py index 7452f43..a2e54f1 100644 --- a/models/atlas_location.py +++ b/models/atlas_location.py @@ -9,7 +9,7 @@ class AtlasLocation(Base): 'name': 'ihr_atlas_location_af_name_type_idx', 'columns': ['af', 'name','type'], },] - id = Column(BigInteger, primary_key=True, autoincrement=True) + id = Column(Integer, primary_key=True, autoincrement=True) name = Column( String(255), nullable=False, diff --git a/models/delay_alarms_msms.py b/models/delay_alarms_msms.py index cf640e6..4c9a1b7 100644 --- a/models/delay_alarms_msms.py +++ b/models/delay_alarms_msms.py @@ -11,10 +11,10 @@ class DelayAlarmsMsms(Base): 'columns':['alarm_id'] }] - id = 
Column(BigInteger, primary_key=True, autoincrement=True) + id = Column(Integer, primary_key=True, autoincrement=True) msmid = Column(BigInteger, default=0, nullable=False) probeid = Column(Integer, default=0, nullable=False) - alarm_id = Column(BigInteger, nullable=False) + alarm_id = Column(Integer, nullable=False) diff --git a/models/disco_events.py b/models/disco_events.py index 19833da..656872b 100644 --- a/models/disco_events.py +++ b/models/disco_events.py @@ -17,7 +17,7 @@ class DiscoEvents(Base): }] - id = Column(BigInteger, primary_key=True, autoincrement=True) + id = Column(Integer, primary_key=True, autoincrement=True) mongoid = Column( String(24), nullable=False, diff --git a/models/disco_probes.py b/models/disco_probes.py index de7c576..59a4a1d 100644 --- a/models/disco_probes.py +++ b/models/disco_probes.py @@ -12,7 +12,7 @@ class DiscoProbes(Base): 'columns': ['event_id'] }] - id = Column(BigInteger, primary_key=True, autoincrement=True) + id = Column(Integer, primary_key=True, autoincrement=True) probe_id = Column(Integer, nullable=False, doc='Atlas probe ID of disconnected probe.') @@ -38,7 +38,7 @@ class DiscoProbes(Base): lon = Column(Float, default=0.0, nullable=False, doc='Longitude of the probe during the network detection as reported by RIPE Atlas.') - event_id = Column(BigInteger, + event_id = Column(Integer, ForeignKey('ihr_disco_events.id', ondelete='CASCADE', name='fk_disco_probes_event_id'), nullable=False, diff --git a/models/forwarding_alarms_msms.py b/models/forwarding_alarms_msms.py index 52bbb3b..5681b42 100644 --- a/models/forwarding_alarms_msms.py +++ b/models/forwarding_alarms_msms.py @@ -11,10 +11,10 @@ class ForwardingAlarmsMsms(Base): 'columns': ['alarm_id'] }] - id = Column(BigInteger, primary_key=True, autoincrement=True) + id = Column(Integer, primary_key=True, autoincrement=True) msmid = Column(BigInteger, default=0, nullable=False) probeid = Column(Integer, default=0, nullable=False) - alarm_id = Column(BigInteger, 
nullable=False) + alarm_id = Column(Integer, nullable=False) diff --git a/models/tr_hegemony.py b/models/tr_hegemony.py index cc7c571..f32be6a 100644 --- a/models/tr_hegemony.py +++ b/models/tr_hegemony.py @@ -41,13 +41,13 @@ class TRHegemony(Base): nbsamples = Column(Integer, default=0, nullable=False, doc='The number of probe ASes for which we have traceroutes to the origin in the time interval. We only include AS Hegemony values that are based on traceroutes from at least ten probe ASes.') - dependency_id = Column(BigInteger, + dependency_id = Column(Integer, ForeignKey('ihr_tr_hegemony_identifier.id', ondelete='CASCADE', name='fk_tr_hegemony_dependency_id'), nullable=False, doc='Dependency. Transit network or IXP commonly seen in traceroutes towards the origin.') - origin_id = Column(BigInteger, + origin_id = Column(Integer, ForeignKey('ihr_tr_hegemony_identifier.id', ondelete='CASCADE', name='fk_tr_hegemony_origin_id'), nullable=False, diff --git a/models/tr_hegemony_identifier.py b/models/tr_hegemony_identifier.py index 7d0a876..e48b9a8 100644 --- a/models/tr_hegemony_identifier.py +++ b/models/tr_hegemony_identifier.py @@ -5,7 +5,7 @@ class TRHegemonyIdentifier(Base): __tablename__ = 'ihr_tr_hegemony_identifier' - id = Column(BigInteger, autoincrement=True, primary_key=True) + id = Column(Integer, autoincrement=True, primary_key=True) name = Column(String(255), nullable=False, doc='Value of the identifier. The meaning depends on the identifier type:
  • type=AS: ASN
  • type=IX: PeeringDB IX ID
  • type=MB: IXP member (format: ix_id;asn)
  • type=IP: Interface IP of an IXP member
') From 1a4b713d8e21faeb67cc7ef26092c2e2a147e38c Mon Sep 17 00:00:00 2001 From: ibraam Date: Wed, 25 Jun 2025 13:17:14 +0300 Subject: [PATCH 18/43] Removed foreign key constraints from some models --- models/atlas_delay.py | 11 +---------- models/atlas_delay_alarms.py | 5 ----- models/delay.py | 6 +----- models/delay_alarms.py | 3 --- models/forwarding.py | 3 --- models/forwarding_alarms.py | 4 +--- models/hegemony.py | 6 +----- models/hegemony_alarms.py | 7 ------- models/hegemony_country.py | 5 +---- models/hegemony_prefix.py | 7 ------- models/metis_atlas_deployment.py | 3 +-- models/metis_atlas_selection.py | 2 -- models/tr_hegemony.py | 7 +------ 13 files changed, 7 insertions(+), 62 deletions(-) diff --git a/models/atlas_delay.py b/models/atlas_delay.py index 798aaec..bc189ec 100644 --- a/models/atlas_delay.py +++ b/models/atlas_delay.py @@ -62,18 +62,9 @@ class AtlasDelay(Base): doc='Number of RTT samples directly obtained from traceroutes (as opposed to differential RTTs).') startpoint_id = Column(Integer, - ForeignKey( - 'ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_startpoint'), nullable=False, doc='Starting location for the delay estimation.') endpoint_id = Column(Integer, - ForeignKey( - 'ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_endpoint'), nullable=False, - doc='Ending location for the delay estimation.') - - startpoint = relationship('AtlasLocation', foreign_keys=[ - startpoint_id], backref='location_startpoint') - endpoint = relationship('AtlasLocation', foreign_keys=[ - endpoint_id], backref='location_endpoint') + doc='Ending location for the delay estimation.') \ No newline at end of file diff --git a/models/atlas_delay_alarms.py b/models/atlas_delay_alarms.py index d0396ff..673beb3 100644 --- a/models/atlas_delay_alarms.py +++ b/models/atlas_delay_alarms.py @@ -36,14 +36,9 @@ class AtlasDelayAlarms(Base): doc='Significance of the AS Hegemony change.') startpoint_id = Column(Integer, - 
ForeignKey('ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_startpoint'), nullable=False, doc='Starting location reported as anomalous.') endpoint_id = Column(Integer, - ForeignKey('ihr_atlas_location.id', ondelete='CASCADE', name='fk_atlas_delay_endpoint'), nullable=False, doc='Ending location reported as anomalous.') - - startpoint = relationship('AtlasLocation', foreign_keys=[startpoint_id], backref='anomalous_startpoint') - endpoint = relationship('AtlasLocation', foreign_keys=[endpoint_id], backref='anomalous_endpoint') diff --git a/models/delay.py b/models/delay.py index 55f9443..f82c619 100644 --- a/models/delay.py +++ b/models/delay.py @@ -31,9 +31,5 @@ class Delay(Base): magnitude = Column(Float, default=0.0, nullable=False, doc='Cumulated link delay deviation. Values close to zero represent usual delays for the network, whereas higher values stand for significant links congestion in the monitored network.') - asn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_delay_asn_id'), - nullable=False, + asn_id = Column(BigInteger, nullable=False, doc='ASN or IXP ID of the monitored network (see number in /network/).') - - asn = relationship('ASN', foreign_keys=[asn_id]) diff --git a/models/delay_alarms.py b/models/delay_alarms.py index 8151d88..8b6e0a8 100644 --- a/models/delay_alarms.py +++ b/models/delay_alarms.py @@ -74,10 +74,7 @@ class DelayAlarms(Base): ) asn_id = Column( BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE'), nullable=False, doc='ASN or IXPID of the reported network.' ) - asn_relation = relationship('ASN', backref='delay_alarms') - diff --git a/models/forwarding.py b/models/forwarding.py index 5aa6ef6..ff8a85c 100644 --- a/models/forwarding.py +++ b/models/forwarding.py @@ -32,8 +32,5 @@ class Forwarding(Base): doc='Cumulated link delay deviation. 
Values close to zero represent usual delays for the network, whereas higher values stand for significant links congestion in the monitored network.') asn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_forwarding_asn_id'), nullable=False, doc='ASN or IXP ID of the monitored network (see number in /network/).') - - asn = relationship('ASN', foreign_keys=[asn_id]) diff --git a/models/forwarding_alarms.py b/models/forwarding_alarms.py index 5532339..8adec29 100644 --- a/models/forwarding_alarms.py +++ b/models/forwarding_alarms.py @@ -41,7 +41,5 @@ class ForwardingAlarms(Base): msm_prb_ids = Column(JSONB, nullable=True, default=None, doc='List of Atlas measurement and probe IDs used to compute this alarm.') - asn_id = Column(BigInteger, ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_forwardingalarms_asn_id'), + asn_id = Column(BigInteger, nullable=False, doc='ASN or IXPID of the reported network.') - - asn_relation = relationship('ASN', back_populates='forwarding_alarms') diff --git a/models/hegemony.py b/models/hegemony.py index a1d9cd3..db3f22b 100644 --- a/models/hegemony.py +++ b/models/hegemony.py @@ -48,16 +48,12 @@ class Hegemony(Base): doc='Address Family (IP version), values are either 4 or 6.') asn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_asn_id'), nullable=False, doc='Dependency. Transit network commonly seen in BGP paths towards originasn.') originasn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_originasn_id'), nullable=False, doc='Dependent network, it can be any public ASN. 
Retrieve all dependencies of a network by setting only this parameter and a timebin.') - asn = relationship('ASN', foreign_keys=[asn_id]) - originasn = relationship('ASN', foreign_keys=[originasn_id]) - + diff --git a/models/hegemony_alarms.py b/models/hegemony_alarms.py index 59ae8c2..e2261d5 100644 --- a/models/hegemony_alarms.py +++ b/models/hegemony_alarms.py @@ -39,16 +39,9 @@ class HegemonyAlarms(Base): doc='Address Family (IP version), values are either 4 or 6.') asn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', - name='fk_hegemony_alarms_asn_id'), nullable=False, doc='ASN of the anomalous dependency (transit network).') originasn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', - name='fk_hegemony_alarms_originasn_id'), nullable=False, doc='ASN of the reported dependent network.') - - asn = relationship('ASN', foreign_keys=[asn_id]) - originasn = relationship('ASN', foreign_keys=[originasn_id]) diff --git a/models/hegemony_country.py b/models/hegemony_country.py index 8cde03a..38c5c96 100644 --- a/models/hegemony_country.py +++ b/models/hegemony_country.py @@ -48,14 +48,11 @@ class HegemonyCountry(Base): doc='If True, then origin ASNs of BGP path are ignored (focus only on transit networks).') asn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_country_asn_id'), nullable=False, doc='Dependency. Network commonly seen in BGP paths towards monitored country.') country_id = Column(String(4), - ForeignKey('ihr_country.code', ondelete='CASCADE', name='fk_hegemony_country_country_id'), nullable=False, doc='Monitored country. 
Retrieve all dependencies of a country by setting only this parameter and a timebin.') - asn = relationship('ASN') - country = relationship('Country') + \ No newline at end of file diff --git a/models/hegemony_prefix.py b/models/hegemony_prefix.py index 1056a17..62df125 100644 --- a/models/hegemony_prefix.py +++ b/models/hegemony_prefix.py @@ -71,19 +71,12 @@ class HegemonyPrefix(Base): doc='True if the prefix is originated by multiple ASNs.') asn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_prefix_asn_id'), nullable=False, doc='Dependency. Network commonly seen in BGP paths towards monitored prefix.') originasn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_hegemony_prefix_originasn_id'), nullable=False, doc='Network seen as originating the monitored prefix.') country_id = Column(String(4), - ForeignKey('ihr_country.code', ondelete='CASCADE', name='fk_hegemony_prefix_country_id'), nullable=False, doc="Country for the monitored prefix identified by Maxmind's Geolite2 geolocation database.") - - asn = relationship('ASN', foreign_keys=[asn_id], back_populates='prefix_asn') - originasn = relationship('ASN', foreign_keys=[originasn_id], back_populates='prefix_originasn') - country = relationship('Country') diff --git a/models/metis_atlas_deployment.py b/models/metis_atlas_deployment.py index e6f3f0b..fe56049 100644 --- a/models/metis_atlas_deployment.py +++ b/models/metis_atlas_deployment.py @@ -44,8 +44,7 @@ class MetisAtlasDeployment(Base): doc='The number of probe ASes for which we have traceroutes to this AS in the time interval. 
We currently only include candidates that were reached by at least 50% of probe ASes, hence these values are always large.') asn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_metis_atlas_deployment_asn_id'), nullable=False, doc="Atlas probes' Autonomous System Number.") - asn = relationship('ASN') + \ No newline at end of file diff --git a/models/metis_atlas_selection.py b/models/metis_atlas_selection.py index 180c2a0..5ed105f 100644 --- a/models/metis_atlas_selection.py +++ b/models/metis_atlas_selection.py @@ -41,8 +41,6 @@ class MetisAtlasSelection(Base): doc='The mean distance value (e.g., AS-path length) we get when using all ASes up to this rank. This decreases with increasing rank, since lower ranks represent closer ASes.') asn_id = Column(BigInteger, - ForeignKey('ihr_asn.number', ondelete='CASCADE', name='fk_metis_atlas_selection_asn_id'), nullable=False, doc="Atlas probes' Autonomous System Number.") - asn = relationship('ASN') diff --git a/models/tr_hegemony.py b/models/tr_hegemony.py index f32be6a..6be8e6c 100644 --- a/models/tr_hegemony.py +++ b/models/tr_hegemony.py @@ -42,16 +42,11 @@ class TRHegemony(Base): doc='The number of probe ASes for which we have traceroutes to the origin in the time interval. We only include AS Hegemony values that are based on traceroutes from at least ten probe ASes.') dependency_id = Column(Integer, - ForeignKey('ihr_tr_hegemony_identifier.id', ondelete='CASCADE', - name='fk_tr_hegemony_dependency_id'), nullable=False, doc='Dependency. Transit network or IXP commonly seen in traceroutes towards the origin.') origin_id = Column(Integer, - ForeignKey('ihr_tr_hegemony_identifier.id', ondelete='CASCADE', - name='fk_tr_hegemony_origin_id'), nullable=False, doc='Dependent network, it can be any public ASN. 
Retrieve all dependencies of a network by setting only this parameter and a timebin.') - dependency = relationship('TRHegemonyIdentifier', foreign_keys=[dependency_id]) - origin = relationship('TRHegemonyIdentifier', foreign_keys=[origin_id], back_populates='local_graph') + \ No newline at end of file From 92c6428a4bdebb09de6cd4ed4731089547617caa Mon Sep 17 00:00:00 2001 From: ibraam Date: Wed, 25 Jun 2025 20:25:26 +0300 Subject: [PATCH 19/43] Added networks-endpoint --- controllers/networks_controller.py | 63 +++++++++++++++++++++++++++++ dtos/networks_dto.py | 22 ++++++++++ repositories/networks_repository.py | 55 +++++++++++++++++++++++++ services/networks_service.py | 36 +++++++++++++++++ 4 files changed, 176 insertions(+) create mode 100644 controllers/networks_controller.py create mode 100644 dtos/networks_dto.py create mode 100644 repositories/networks_repository.py create mode 100644 services/networks_service.py diff --git a/controllers/networks_controller.py b/controllers/networks_controller.py new file mode 100644 index 0000000..1e2570d --- /dev/null +++ b/controllers/networks_controller.py @@ -0,0 +1,63 @@ +from fastapi import APIRouter, Depends, Query, Request +from sqlalchemy.orm import Session +from typing import Optional, List +from services.networks_service import NetworksService +from dtos.networks_dto import NetworksDTO +from dtos.generic_response_dto import GenericResponseDTO, build_url +from config.database import get_db +from globals import page_size + +router = APIRouter(prefix="/networks", tags=["Networks"]) + + +class NetworksController: + service = NetworksService() + + @staticmethod + @router.get("/", response_model=GenericResponseDTO[NetworksDTO]) + async def get_networks( + request: Request, + db: Session = Depends(get_db), + name: Optional[str] = Query( + None, description="Search for a substring in networks name"), + number: Optional[str] = Query( + None, description="Search by ASN or IXP ID. It can be either a single value (e.g. 
2497) or a list of comma separated values (e.g. 2497,2500,2501)"), + number_gte: Optional[int] = Query( + None, description="Autonomous System Number (ASN) or IXP ID. Note that IXP ID are negative to avoid colision."), + number_lte: Optional[int] = Query( + None, description="Autonomous System Number (ASN) or IXP ID. Note that IXP ID are negative to avoid colision."), + search: Optional[str] = Query( + None, description="Search for both ASN/IXPID and substring in names"), + page: Optional[int] = Query(1, ge=1, description="A page number within the paginated result set."), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results.") + ) -> GenericResponseDTO[NetworksDTO]: + """ + List networks referenced on IHR (see. /network_delay/locations/ for network delay locations). + Can be searched by keyword, ASN, or IXPID. Range of ASN/IXPID can be obtained with parameters number__lte and number__gte. + """ + # Convert comma-separated numbers to list if provided + number_list = [int(x.strip()) + for x in number.split(",")] if number else None + + networks, total_count = NetworksController.service.get_networks( + db, + name=name, + numbers=number_list, + number_gte=number_gte, + number_lte=number_lte, + search=search, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=networks + ) diff --git a/dtos/networks_dto.py b/dtos/networks_dto.py new file mode 100644 index 0000000..237859b --- /dev/null +++ b/dtos/networks_dto.py @@ -0,0 +1,22 @@ +from pydantic import BaseModel + + +class NetworksDTO(BaseModel): + number: int + name: str + hegemony: bool + delay_forwarding: bool + disco: bool + + class Config: + from_attributes = True + + @staticmethod + def from_model(asn): + 
return NetworksDTO( + number=asn.number, + name=asn.name, + hegemony=asn.ashash, + delay_forwarding=asn.tartiflette, + disco=asn.disco + ) diff --git a/repositories/networks_repository.py b/repositories/networks_repository.py new file mode 100644 index 0000000..9117875 --- /dev/null +++ b/repositories/networks_repository.py @@ -0,0 +1,55 @@ +from sqlalchemy.orm import Session +from sqlalchemy import or_,String +from models.asn import ASN +from typing import Optional, List, Tuple +from globals import page_size + + +class NetworksRepository: + def get_all( + self, + db: Session, + name: Optional[str] = None, + numbers: Optional[List[int]] = None, + number_gte: Optional[int] = None, + number_lte: Optional[int] = None, + search: Optional[str] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[ASN], int]: + query = db.query(ASN) + + # Apply filters + if name: + query = query.filter(ASN.name.ilike(f"%{name}%")) + if numbers: + query = query.filter(ASN.number.in_(numbers)) + if number_gte: + query = query.filter(ASN.number >= number_gte) + if number_lte: + query = query.filter(ASN.number <= number_lte) + if search: + # Handle AS/IX prefix in search + search_value = search + if search.upper().startswith(("AS", "IX")): + try: + search_value = str(int(search[2:])) + except ValueError: + pass + + query = query.filter(or_( + ASN.number.cast(String).contains(search_value), + ASN.name.ilike(f"%{search}%") + )) + + total_count = query.count() + + # Apply ordering + if order_by and hasattr(ASN, order_by): + query = query.order_by(getattr(ASN, order_by)) + + # Apply pagination + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/networks_service.py b/services/networks_service.py new file mode 100644 index 0000000..08b9dbf --- /dev/null +++ b/services/networks_service.py @@ -0,0 +1,36 @@ +from sqlalchemy.orm import Session +from repositories.networks_repository import 
NetworksRepository +from dtos.networks_dto import NetworksDTO +from typing import Optional, List, Tuple + + +class NetworksService: + def __init__(self): + self.repository = NetworksRepository() + + def get_networks( + self, + db: Session, + name: Optional[str] = None, + numbers: Optional[List[int]] = None, + number_gte: Optional[int] = None, + number_lte: Optional[int] = None, + search: Optional[str] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[NetworksDTO], int]: + """ + Get network data with various filtering options. + """ + networks, total_count = self.repository.get_all( + db, + name=name, + numbers=numbers, + number_gte=number_gte, + number_lte=number_lte, + search=search, + page=page, + order_by=order_by + ) + + return [NetworksDTO.from_model(network) for network in networks], total_count From bb3e0d3146a08133dd3559fbd411b621c2d12e55 Mon Sep 17 00:00:00 2001 From: ibraam Date: Sun, 29 Jun 2025 19:21:47 +0300 Subject: [PATCH 20/43] Added link-delay endpoint --- controllers/hegemony_cone_controller.py | 29 +--------- controllers/link_controller.py | 70 +++++++++++++++++++++++++ dtos/link_delay_dto.py | 12 +++++ models/delay.py | 10 +++- repositories/delay_repository.py | 44 ++++++++++++++++ services/link_service.py | 40 ++++++++++++++ utils.py | 40 ++++++++++++++ 7 files changed, 216 insertions(+), 29 deletions(-) create mode 100644 controllers/link_controller.py create mode 100644 dtos/link_delay_dto.py create mode 100644 repositories/delay_repository.py create mode 100644 services/link_service.py create mode 100644 utils.py diff --git a/controllers/hegemony_cone_controller.py b/controllers/hegemony_cone_controller.py index 2cd94e7..27be814 100644 --- a/controllers/hegemony_cone_controller.py +++ b/controllers/hegemony_cone_controller.py @@ -7,6 +7,7 @@ from config.database import get_db from typing import Optional, List from globals import page_size +from utils import * router = APIRouter(prefix="/hegemony/cones", 
tags=["Hegemony Cones"]) @@ -42,33 +43,7 @@ async def get_hegemony_cones( networks). """ - # Check if at least one time parameter exists - if not any([timebin, timebin_gte, timebin_lte]): - raise HTTPException( - status_code=400, - detail="No timebin parameter. Please provide a timebin value or a range of values with timebin__lte and timebin__gte." - ) - - # If timebin is not provided, both timebin_gte and timebin_lte must be provided - if not timebin and not (timebin_gte and timebin_lte): - raise HTTPException( - status_code=400, - detail="Invalid timebin range. Please provide both timebin__lte and timebin__gte." - ) - - # If exact timebin is provided, it overrides the range parameters - if timebin: - timebin_gte = timebin - timebin_lte = timebin - - # Validate date range (max 7 days) - if timebin_gte and timebin_lte: - delta = timebin_lte - timebin_gte - if delta > timedelta(days=7): - raise HTTPException( - status_code=400, - detail="The given timebin range is too large. Should be less than 7 days." 
- ) + timebin_gte, timebin_lte = validate_timebin_params(timebin, timebin_gte, timebin_lte) # Convert comma-separated ASNs to list asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None diff --git a/controllers/link_controller.py b/controllers/link_controller.py new file mode 100644 index 0000000..cb9ae1b --- /dev/null +++ b/controllers/link_controller.py @@ -0,0 +1,70 @@ +from fastapi import APIRouter, Depends, Query, Request, HTTPException +from datetime import datetime +from sqlalchemy.orm import Session +from services.link_service import LinkService +from dtos.generic_response_dto import GenericResponseDTO, build_url +from dtos.link_delay_dto import LinkDelayDTO +from config.database import get_db +from typing import Optional +from globals import page_size +from utils import validate_timebin_params + +router = APIRouter(prefix="/link", tags=["Link"]) + + +class LinkController: + service = LinkService() + + @staticmethod + @router.get("/delay", response_model=GenericResponseDTO[LinkDelayDTO]) + async def get_link_delays( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin_gte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin_lte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + asn: Optional[str] = Query( + None, description="ASN or IXP ID of the monitored network (see number in /network/). Can be a single value or a list of comma separated values."), + magnitude: Optional[float] = Query( + None, description="Cumulated link delay deviation. 
Values close to zero represent usual delays for the network, whereas higher values stand for significant links congestion in the monitored network."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results.") + ) -> GenericResponseDTO[LinkDelayDTO]: + """ + List cumulated link delay changes (magnitude) for each monitored network. Magnitude values close to zero represent usual delays for the network, whereas higher values stand for significant links congestion in the monitored network. + The details of each congested link is available in /delay/alarms/. +
    +
  • Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
  • +
  • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
  • +
+ """ + timebin_gte, timebin_lte = validate_timebin_params(timebin, timebin_gte, timebin_lte) + + # Convert comma-separated ASNs to list + asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None + + delays, total_count = LinkController.service.get_link_delays( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_list, + magnitude=magnitude, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=delays + ) diff --git a/dtos/link_delay_dto.py b/dtos/link_delay_dto.py new file mode 100644 index 0000000..7e38e48 --- /dev/null +++ b/dtos/link_delay_dto.py @@ -0,0 +1,12 @@ +from pydantic import BaseModel +from datetime import datetime + + +class LinkDelayDTO(BaseModel): + timebin: datetime + asn: int + magnitude: float + asn_name: str + + class Config: + from_attributes = True diff --git a/models/delay.py b/models/delay.py index f82c619..8a3a70a 100644 --- a/models/delay.py +++ b/models/delay.py @@ -31,5 +31,11 @@ class Delay(Base): magnitude = Column(Float, default=0.0, nullable=False, doc='Cumulated link delay deviation. 
Values close to zero represent usual delays for the network, whereas higher values stand for significant links congestion in the monitored network.') - asn_id = Column(BigInteger, nullable=False, - doc='ASN or IXP ID of the monitored network (see number in /network/).') + asn = Column('asn_id', BigInteger, nullable=False, + doc='ASN or IXP ID of the monitored network (see number in /network/).') + + # Add relationship without foreign key constraint + asn_relation = relationship('ASN', + primaryjoin='Delay.asn == ASN.number', + foreign_keys=[asn], + backref='delays') diff --git a/repositories/delay_repository.py b/repositories/delay_repository.py new file mode 100644 index 0000000..3136267 --- /dev/null +++ b/repositories/delay_repository.py @@ -0,0 +1,44 @@ +from datetime import datetime +from sqlalchemy.orm import Session +from models.delay import Delay +from typing import Optional, List, Tuple +from globals import page_size +from sqlalchemy.orm import joinedload + + +class DelayRepository: + def get_all( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + magnitude: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[Delay], int]: + query = db.query(Delay).join(Delay.asn_relation) + + # Apply filters + if timebin_gte: + query = query.filter(Delay.timebin >= timebin_gte) + if timebin_lte: + query = query.filter(Delay.timebin <= timebin_lte) + if asn_ids: + query = query.filter(Delay.asn.in_(asn_ids)) + if magnitude is not None: + query = query.filter(Delay.magnitude == magnitude) + + total_count = query.count() + + # Apply ordering + if order_by and hasattr(Delay, order_by): + query = query.order_by(getattr(Delay, order_by)) + else: + query = query.order_by(Delay.timebin) + + # Apply pagination + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git 
a/services/link_service.py b/services/link_service.py new file mode 100644 index 0000000..45809df --- /dev/null +++ b/services/link_service.py @@ -0,0 +1,40 @@ +from sqlalchemy.orm import Session +from repositories.delay_repository import DelayRepository +from dtos.link_delay_dto import LinkDelayDTO +from typing import Optional, List, Tuple +from datetime import datetime + + +class LinkService: + def __init__(self): + self.delay_repository = DelayRepository() + + def get_link_delays( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + magnitude: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[LinkDelayDTO], int]: + """ + Get link delay data with filtering. + """ + delays, total_count = self.delay_repository.get_all( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_ids, + magnitude=magnitude, + page=page, + order_by=order_by + ) + + return [LinkDelayDTO( + timebin=delay.timebin, + asn=delay.asn, + magnitude=delay.magnitude, + asn_name=delay.asn_relation.name if delay.asn_relation else None + ) for delay in delays], total_count diff --git a/utils.py b/utils.py new file mode 100644 index 0000000..e2bd0fd --- /dev/null +++ b/utils.py @@ -0,0 +1,40 @@ +from fastapi import HTTPException +from datetime import datetime, timedelta +from typing import Optional + + +def validate_timebin_params( + timebin: Optional[datetime], + timebin_gte: Optional[datetime], + timebin_lte: Optional[datetime] + ) -> tuple[datetime, datetime]: + + # Check if at least one time parameter exists + if not any([timebin, timebin_gte, timebin_lte]): + raise HTTPException( + status_code=400, + detail="No timebin parameter. Please provide a timebin value or a range of values with timebin__lte and timebin__gte." 
+ ) + + # If timebin is not provided, both timebin_gte and timebin_lte must be provided + if not timebin and not (timebin_gte and timebin_lte): + raise HTTPException( + status_code=400, + detail="Invalid timebin range. Please provide both timebin__lte and timebin__gte." + ) + + # If exact timebin is provided, it overrides the range parameters + if timebin: + timebin_gte = timebin + timebin_lte = timebin + + # Validate date range (max 7 days) + if timebin_gte and timebin_lte: + delta = timebin_lte - timebin_gte + if delta > timedelta(days=7): + raise HTTPException( + status_code=400, + detail="The given timebin range is too large. Should be less than 7 days." + ) + + return timebin_gte, timebin_lte From 58ef29b28d3b122c8c435b5228419a9579cdad73 Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 3 Jul 2025 12:31:28 +0300 Subject: [PATCH 21/43] Added link-forwarding endpoint --- controllers/link_controller.py | 62 ++++++++++++++++++++++++++- dtos/link_forwarding_dto.py | 12 ++++++ models/forwarding.py | 11 +++-- repositories/forwarding_repository.py | 43 +++++++++++++++++++ services/link_service.py | 33 ++++++++++++++ 5 files changed, 157 insertions(+), 4 deletions(-) create mode 100644 dtos/link_forwarding_dto.py create mode 100644 repositories/forwarding_repository.py diff --git a/controllers/link_controller.py b/controllers/link_controller.py index cb9ae1b..cb0ab4f 100644 --- a/controllers/link_controller.py +++ b/controllers/link_controller.py @@ -4,6 +4,7 @@ from services.link_service import LinkService from dtos.generic_response_dto import GenericResponseDTO, build_url from dtos.link_delay_dto import LinkDelayDTO +from dtos.link_forwarding_dto import LinkForwardingDTO from config.database import get_db from typing import Optional from globals import page_size @@ -43,7 +44,8 @@ async def get_link_delays(
  • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
  • """ - timebin_gte, timebin_lte = validate_timebin_params(timebin, timebin_gte, timebin_lte) + timebin_gte, timebin_lte = validate_timebin_params( + timebin, timebin_gte, timebin_lte) # Convert comma-separated ASNs to list asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None @@ -68,3 +70,61 @@ async def get_link_delays( previous=build_url(request, prev_page), results=delays ) + + @staticmethod + @router.get("/forwarding", response_model=GenericResponseDTO[LinkForwardingDTO]) + async def get_link_forwardings( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin_gte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin_lte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + asn: Optional[str] = Query( + None, description="ASN or IXP ID of the monitored network (see number in /network/). Can be a single value or a list of comma separated values."), + magnitude: Optional[float] = Query( + None, description="Cumulated forwarding anomaly deviation for each monitored network. Values close to zero represent usual forwarding paths for the network, whereas higher positive (resp. negative) values stand for an increasing (resp. decreasing) number of paths passing through the monitored network."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results.") + ) -> GenericResponseDTO[LinkForwardingDTO]: + """ + List cumulated forwarding anomaly deviation (magnitude) for each monitored network. + Magnitude values close to zero represent usual forwarding paths for the network, whereas + higher positive (resp. negative) values stand for an increasing (resp. decreasing) + number of paths passing through the monitored network. 
+ The details of each forwarding anomaly is available in /forwarding/alarms/. +
      +
    • Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
    • +
    • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
    • +
    + """ + timebin_gte, timebin_lte = validate_timebin_params( + timebin, timebin_gte, timebin_lte) + + # Convert comma-separated ASNs to list + asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None + + forwardings, total_count = LinkController.service.get_link_forwardings( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_list, + magnitude=magnitude, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=forwardings + ) diff --git a/dtos/link_forwarding_dto.py b/dtos/link_forwarding_dto.py new file mode 100644 index 0000000..98453c6 --- /dev/null +++ b/dtos/link_forwarding_dto.py @@ -0,0 +1,12 @@ +from pydantic import BaseModel +from datetime import datetime + + +class LinkForwardingDTO(BaseModel): + timebin: datetime + asn: int + magnitude: float + asn_name: str + + class Config: + from_attributes = True diff --git a/models/forwarding.py b/models/forwarding.py index ff8a85c..c2b085d 100644 --- a/models/forwarding.py +++ b/models/forwarding.py @@ -31,6 +31,11 @@ class Forwarding(Base): magnitude = Column(Float, default=0.0, nullable=False, doc='Cumulated link delay deviation. 
class ForwardingRepository:
    """Read-only data access for Forwarding rows."""

    def get_all(
        self,
        db: Session,
        timebin_gte: Optional[datetime] = None,
        timebin_lte: Optional[datetime] = None,
        asn_ids: Optional[List[int]] = None,
        magnitude: Optional[float] = None,
        page: int = 1,
        order_by: Optional[str] = None
    ) -> Tuple[List[Forwarding], int]:
        """
        Query forwarding rows with optional filters and page-based slicing.

        Returns (rows for the requested page, total matching row count).
        """
        # Inner join so asn_relation is always resolvable on result rows.
        query = db.query(Forwarding).join(Forwarding.asn_relation)

        # Collect filter conditions, then apply them in one go.
        conditions = []
        if timebin_gte:
            conditions.append(Forwarding.timebin >= timebin_gte)
        if timebin_lte:
            conditions.append(Forwarding.timebin <= timebin_lte)
        if asn_ids:
            conditions.append(Forwarding.asn.in_(asn_ids))
        if magnitude is not None:
            # Explicit None check: magnitude == 0.0 is a legitimate filter.
            conditions.append(Forwarding.magnitude == magnitude)
        if conditions:
            query = query.filter(*conditions)

        total_count = query.count()

        # Order by the requested column when valid, otherwise by timebin so
        # that offset pagination stays deterministic.
        if order_by and hasattr(Forwarding, order_by):
            sort_column = getattr(Forwarding, order_by)
        else:
            sort_column = Forwarding.timebin
        query = query.order_by(sort_column)

        # Page-based slicing (page is 1-indexed).
        start = (page - 1) * page_size
        return query.offset(start).limit(page_size).all(), total_count
a/services/link_service.py b/services/link_service.py index 45809df..b379660 100644 --- a/services/link_service.py +++ b/services/link_service.py @@ -1,6 +1,8 @@ from sqlalchemy.orm import Session from repositories.delay_repository import DelayRepository from dtos.link_delay_dto import LinkDelayDTO +from repositories.forwarding_repository import ForwardingRepository +from dtos.link_forwarding_dto import LinkForwardingDTO from typing import Optional, List, Tuple from datetime import datetime @@ -8,6 +10,7 @@ class LinkService: def __init__(self): self.delay_repository = DelayRepository() + self.forwarding_repository = ForwardingRepository() def get_link_delays( self, @@ -38,3 +41,33 @@ def get_link_delays( magnitude=delay.magnitude, asn_name=delay.asn_relation.name if delay.asn_relation else None ) for delay in delays], total_count + + def get_link_forwardings( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + magnitude: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[LinkForwardingDTO], int]: + """ + Get link forwarding data with filtering. 
+ """ + forwardings, total_count = self.forwarding_repository.get_all( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_ids, + magnitude=magnitude, + page=page, + order_by=order_by + ) + + return [LinkForwardingDTO( + timebin=forwarding.timebin, + asn=forwarding.asn, + magnitude=forwarding.magnitude, + asn_name=forwarding.asn_relation.name if forwarding.asn_relation else None + ) for forwarding in forwardings], total_count From bad1113a99a2802274a4311ab3e1e52285994add Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 3 Jul 2025 13:55:30 +0300 Subject: [PATCH 22/43] Added /network_delay/locaions endpoint --- controllers/network_delay_controller.py | 56 +++++++++++++++++++++++ dtos/network_delay_locations_dto.py | 10 ++++ repositories/atlas_location_repository.py | 38 +++++++++++++++ services/network_delay_service.py | 36 +++++++++++++++ 4 files changed, 140 insertions(+) create mode 100644 controllers/network_delay_controller.py create mode 100644 dtos/network_delay_locations_dto.py create mode 100644 repositories/atlas_location_repository.py create mode 100644 services/network_delay_service.py diff --git a/controllers/network_delay_controller.py b/controllers/network_delay_controller.py new file mode 100644 index 0000000..eec1dbc --- /dev/null +++ b/controllers/network_delay_controller.py @@ -0,0 +1,56 @@ +from fastapi import APIRouter, Depends, Query, Request +from sqlalchemy.orm import Session +from services.network_delay_service import NetworkDelayService +from dtos.generic_response_dto import GenericResponseDTO, build_url +from dtos.network_delay_locations_dto import NetworkDelayLocationsDTO +from config.database import get_db +from typing import Optional +from globals import page_size + +router = APIRouter(prefix="/network_delay", tags=["Network Delay"]) + + +class NetworkDelayController: + service = NetworkDelayService() + + @staticmethod + @router.get("/locations", response_model=GenericResponseDTO[NetworkDelayLocationsDTO]) + async 
def get_network_delay_locations( + request: Request, + db: Session = Depends(get_db), + name: Optional[str] = Query( + None, description="Location identifier, can be searched by substring. The meaning of these values dependend on the location type: " + "
    • type=AS: ASN
    • type=CT: city name, region name, country code
    • " + "
    • type=PB: Atlas Probe ID
    • type=IP: IP version (4 or 6)
    "), + type: Optional[str] = Query( + None, description="Type of location. Possible values are:
    • AS: Autonomous System
    • " + "
    • CT: City
    • PB: Atlas Probe
    • IP: Whole IP space
    "), + af: Optional[int] = Query( + None, description="Address Family (IP version), values are either 4 or 6."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results.") + ) -> GenericResponseDTO[NetworkDelayLocationsDTO]: + """ + List locations monitored for network delay measurements. A location can be, for example, an AS, city, Atlas probe. + """ + locations, total_count = NetworkDelayController.service.get_network_delay_locations( + db, + name=name, + type=type, + af=af, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=locations + ) diff --git a/dtos/network_delay_locations_dto.py b/dtos/network_delay_locations_dto.py new file mode 100644 index 0000000..6613843 --- /dev/null +++ b/dtos/network_delay_locations_dto.py @@ -0,0 +1,10 @@ +from pydantic import BaseModel + + +class NetworkDelayLocationsDTO(BaseModel): + type: str + name: str + af: int + + class Config: + from_attributes = True diff --git a/repositories/atlas_location_repository.py b/repositories/atlas_location_repository.py new file mode 100644 index 0000000..79e503d --- /dev/null +++ b/repositories/atlas_location_repository.py @@ -0,0 +1,38 @@ +from sqlalchemy.orm import Session +from models.atlas_location import AtlasLocation +from typing import Optional, List, Tuple +from globals import page_size + + +class AtlasLocationRepository: + def get_all( + self, + db: Session, + name: Optional[str] = None, + type: Optional[str] = None, + af: Optional[int] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[AtlasLocation], int]: + query = db.query(AtlasLocation) + + 
class AtlasLocationRepository:
    """Read-only data access for AtlasLocation rows."""

    def get_all(
        self,
        db: Session,
        name: Optional[str] = None,
        type: Optional[str] = None,
        af: Optional[int] = None,
        page: int = 1,
        order_by: Optional[str] = None
    ) -> Tuple[List[AtlasLocation], int]:
        """
        Query monitored locations with optional filters and page-based slicing.

        Returns (rows for the requested page, total matching row count).
        """
        query = db.query(AtlasLocation)

        # Apply filters
        if name:
            # Case-insensitive substring match on the location name.
            query = query.filter(AtlasLocation.name.ilike(f"%{name}%"))
        if type:
            query = query.filter(AtlasLocation.type == type)
        if af is not None:
            query = query.filter(AtlasLocation.af == af)

        total_count = query.count()

        # Apply ordering; fall back to a deterministic column (name) so that
        # offset-based pagination cannot skip or repeat rows between pages.
        # The original left the order unspecified when `ordering` was absent.
        if order_by and hasattr(AtlasLocation, order_by):
            query = query.order_by(getattr(AtlasLocation, order_by))
        else:
            query = query.order_by(AtlasLocation.name)

        # Apply pagination (page is 1-indexed).
        offset = (page - 1) * page_size
        results = query.offset(offset).limit(page_size).all()

        return results, total_count
+ """ + locations, total_count = self.atlas_location_repository.get_all( + db, + name=name, + type=type, + af=af, + page=page, + order_by=order_by + ) + + return [NetworkDelayLocationsDTO( + type=location.type, + name=location.name, + af=location.af + ) for location in locations], total_count From 2f6bfe36356d079fc82703516a235fc7cb726821 Mon Sep 17 00:00:00 2001 From: ibraam Date: Thu, 3 Jul 2025 17:33:35 +0300 Subject: [PATCH 23/43] Added /metis/atlas/deployment endpoint --- controllers/metis_controller.py | 76 ++++++++++++++++ dtos/metis_atlas_deployment_dto.py | 15 ++++ models/metis_atlas_deployment.py | 7 +- .../metis_atlas_deployment_repository.py | 54 ++++++++++++ services/metis_service.py | 51 +++++++++++ utils.py | 86 +++++++++++++------ 6 files changed, 260 insertions(+), 29 deletions(-) create mode 100644 controllers/metis_controller.py create mode 100644 dtos/metis_atlas_deployment_dto.py create mode 100644 repositories/metis_atlas_deployment_repository.py create mode 100644 services/metis_service.py diff --git a/controllers/metis_controller.py b/controllers/metis_controller.py new file mode 100644 index 0000000..e6ab756 --- /dev/null +++ b/controllers/metis_controller.py @@ -0,0 +1,76 @@ +from fastapi import APIRouter, Depends, Query, Request +from datetime import datetime, timedelta, date +from sqlalchemy.orm import Session +from services.metis_service import MetisService +from dtos.generic_response_dto import GenericResponseDTO, build_url +from dtos.metis_atlas_deployment_dto import MetisAtlasDeploymentDTO +from config.database import get_db +from typing import Optional +from globals import page_size +from utils import validate_timebin_params, prepare_timebin_range + +router = APIRouter(prefix="/metis/atlas", tags=["Metis"]) + + +class MetisController: + service = MetisService() + + @staticmethod + @router.get("/deployment", response_model=GenericResponseDTO[MetisAtlasDeploymentDTO]) + async def get_metis_atlas_deployments( + request: Request, + db: 
Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Time when the ranking is computed. The ranking uses 24 weeks of data, hence 2022-05-23T00:00 means the ranking using data from 2021-12-06T00:00 to 2022-05-23T00:00."), + timebin_gte: Optional[datetime] = Query( + None, description="Time when the ranking is computed. The ranking uses 24 weeks of data, hence 2022-05-23T00:00 means the ranking using data from 2021-12-06T00:00 to 2022-05-23T00:00."), + timebin_lte: Optional[datetime] = Query( + None, description="Time when the ranking is computed. The ranking uses 24 weeks of data, hence 2022-05-23T00:00 means the ranking using data from 2021-12-06T00:00 to 2022-05-23T00:00."), + rank: Optional[int] = Query( + None, description="Selecting all ASes with rank less than equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric."), + rank_lte: Optional[int] = Query( + None, description="Selecting all ASes with rank less than equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric."), + rank_gte: Optional[int] = Query( + None, description="Selecting all ASes with rank less than equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric."), + metric: Optional[str] = Query( + None, description="Distance metric used to compute diversity, possible values are: 'as_path_length', 'ip_hops', 'rtt'"), + af: Optional[int] = Query( + None, description="Address Family (IP version), values are either 4 or 6"), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results.") + ) -> GenericResponseDTO[MetisAtlasDeploymentDTO]: + """ + Metis identifies ASes that are far from Atlas probes. Deploying Atlas probes in these ASes would be beneficial for Atlas coverage. 
+ + """ + timebin_gte, timebin_lte = prepare_timebin_range( + timebin, timebin_gte, timebin_lte, max_days=31) + + deployments, total_count = MetisController.service.get_metis_atlas_deployments( + db, + timebin=timebin, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + rank=rank, + rank_lte=rank_lte, + rank_gte=rank_gte, + metric=metric, + af=af, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=deployments + ) diff --git a/dtos/metis_atlas_deployment_dto.py b/dtos/metis_atlas_deployment_dto.py new file mode 100644 index 0000000..482a4bf --- /dev/null +++ b/dtos/metis_atlas_deployment_dto.py @@ -0,0 +1,15 @@ +from pydantic import BaseModel +from datetime import datetime + + +class MetisAtlasDeploymentDTO(BaseModel): + timebin: datetime + metric: str + rank: int + asn: int + af: int + nbsamples: int + asn_name: str + + class Config: + from_attributes = True diff --git a/models/metis_atlas_deployment.py b/models/metis_atlas_deployment.py index fe56049..b10503e 100644 --- a/models/metis_atlas_deployment.py +++ b/models/metis_atlas_deployment.py @@ -43,8 +43,11 @@ class MetisAtlasDeployment(Base): nbsamples = Column(Integer, default=0, nullable=False, doc='The number of probe ASes for which we have traceroutes to this AS in the time interval. 
class MetisAtlasDeploymentRepository:
    """Read-only data access for MetisAtlasDeployment rows."""

    def get_all(
        self,
        db: Session,
        timebin_gte: Optional[datetime] = None,
        timebin_lte: Optional[datetime] = None,
        timebin: Optional[datetime] = None,
        rank: Optional[int] = None,
        rank_lte: Optional[int] = None,
        rank_gte: Optional[int] = None,
        metric: Optional[str] = None,
        af: Optional[int] = None,
        page: int = 1,
        order_by: Optional[str] = None
    ) -> Tuple[List[MetisAtlasDeployment], int]:
        """
        Query Metis Atlas deployment rankings with optional filters.

        Returns (rows for the requested page, total matching row count).
        """
        # Inner join so asn_relation is always resolvable on result rows.
        query = db.query(MetisAtlasDeployment).join(
            MetisAtlasDeployment.asn_relation)

        # Apply filters. Truthiness checks are safe here: ranks are 1-based
        # and af is 4 or 6, so 0 is never a meaningful filter value.
        if timebin:
            query = query.filter(MetisAtlasDeployment.timebin == timebin)
        if timebin_gte:
            query = query.filter(MetisAtlasDeployment.timebin >= timebin_gte)
        if timebin_lte:
            query = query.filter(MetisAtlasDeployment.timebin <= timebin_lte)
        if rank:
            query = query.filter(MetisAtlasDeployment.rank == rank)
        if rank_lte:
            query = query.filter(MetisAtlasDeployment.rank <= rank_lte)
        if rank_gte:
            query = query.filter(MetisAtlasDeployment.rank >= rank_gte)
        if metric:
            query = query.filter(MetisAtlasDeployment.metric == metric)
        if af:
            query = query.filter(MetisAtlasDeployment.af == af)

        total_count = query.count()

        # Apply ordering; fall back to timebin (matching the delay/forwarding
        # repositories) so offset pagination is deterministic. The original
        # left the order unspecified when `ordering` was absent.
        if order_by and hasattr(MetisAtlasDeployment, order_by):
            query = query.order_by(getattr(MetisAtlasDeployment, order_by))
        else:
            query = query.order_by(MetisAtlasDeployment.timebin)

        # Apply pagination (page is 1-indexed).
        offset = (page - 1) * page_size
        results = query.offset(offset).limit(page_size).all()

        return results, total_count
class MetisService:
    """Business-logic layer for Metis probe-deployment rankings."""

    def __init__(self):
        # Repository handling all database access for deployment rankings.
        self.metis_repository = MetisAtlasDeploymentRepository()

    def get_metis_atlas_deployments(
        self,
        db: Session,
        timebin: Optional[datetime] = None,
        timebin_gte: Optional[datetime] = None,
        timebin_lte: Optional[datetime] = None,
        rank: Optional[int] = None,
        rank_lte: Optional[int] = None,
        rank_gte: Optional[int] = None,
        metric: Optional[str] = None,
        af: Optional[int] = None,
        page: int = 1,
        order_by: Optional[str] = None
    ) -> Tuple[List[MetisAtlasDeploymentDTO], int]:
        """
        Fetch Metis Atlas deployment rows matching the filters as DTOs.

        Returns (DTO list for the requested page, total matching row count).
        """
        rows, count = self.metis_repository.get_all(
            db,
            timebin=timebin,
            timebin_gte=timebin_gte,
            timebin_lte=timebin_lte,
            rank=rank,
            rank_lte=rank_lte,
            rank_gte=rank_gte,
            metric=metric,
            af=af,
            page=page,
            order_by=order_by,
        )

        def to_dto(row):
            # asn_relation can be missing; expose None rather than failing.
            name = row.asn_relation.name if row.asn_relation else None
            return MetisAtlasDeploymentDTO(
                timebin=row.timebin,
                metric=row.metric,
                rank=row.rank,
                asn=row.asn,
                af=row.af,
                nbsamples=row.nbsamples,
                asn_name=name,
            )

        return [to_dto(row) for row in rows], count
def validate_timebin_params(
    timebin: Optional[datetime],
    timebin_gte: Optional[datetime],
    timebin_lte: Optional[datetime],
    max_days: int = 7
) -> tuple[datetime, datetime]:
    """
    Resolve mandatory time parameters into a concrete [start, end] range.

    An exact ``timebin`` takes precedence and collapses the range to a single
    instant. Otherwise both bounds must be supplied, and the window may not
    exceed ``max_days``.

    Raises:
        HTTPException(400): when no time parameter is given, when only one
            bound of the range is given, or when the range exceeds ``max_days``.
    """
    has_any = (timebin is not None
               or timebin_gte is not None
               or timebin_lte is not None)
    if not has_any:
        raise HTTPException(
            status_code=400,
            detail="No timebin parameter. Please provide a timebin value or a range of values with timebin__lte and timebin__gte."
        )

    if timebin:
        # An exact timebin overrides any range parameters.
        timebin_gte = timebin_lte = timebin
    elif not (timebin_gte and timebin_lte):
        raise HTTPException(
            status_code=400,
            detail="Invalid timebin range. Please provide both timebin__lte and timebin__gte."
        )

    # Both bounds are guaranteed set here; reject overly wide windows.
    if timebin_lte - timebin_gte > timedelta(days=max_days):
        raise HTTPException(
            status_code=400,
            detail=f"The given timebin range is too large. Should be less than {max_days} days."
        )

    return timebin_gte, timebin_lte


def prepare_timebin_range(
    timebin: Optional[datetime],
    timebin_gte: Optional[datetime],
    timebin_lte: Optional[datetime],
    max_days: int = 7
) -> Tuple[datetime, Optional[datetime]]:
    """
    Resolve optional time parameters for endpoints where a time filter is
    not mandatory.

    Half-open ranges (only one bound) are rejected. When no time parameter at
    all is given, the start defaults to midnight six days ago (i.e. the last
    6 days including today) and the end is left open. An exact ``timebin`` is
    left for the caller to apply and does not affect the range computed here.

    Raises:
        HTTPException(400): for a half-open range or a window wider than
            ``max_days``.
    """
    gte_given = timebin_gte is not None
    lte_given = timebin_lte is not None

    if gte_given != lte_given:
        raise HTTPException(
            status_code=400,
            detail="Invalid timebin range. Please provide both timebin__lte and timebin__gte."
        )

    # No time filter at all: default to the last 6 days (including today).
    if timebin is None and not gte_given:
        midnight_today = datetime.combine(date.today(), datetime.min.time())
        timebin_gte = midnight_today - timedelta(days=6)

    # Validate range size if both bounds are present.
    if timebin_gte and timebin_lte:
        if timebin_lte - timebin_gte > timedelta(days=max_days):
            raise HTTPException(
                status_code=400,
                detail=f"The given timebin range is too large. Should be less than {max_days} days."
            )

    return timebin_gte, timebin_lte
get_network_delay_locations( previous=build_url(request, prev_page), results=locations ) + + @staticmethod + @router.get("/", response_model=GenericResponseDTO[NetworkDelayDTO]) + async def get_network_delays( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, + description="Timestamp of reported value." + ), + timebin_gte: Optional[datetime] = Query( + None, + description="Timestamp of reported value." + ), + timebin_lte: Optional[datetime] = Query( + None, + description="Timestamp of reported value." + ), + startpoint_name: Optional[str] = Query( + None, + description="Starting location name. It can be a single value or a list of values separated by the pipe character (i.e. | ). The meaning of values dependend on the location type:
    • type=AS: ASN
    • type=CT: city name, region name, country code
    • type=PB: Atlas Probe ID
    • type=IP: IP version (4 or 6)
    " + ), + endpoint_name: Optional[str] = Query( + None, + description="Ending location name. It can be a single value or a list of values separated by the pipe character (i.e. | ). The meaning of values dependend on the location type:
    • type=AS: ASN
    • type=CT: city name, region name, country code
    • type=PB: Atlas Probe ID
    • type=IP: IP version (4 or 6)
    " + ), + startpoint_type: Optional[str] = Query( + None, + description="Type of starting location. Possible values are:
    • AS: Autonomous System
    • CT: City
    • PB: Atlas Probe
    • IP: Whole IP space
    " + ), + endpoint_type: Optional[str] = Query( + None, + description="Type of ending location. Possible values are:
    • AS: Autonomous System
    • CT: City
    • PB: Atlas Probe
    • IP: Whole IP space
    " + ), + startpoint_af: Optional[int] = Query( + None, + description="Address Family (IP version), values are either 4 or 6." + ), + endpoint_af: Optional[int] = Query( + None, + description="Address Family (IP version), values are either 4 or 6." + ), + startpoint_key: Optional[str] = Query( + None, + description="List of starting location key, separated by the pip character (i.e. | ). A location key is a concatenation of a type, af, and name. For example, CT4New York City, New York, US|AS4174 (yes, the last key corresponds to AS174!)." + ), + endpoint_key: Optional[str] = Query( + None, + description="List of ending location key, separated by the pip character (i.e. | ). A location key is a concatenation of a type, af, and name. For example, CT4New York City, New York, US|AS4174 (yes, the last key corresponds to AS174!)." + ), + median_gte: Optional[float] = Query( + None, description="Estimated median RTT. RTT values are directly extracted from traceroute (a.k.a. realrtts) and estimated via differential RTTs."), + median_lte: Optional[float] = Query( + None, description="Estimated median RTT. RTT values are directly extracted from traceroute (a.k.a. realrtts) and estimated via differential RTTs."), + median: Optional[float] = Query( + None, description="Estimated median RTT. RTT values are directly extracted from traceroute (a.k.a. realrtts) and estimated via differential RTTs."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results.") + ) -> GenericResponseDTO[NetworkDelayDTO]: + """ + List estimated network delays between two potentially remote locations. A location can be, for example, an AS, city, Atlas probe. +
      +
    • Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
    • +
    • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
    • +
    + """ + timebin_gte, timebin_lte = validate_timebin_params( + timebin, timebin_gte, timebin_lte) + delays, total_count = NetworkDelayController.service.get_network_delays( + db, + timebin=timebin, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + startpoint_names=startpoint_name, + endpoint_names=endpoint_name, + startpoint_type=startpoint_type, + endpoint_type=endpoint_type, + startpoint_af=startpoint_af, + endpoint_af=endpoint_af, + median=median, + median_gte=median_gte, + median_lte=median_lte, + startpoint_key=startpoint_key, + endpoint_key=endpoint_key, + page=page, + order_by=ordering + ) + + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=delays + ) diff --git a/dtos/network_delay_dto.py b/dtos/network_delay_dto.py new file mode 100644 index 0000000..5e6ce95 --- /dev/null +++ b/dtos/network_delay_dto.py @@ -0,0 +1,39 @@ +from pydantic import BaseModel +from datetime import datetime + + +class NetworkDelayDTO(BaseModel): + timebin: datetime + startpoint_type: str + startpoint_name: str + startpoint_af: int + endpoint_type: str + endpoint_name: str + endpoint_af: int + median: float + nbtracks: int + nbprobes: int + entropy: float + hop: int + nbrealrtts: int + + class Config: + from_attributes = True + + @staticmethod + def from_model(atlasDelay): + return NetworkDelayDTO( + timebin=atlasDelay.timebin, + startpoint_type=atlasDelay.startpoint_relation.type, + startpoint_name=atlasDelay.startpoint_relation.name, + startpoint_af=atlasDelay.startpoint_relation.af, + endpoint_type=atlasDelay.endpoint_relation.type, + endpoint_name=atlasDelay.endpoint_relation.name, + endpoint_af=atlasDelay.endpoint_relation.af, + median=atlasDelay.median, + nbtracks=atlasDelay.nbtracks, + nbprobes=atlasDelay.nbprobes, + entropy=atlasDelay.entropy, + 
hop=atlasDelay.hop, + nbrealrtts=atlasDelay.nbrealrtts, + ) diff --git a/models/atlas_delay.py b/models/atlas_delay.py index bc189ec..14b0d0d 100644 --- a/models/atlas_delay.py +++ b/models/atlas_delay.py @@ -67,4 +67,12 @@ class AtlasDelay(Base): endpoint_id = Column(Integer, nullable=False, - doc='Ending location for the delay estimation.') \ No newline at end of file + doc='Ending location for the delay estimation.') + + startpoint_relation = relationship('AtlasLocation', + primaryjoin='AtlasDelay.startpoint_id == AtlasLocation.id', + foreign_keys=[startpoint_id]) + + endpoint_relation = relationship('AtlasLocation', + primaryjoin='AtlasDelay.endpoint_id == AtlasLocation.id', + foreign_keys=[endpoint_id]) \ No newline at end of file diff --git a/repositories/atlas_delay_repository.py b/repositories/atlas_delay_repository.py new file mode 100644 index 0000000..4454db6 --- /dev/null +++ b/repositories/atlas_delay_repository.py @@ -0,0 +1,127 @@ +from sqlalchemy.orm import Session, aliased +from sqlalchemy import and_, or_ +from models.atlas_delay import AtlasDelay +from datetime import datetime +from typing import List, Optional, Tuple +from globals import page_size + + +class AtlasDelayRepository: + def get_delays( + self, + db: Session, + timebin: Optional[datetime] = None, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + startpoint_names: Optional[str] = None, + endpoint_names: Optional[str] = None, + startpoint_type: Optional[str] = None, + endpoint_type: Optional[str] = None, + startpoint_af: Optional[int] = None, + endpoint_af: Optional[int] = None, + median: Optional[float] = None, + median_gte: Optional[float] = None, + median_lte: Optional[float] = None, + startpoint_key: Optional[str] = None, + endpoint_key: Optional[str] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[AtlasDelay], int]: + """ + Get network delays with all possible filters. 
class AtlasDelayRepository:
    """Read-only query helpers for the ``atlas_delay`` table."""

    @staticmethod
    def _parse_key(key: str):
        """Split a location key into ``(type, af, name)``.

        A location key concatenates a 2-char type, an optional af digit
        and an optional name, e.g. ``CT4New York City, New York, US`` or
        ``AS4174``. Missing parts come back as None; keys shorter than
        two characters are malformed and yield None.
        """
        if len(key) < 2:
            return None
        # Length guard fixes an IndexError on bare 2-char keys like "CT".
        key_af = int(key[2]) if len(key) > 2 and key[2].isdigit() else None
        key_name = key[3:] if len(key) > 3 else None
        return key[:2], key_af, key_name

    @staticmethod
    def _key_filter(location, keys: str):
        """Build an OR-of-ANDs condition over *location* from piped keys.

        Returns None when no key in *keys* is well formed.
        """
        groups = []
        for key in keys.split('|'):
            parsed = AtlasDelayRepository._parse_key(key)
            if parsed is None:
                continue
            key_type, key_af, key_name = parsed
            conds = [location.type == key_type]
            if key_af is not None:
                conds.append(location.af == key_af)
            if key_name:
                conds.append(location.name == key_name)
            groups.append(and_(*conds))
        return or_(*groups) if groups else None

    def get_delays(
        self,
        db: Session,
        timebin: Optional[datetime] = None,
        timebin_gte: Optional[datetime] = None,
        timebin_lte: Optional[datetime] = None,
        startpoint_names: Optional[str] = None,
        endpoint_names: Optional[str] = None,
        startpoint_type: Optional[str] = None,
        endpoint_type: Optional[str] = None,
        startpoint_af: Optional[int] = None,
        endpoint_af: Optional[int] = None,
        median: Optional[float] = None,
        median_gte: Optional[float] = None,
        median_lte: Optional[float] = None,
        startpoint_key: Optional[str] = None,
        endpoint_key: Optional[str] = None,
        page: int = 1,
        order_by: Optional[str] = None
    ) -> Tuple[List[AtlasDelay], int]:
        """
        Get network delays with all possible filters.

        Returns one page of matching ``AtlasDelay`` rows plus the total
        match count (counted before pagination).
        """
        # The AtlasLocation table is joined twice (startpoint and
        # endpoint), so each join needs its own alias to keep the SQL
        # unambiguous.
        Startpoint = aliased(
            AtlasDelay.startpoint_relation.property.mapper.class_)
        Endpoint = aliased(AtlasDelay.endpoint_relation.property.mapper.class_)

        query = db.query(AtlasDelay)\
            .join(Startpoint, AtlasDelay.startpoint_relation)\
            .join(Endpoint, AtlasDelay.endpoint_relation)

        # Timebin filters.
        if timebin:
            query = query.filter(AtlasDelay.timebin == timebin)
        if timebin_gte:
            query = query.filter(AtlasDelay.timebin >= timebin_gte)
        if timebin_lte:
            query = query.filter(AtlasDelay.timebin <= timebin_lte)

        # Startpoint filters.
        if startpoint_names:
            query = query.filter(
                Startpoint.name.in_(startpoint_names.split('|')))
        if startpoint_type:
            query = query.filter(Startpoint.type == startpoint_type)
        if startpoint_af is not None:
            query = query.filter(Startpoint.af == startpoint_af)
        if startpoint_key:
            key_filter = self._key_filter(Startpoint, startpoint_key)
            if key_filter is not None:
                query = query.filter(key_filter)

        # Endpoint filters.
        if endpoint_names:
            query = query.filter(Endpoint.name.in_(endpoint_names.split('|')))
        if endpoint_type:
            query = query.filter(Endpoint.type == endpoint_type)
        if endpoint_af is not None:
            query = query.filter(Endpoint.af == endpoint_af)
        if endpoint_key:
            key_filter = self._key_filter(Endpoint, endpoint_key)
            if key_filter is not None:
                query = query.filter(key_filter)

        # `is not None` so an explicit 0.0 filter is not silently ignored.
        if median is not None:
            query = query.filter(AtlasDelay.median == median)
        if median_gte is not None:
            query = query.filter(AtlasDelay.median >= median_gte)
        if median_lte is not None:
            query = query.filter(AtlasDelay.median <= median_lte)

        total_count = query.count()

        # Django-style ordering: a leading '-' requests descending order.
        if order_by:
            field_name = order_by.lstrip('-')
            if hasattr(AtlasDelay, field_name):
                column = getattr(AtlasDelay, field_name)
                query = query.order_by(
                    column.desc() if order_by.startswith('-') else column)

        # Pagination (page is 1-based; page_size comes from globals).
        offset = (page - 1) * page_size
        return query.offset(offset).limit(page_size).all(), total_count
timebin: Optional[datetime] = None, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + startpoint_names: Optional[str] = None, + endpoint_names: Optional[str] = None, + startpoint_type: Optional[str] = None, + endpoint_type: Optional[str] = None, + startpoint_af: Optional[int] = None, + endpoint_af: Optional[int] = None, + median: Optional[float] = None, + median_gte: Optional[float] = None, + median_lte: Optional[float] = None, + startpoint_key: Optional[str] = None, + endpoint_key: Optional[str] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[NetworkDelayDTO], int]: + """ + Get network delays with all possible filters. + """ + atlasDelays, total_count = self.atlas_delay_repository.get_delays( + db, + timebin=timebin, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + startpoint_names=startpoint_names, + endpoint_names=endpoint_names, + startpoint_type=startpoint_type, + endpoint_type=endpoint_type, + startpoint_af=startpoint_af, + endpoint_af=endpoint_af, + median=median, + median_gte=median_gte, + median_lte=median_lte, + startpoint_key=startpoint_key, + endpoint_key=endpoint_key, + page=page, + order_by=order_by + ) + + return [NetworkDelayDTO.from_model(atlasDelay) for atlasDelay in atlasDelays], total_count From f7e0068ff783499f04b619a936c5838d417247f4 Mon Sep 17 00:00:00 2001 From: ibraam Date: Wed, 9 Jul 2025 21:28:26 +0300 Subject: [PATCH 25/43] Added /network_delay/alarms endpoint --- controllers/network_delay_controller.py | 106 ++++++++++++++++ dtos/network_delay_alarms_dto.py | 29 +++++ models/atlas_delay_alarms.py | 8 ++ repositories/atlas_delay_alarms_repository.py | 119 ++++++++++++++++++ services/network_delay_service.py | 46 +++++++ 5 files changed, 308 insertions(+) create mode 100644 dtos/network_delay_alarms_dto.py create mode 100644 repositories/atlas_delay_alarms_repository.py diff --git a/controllers/network_delay_controller.py b/controllers/network_delay_controller.py index 
320ce33..ad2e359 100644 --- a/controllers/network_delay_controller.py +++ b/controllers/network_delay_controller.py @@ -4,6 +4,7 @@ from dtos.generic_response_dto import GenericResponseDTO, build_url from dtos.network_delay_locations_dto import NetworkDelayLocationsDTO from dtos.network_delay_dto import NetworkDelayDTO +from dtos.network_delay_alarms_dto import NetworkDelayAlarmsDTO from config.database import get_db from typing import Optional, List from datetime import datetime @@ -156,3 +157,108 @@ async def get_network_delays( previous=build_url(request, prev_page), results=delays ) + + @staticmethod + @router.get("/alarms", response_model=GenericResponseDTO[NetworkDelayAlarmsDTO]) + async def get_network_delay_alarms( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, + description="Timestamp of reported alarm." + ), + timebin_gte: Optional[datetime] = Query( + None, + description="Timestamp of reported alarm." + ), + timebin_lte: Optional[datetime] = Query( + None, + description="Timestamp of reported alarm." + ), + startpoint_name: Optional[str] = Query( + None, + description="Starting location name. It can be a single value or a list of values separated by the pipe character (i.e. | ). The meaning of values dependend on the location type:
    • type=AS: ASN
    • type=CT: city name, region name, country code
    • type=PB: Atlas Probe ID
    • type=IP: IP version (4 or 6)
    " + ), + endpoint_name: Optional[str] = Query( + None, + description="Ending location name. It can be a single value or a list of values separated by the pipe character (i.e. | ). The meaning of values dependend on the location type:
    • type=AS: ASN
    • type=CT: city name, region name, country code
    • type=PB: Atlas Probe ID
    • type=IP: IP version (4 or 6)
    " + ), + startpoint_type: Optional[str] = Query( + None, + description="Type of starting location. Possible values are:
    • AS: Autonomous System
    • CT: City
    • PB: Atlas Probe
    • IP: Whole IP space
    " + ), + endpoint_type: Optional[str] = Query( + None, + description="Type of ending location. Possible values are:
    • AS: Autonomous System
    • CT: City
    • PB: Atlas Probe
    • IP: Whole IP space
    " + ), + startpoint_af: Optional[int] = Query( + None, + description="Address Family (IP version), values are either 4 or 6." + ), + endpoint_af: Optional[int] = Query( + None, + description="Address Family (IP version), values are either 4 or 6." + ), + startpoint_key: Optional[str] = Query( + None, + description="List of starting location key, separated by the pip character (i.e. | ). A location key is a concatenation of a type, af, and name. For example, CT4New York City, New York, US|AS4174." + ), + endpoint_key: Optional[str] = Query( + None, + description="List of ending location key, separated by the pip character (i.e. | ). A location key is a concatenation of a type, af, and name. For example, CT4New York City, New York, US|AS4174." + ), + deviation_gte: Optional[float] = Query( + None, + description="Significance of the AS Hegemony change." + ), + deviation_lte: Optional[float] = Query( + None, + description="Significance of the AS Hegemony change." + ), + page: Optional[int] = Query( + 1, ge=1, + description="A page number within the paginated result set" + ), + ordering: Optional[str] = Query( + None, + description="Which field to use when ordering the results." + ) + ) -> GenericResponseDTO[NetworkDelayAlarmsDTO]: + """ + List significant network delay changes detected by IHR anomaly detector. +
      +
    • Required parameters: timebin or a range of timebins (using the two parameters timebin_lte and timebin_gte).
    • +
    • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
    • +
    + """ + timebin_gte, timebin_lte = validate_timebin_params( + timebin, timebin_gte, timebin_lte) + + alarms, total_count = NetworkDelayController.service.get_network_delay_alarms( + db, + timebin=timebin, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + startpoint_names=startpoint_name, + endpoint_names=endpoint_name, + startpoint_type=startpoint_type, + endpoint_type=endpoint_type, + startpoint_af=startpoint_af, + endpoint_af=endpoint_af, + startpoint_key=startpoint_key, + endpoint_key=endpoint_key, + deviation_gte=deviation_gte, + deviation_lte=deviation_lte, + page=page, + order_by=ordering + ) + + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=alarms + ) diff --git a/dtos/network_delay_alarms_dto.py b/dtos/network_delay_alarms_dto.py new file mode 100644 index 0000000..6416fd7 --- /dev/null +++ b/dtos/network_delay_alarms_dto.py @@ -0,0 +1,29 @@ +from pydantic import BaseModel +from datetime import datetime + + +class NetworkDelayAlarmsDTO(BaseModel): + timebin: datetime + startpoint_type: str + startpoint_name: str + startpoint_af: int + endpoint_type: str + endpoint_name: str + endpoint_af: int + deviation: float + + class Config: + from_attributes = True + + @staticmethod + def from_model(atlas_delay_alarm): + return NetworkDelayAlarmsDTO( + timebin=atlas_delay_alarm.timebin, + startpoint_type=atlas_delay_alarm.startpoint_relation.type, + startpoint_name=atlas_delay_alarm.startpoint_relation.name, + startpoint_af=atlas_delay_alarm.startpoint_relation.af, + endpoint_type=atlas_delay_alarm.endpoint_relation.type, + endpoint_name=atlas_delay_alarm.endpoint_relation.name, + endpoint_af=atlas_delay_alarm.endpoint_relation.af, + deviation=atlas_delay_alarm.deviation + ) diff --git a/models/atlas_delay_alarms.py b/models/atlas_delay_alarms.py index 
class AtlasDelayAlarmsRepository:
    """Read-only query helpers for the ``atlas_delay_alarms`` table."""

    @staticmethod
    def _parse_key(key: str):
        """Split a location key into ``(type, af, name)``.

        A location key concatenates a 2-char type, an optional af digit
        and an optional name, e.g. ``CT4New York City, New York, US`` or
        ``AS4174``. Missing parts come back as None; keys shorter than
        two characters are malformed and yield None.
        """
        if len(key) < 2:
            return None
        # Length guard fixes an IndexError on bare 2-char keys like "CT".
        key_af = int(key[2]) if len(key) > 2 and key[2].isdigit() else None
        key_name = key[3:] if len(key) > 3 else None
        return key[:2], key_af, key_name

    @staticmethod
    def _key_filter(location, keys: str):
        """Build an OR-of-ANDs condition over *location* from piped keys.

        Returns None when no key in *keys* is well formed.
        """
        groups = []
        for key in keys.split('|'):
            parsed = AtlasDelayAlarmsRepository._parse_key(key)
            if parsed is None:
                continue
            key_type, key_af, key_name = parsed
            conds = [location.type == key_type]
            if key_af is not None:
                conds.append(location.af == key_af)
            if key_name:
                conds.append(location.name == key_name)
            groups.append(and_(*conds))
        return or_(*groups) if groups else None

    def get_alarms(
        self,
        db: Session,
        timebin: Optional[datetime] = None,
        timebin_gte: Optional[datetime] = None,
        timebin_lte: Optional[datetime] = None,
        startpoint_names: Optional[str] = None,
        endpoint_names: Optional[str] = None,
        startpoint_type: Optional[str] = None,
        endpoint_type: Optional[str] = None,
        startpoint_af: Optional[int] = None,
        endpoint_af: Optional[int] = None,
        startpoint_key: Optional[str] = None,
        endpoint_key: Optional[str] = None,
        deviation_gte: Optional[float] = None,
        deviation_lte: Optional[float] = None,
        page: int = 1,
        order_by: Optional[str] = None
    ) -> Tuple[List[AtlasDelayAlarms], int]:
        """
        Get network delay alarms with all possible filters.

        Returns one page of matching rows plus the total match count
        (counted before pagination).
        """
        # AtlasLocation is joined twice (startpoint and endpoint); each
        # join needs its own alias to keep the SQL unambiguous.
        Startpoint = aliased(
            AtlasDelayAlarms.startpoint_relation.property.mapper.class_)
        Endpoint = aliased(
            AtlasDelayAlarms.endpoint_relation.property.mapper.class_)

        query = db.query(AtlasDelayAlarms)\
            .join(Startpoint, AtlasDelayAlarms.startpoint_relation)\
            .join(Endpoint, AtlasDelayAlarms.endpoint_relation)

        # Timebin filters.
        if timebin:
            query = query.filter(AtlasDelayAlarms.timebin == timebin)
        if timebin_gte:
            query = query.filter(AtlasDelayAlarms.timebin >= timebin_gte)
        if timebin_lte:
            query = query.filter(AtlasDelayAlarms.timebin <= timebin_lte)

        # Startpoint filters.
        if startpoint_names:
            query = query.filter(
                Startpoint.name.in_(startpoint_names.split('|')))
        if startpoint_type:
            query = query.filter(Startpoint.type == startpoint_type)
        if startpoint_af is not None:
            query = query.filter(Startpoint.af == startpoint_af)
        if startpoint_key:
            key_filter = self._key_filter(Startpoint, startpoint_key)
            if key_filter is not None:
                query = query.filter(key_filter)

        # Endpoint filters.
        if endpoint_names:
            query = query.filter(Endpoint.name.in_(endpoint_names.split('|')))
        if endpoint_type:
            query = query.filter(Endpoint.type == endpoint_type)
        if endpoint_af is not None:
            query = query.filter(Endpoint.af == endpoint_af)
        if endpoint_key:
            key_filter = self._key_filter(Endpoint, endpoint_key)
            if key_filter is not None:
                query = query.filter(key_filter)

        # `is not None` so an explicit 0.0 threshold is not silently ignored.
        if deviation_gte is not None:
            query = query.filter(AtlasDelayAlarms.deviation >= deviation_gte)
        if deviation_lte is not None:
            query = query.filter(AtlasDelayAlarms.deviation <= deviation_lte)

        total_count = query.count()

        # Django-style ordering: a leading '-' requests descending order.
        if order_by:
            field_name = order_by.lstrip('-')
            if hasattr(AtlasDelayAlarms, field_name):
                column = getattr(AtlasDelayAlarms, field_name)
                query = query.order_by(
                    column.desc() if order_by.startswith('-') else column)

        # Pagination (page is 1-based; page_size comes from globals).
        offset = (page - 1) * page_size
        return query.offset(offset).limit(page_size).all(), total_count
timebin_lte: Optional[datetime] = None, + startpoint_names: Optional[str] = None, + endpoint_names: Optional[str] = None, + startpoint_type: Optional[str] = None, + endpoint_type: Optional[str] = None, + startpoint_af: Optional[int] = None, + endpoint_af: Optional[int] = None, + startpoint_key: Optional[str] = None, + endpoint_key: Optional[str] = None, + deviation_gte: Optional[float] = None, + deviation_lte: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[NetworkDelayAlarmsDTO], int]: + """ + Get network delay alarms with all possible filters. + """ + alarms, total_count = self.atlas_delay_alarms_repository.get_alarms( + db, + timebin=timebin, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + startpoint_names=startpoint_names, + endpoint_names=endpoint_names, + startpoint_type=startpoint_type, + endpoint_type=endpoint_type, + startpoint_af=startpoint_af, + endpoint_af=endpoint_af, + startpoint_key=startpoint_key, + endpoint_key=endpoint_key, + deviation_gte=deviation_gte, + deviation_lte=deviation_lte, + page=page, + order_by=order_by + ) + + return [NetworkDelayAlarmsDTO.from_model(alarm) for alarm in alarms], total_count From 23949e1707c48882244c0825bbcb53dc9079f1d6 Mon Sep 17 00:00:00 2001 From: ibraam Date: Wed, 9 Jul 2025 22:06:42 +0300 Subject: [PATCH 26/43] Change _lte and _gte in controller to __lte and __gte --- controllers/hegemony_cone_controller.py | 10 +++--- controllers/link_controller.py | 24 +++++++------- controllers/metis_controller.py | 20 ++++++------ controllers/network_delay_controller.py | 42 ++++++++++++------------- controllers/networks_controller.py | 8 ++--- 5 files changed, 52 insertions(+), 52 deletions(-) diff --git a/controllers/hegemony_cone_controller.py b/controllers/hegemony_cone_controller.py index 27be814..7a7b316 100644 --- a/controllers/hegemony_cone_controller.py +++ b/controllers/hegemony_cone_controller.py @@ -22,9 +22,9 @@ async def get_hegemony_cones( db: Session = 
Depends(get_db), timebin: Optional[datetime] = Query( None, description="Get results for exact timestamp"), - timebin_gte: Optional[datetime] = Query( + timebin__gte: Optional[datetime] = Query( None, description="Get results after or equal to this timestamp"), - timebin_lte: Optional[datetime] = Query( + timebin__lte: Optional[datetime] = Query( None, description="Get results before or equal to this timestamp"), asn: Optional[str] = Query( None, description="Autonomous System Number (ASN). Can be a single value or a list of comma separated values."), @@ -43,15 +43,15 @@ async def get_hegemony_cones( networks). """ - timebin_gte, timebin_lte = validate_timebin_params(timebin, timebin_gte, timebin_lte) + timebin__gte, timebin__lte = validate_timebin_params(timebin, timebin__gte, timebin__lte) # Convert comma-separated ASNs to list asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None cones, total_count = HegemonyConeController.service.get_hegemony_cones( db, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, asn_ids=asn_list, af=af, page=page, diff --git a/controllers/link_controller.py b/controllers/link_controller.py index cb0ab4f..9aaceb0 100644 --- a/controllers/link_controller.py +++ b/controllers/link_controller.py @@ -23,9 +23,9 @@ async def get_link_delays( db: Session = Depends(get_db), timebin: Optional[datetime] = Query( None, description="Timestamp of reported value."), - timebin_gte: Optional[datetime] = Query( + timebin__gte: Optional[datetime] = Query( None, description="Timestamp of reported value."), - timebin_lte: Optional[datetime] = Query( + timebin__lte: Optional[datetime] = Query( None, description="Timestamp of reported value."), asn: Optional[str] = Query( None, description="ASN or IXP ID of the monitored network (see number in /network/). Can be a single value or a list of comma separated values."), @@ -44,16 +44,16 @@ async def get_link_delays(
  • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
  • """ - timebin_gte, timebin_lte = validate_timebin_params( - timebin, timebin_gte, timebin_lte) + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte) # Convert comma-separated ASNs to list asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None delays, total_count = LinkController.service.get_link_delays( db, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, asn_ids=asn_list, magnitude=magnitude, page=page, @@ -78,9 +78,9 @@ async def get_link_forwardings( db: Session = Depends(get_db), timebin: Optional[datetime] = Query( None, description="Timestamp of reported value."), - timebin_gte: Optional[datetime] = Query( + timebin__gte: Optional[datetime] = Query( None, description="Timestamp of reported value."), - timebin_lte: Optional[datetime] = Query( + timebin__lte: Optional[datetime] = Query( None, description="Timestamp of reported value."), asn: Optional[str] = Query( None, description="ASN or IXP ID of the monitored network (see number in /network/). Can be a single value or a list of comma separated values."), @@ -102,16 +102,16 @@ async def get_link_forwardings(
  • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
  • """ - timebin_gte, timebin_lte = validate_timebin_params( - timebin, timebin_gte, timebin_lte) + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte) # Convert comma-separated ASNs to list asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None forwardings, total_count = LinkController.service.get_link_forwardings( db, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, asn_ids=asn_list, magnitude=magnitude, page=page, diff --git a/controllers/metis_controller.py b/controllers/metis_controller.py index e6ab756..edba5fc 100644 --- a/controllers/metis_controller.py +++ b/controllers/metis_controller.py @@ -22,15 +22,15 @@ async def get_metis_atlas_deployments( db: Session = Depends(get_db), timebin: Optional[datetime] = Query( None, description="Time when the ranking is computed. The ranking uses 24 weeks of data, hence 2022-05-23T00:00 means the ranking using data from 2021-12-06T00:00 to 2022-05-23T00:00."), - timebin_gte: Optional[datetime] = Query( + timebin__gte: Optional[datetime] = Query( None, description="Time when the ranking is computed. The ranking uses 24 weeks of data, hence 2022-05-23T00:00 means the ranking using data from 2021-12-06T00:00 to 2022-05-23T00:00."), - timebin_lte: Optional[datetime] = Query( + timebin__lte: Optional[datetime] = Query( None, description="Time when the ranking is computed. The ranking uses 24 weeks of data, hence 2022-05-23T00:00 means the ranking using data from 2021-12-06T00:00 to 2022-05-23T00:00."), rank: Optional[int] = Query( None, description="Selecting all ASes with rank less than equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric."), - rank_lte: Optional[int] = Query( + rank__lte: Optional[int] = Query( None, description="Selecting all ASes with rank less than equal to 10 (i.e. 
rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric."), - rank_gte: Optional[int] = Query( + rank__gte: Optional[int] = Query( None, description="Selecting all ASes with rank less than equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric."), metric: Optional[str] = Query( None, description="Distance metric used to compute diversity, possible values are: 'as_path_length', 'ip_hops', 'rtt'"), @@ -47,17 +47,17 @@ async def get_metis_atlas_deployments(
  • Limitations: At most 31 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
  • """ - timebin_gte, timebin_lte = prepare_timebin_range( - timebin, timebin_gte, timebin_lte, max_days=31) + timebin__gte, timebin__lte = prepare_timebin_range( + timebin, timebin__gte, timebin__lte, max_days=31) deployments, total_count = MetisController.service.get_metis_atlas_deployments( db, timebin=timebin, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, rank=rank, - rank_lte=rank_lte, - rank_gte=rank_gte, + rank_lte=rank__lte, + rank_gte=rank__gte, metric=metric, af=af, page=page, diff --git a/controllers/network_delay_controller.py b/controllers/network_delay_controller.py index ad2e359..cd25572 100644 --- a/controllers/network_delay_controller.py +++ b/controllers/network_delay_controller.py @@ -68,11 +68,11 @@ async def get_network_delays( None, description="Timestamp of reported value." ), - timebin_gte: Optional[datetime] = Query( + timebin__gte: Optional[datetime] = Query( None, description="Timestamp of reported value." ), - timebin_lte: Optional[datetime] = Query( + timebin__lte: Optional[datetime] = Query( None, description="Timestamp of reported value." ), @@ -108,9 +108,9 @@ async def get_network_delays( None, description="List of ending location key, separated by the pip character (i.e. | ). A location key is a concatenation of a type, af, and name. For example, CT4New York City, New York, US|AS4174 (yes, the last key corresponds to AS174!)." ), - median_gte: Optional[float] = Query( + median__gte: Optional[float] = Query( None, description="Estimated median RTT. RTT values are directly extracted from traceroute (a.k.a. realrtts) and estimated via differential RTTs."), - median_lte: Optional[float] = Query( + median__lte: Optional[float] = Query( None, description="Estimated median RTT. RTT values are directly extracted from traceroute (a.k.a. realrtts) and estimated via differential RTTs."), median: Optional[float] = Query( None, description="Estimated median RTT. 
RTT values are directly extracted from traceroute (a.k.a. realrtts) and estimated via differential RTTs."), @@ -126,13 +126,13 @@ async def get_network_delays(
  • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
  • """ - timebin_gte, timebin_lte = validate_timebin_params( - timebin, timebin_gte, timebin_lte) + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte) delays, total_count = NetworkDelayController.service.get_network_delays( db, timebin=timebin, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, startpoint_names=startpoint_name, endpoint_names=endpoint_name, startpoint_type=startpoint_type, @@ -140,8 +140,8 @@ async def get_network_delays( startpoint_af=startpoint_af, endpoint_af=endpoint_af, median=median, - median_gte=median_gte, - median_lte=median_lte, + median_gte=median__gte, + median_lte=median__lte, startpoint_key=startpoint_key, endpoint_key=endpoint_key, page=page, @@ -167,11 +167,11 @@ async def get_network_delay_alarms( None, description="Timestamp of reported alarm." ), - timebin_gte: Optional[datetime] = Query( + timebin__gte: Optional[datetime] = Query( None, description="Timestamp of reported alarm." ), - timebin_lte: Optional[datetime] = Query( + timebin__lte: Optional[datetime] = Query( None, description="Timestamp of reported alarm." ), @@ -207,11 +207,11 @@ async def get_network_delay_alarms( None, description="List of ending location key, separated by the pip character (i.e. | ). A location key is a concatenation of a type, af, and name. For example, CT4New York City, New York, US|AS4174." ), - deviation_gte: Optional[float] = Query( + deviation__gte: Optional[float] = Query( None, description="Significance of the AS Hegemony change." ), - deviation_lte: Optional[float] = Query( + deviation__lte: Optional[float] = Query( None, description="Significance of the AS Hegemony change." ), @@ -227,18 +227,18 @@ async def get_network_delay_alarms( """ List significant network delay changes detected by IHR anomaly detector.
      -
    • Required parameters: timebin or a range of timebins (using the two parameters timebin_lte and timebin_gte).
    • +
    • Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
    • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
    """ - timebin_gte, timebin_lte = validate_timebin_params( - timebin, timebin_gte, timebin_lte) + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte) alarms, total_count = NetworkDelayController.service.get_network_delay_alarms( db, timebin=timebin, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, startpoint_names=startpoint_name, endpoint_names=endpoint_name, startpoint_type=startpoint_type, @@ -247,8 +247,8 @@ async def get_network_delay_alarms( endpoint_af=endpoint_af, startpoint_key=startpoint_key, endpoint_key=endpoint_key, - deviation_gte=deviation_gte, - deviation_lte=deviation_lte, + deviation_gte=deviation__gte, + deviation_lte=deviation__lte, page=page, order_by=ordering ) diff --git a/controllers/networks_controller.py b/controllers/networks_controller.py index 1e2570d..3c1200e 100644 --- a/controllers/networks_controller.py +++ b/controllers/networks_controller.py @@ -22,9 +22,9 @@ async def get_networks( None, description="Search for a substring in networks name"), number: Optional[str] = Query( None, description="Search by ASN or IXP ID. It can be either a single value (e.g. 2497) or a list of comma separated values (e.g. 2497,2500,2501)"), - number_gte: Optional[int] = Query( + number__gte: Optional[int] = Query( None, description="Autonomous System Number (ASN) or IXP ID. Note that IXP ID are negative to avoid colision."), - number_lte: Optional[int] = Query( + number__lte: Optional[int] = Query( None, description="Autonomous System Number (ASN) or IXP ID. 
Note that IXP ID are negative to avoid colision."), search: Optional[str] = Query( None, description="Search for both ASN/IXPID and substring in names"), @@ -44,8 +44,8 @@ async def get_networks( db, name=name, numbers=number_list, - number_gte=number_gte, - number_lte=number_lte, + number_gte=number__gte, + number_lte=number__lte, search=search, page=page, order_by=ordering From 9bf2ffc18492e38fd87b124d5c9ca0ff9e46894e Mon Sep 17 00:00:00 2001 From: ibraam Date: Wed, 9 Jul 2025 23:20:39 +0300 Subject: [PATCH 27/43] Added /metis/atlas/selection endpoint --- controllers/metis_controller.py | 61 +++++++++++++++++++ dtos/metis_atlas_selection_dto.py | 14 +++++ models/metis_atlas_selection.py | 7 ++- .../metis_atlas_selection_repository.py | 54 ++++++++++++++++ services/metis_service.py | 47 +++++++++++++- 5 files changed, 180 insertions(+), 3 deletions(-) create mode 100644 dtos/metis_atlas_selection_dto.py create mode 100644 repositories/metis_atlas_selection_repository.py diff --git a/controllers/metis_controller.py b/controllers/metis_controller.py index edba5fc..4a5365e 100644 --- a/controllers/metis_controller.py +++ b/controllers/metis_controller.py @@ -4,6 +4,7 @@ from services.metis_service import MetisService from dtos.generic_response_dto import GenericResponseDTO, build_url from dtos.metis_atlas_deployment_dto import MetisAtlasDeploymentDTO +from dtos.metis_atlas_selection_dto import MetisAtlasSelectionDTO from config.database import get_db from typing import Optional from globals import page_size @@ -74,3 +75,63 @@ async def get_metis_atlas_deployments( previous=build_url(request, prev_page), results=deployments ) + + @staticmethod + @router.get("/selection", response_model=GenericResponseDTO[MetisAtlasSelectionDTO]) + async def get_metis_atlas_selections( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Time when the ranking is computed. 
The ranking uses four weeks of data, hence 2022-03-28T00:00 means the ranking using data from 2022-02-28T00:00 to 2022-03-28T00:00."), + timebin__gte: Optional[datetime] = Query( + None, description="Time when the ranking is computed. The ranking uses four weeks of data, hence 2022-03-28T00:00 means the ranking using data from 2022-02-28T00:00 to 2022-03-28T00:00."), + timebin__lte: Optional[datetime] = Query( + None, description="Time when the ranking is computed. The ranking uses four weeks of data, hence 2022-03-28T00:00 means the ranking using data from 2022-02-28T00:00 to 2022-03-28T00:00."), + rank: Optional[int] = Query( + None, description="Selecting all ASes with rank less than equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric."), + rank__lte: Optional[int] = Query( + None, description="Selecting all ASes with rank less than equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric."), + rank__gte: Optional[int] = Query( + None, description="Selecting all ASes with rank less than equal to 10 (i.e. rank__lte=10), gives the 10 most diverse ASes in terms of the selected metric."), + metric: Optional[str] = Query( + None, description="Distance metric used to compute diversity, possible values are: 'as_path_length', 'ip_hops', 'rtt'"), + af: Optional[int] = Query( + None, description="Address Family (IP version), values are either 4 or 6"), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results.") + ) -> GenericResponseDTO[MetisAtlasSelectionDTO]: + """ + Metis helps to select a set of diverse Atlas probes in terms of different topological metrics. (e.g. 
AS path, RTT) + + """ + timebin__gte, timebin__lte = prepare_timebin_range( + timebin, timebin__gte, timebin__lte, max_days=31) + + selections, total_count = MetisController.service.get_metis_atlas_selections( + db, + timebin=timebin, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, + rank=rank, + rank_lte=rank__lte, + rank_gte=rank__gte, + metric=metric, + af=af, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=selections + ) diff --git a/dtos/metis_atlas_selection_dto.py b/dtos/metis_atlas_selection_dto.py new file mode 100644 index 0000000..4b582de --- /dev/null +++ b/dtos/metis_atlas_selection_dto.py @@ -0,0 +1,14 @@ +from pydantic import BaseModel +from datetime import datetime + + +class MetisAtlasSelectionDTO(BaseModel): + timebin: datetime + metric: str + rank: int + asn: int + af: int + asn_name: str + + class Config: + from_attributes = True diff --git a/models/metis_atlas_selection.py b/models/metis_atlas_selection.py index 5ed105f..a75c21d 100644 --- a/models/metis_atlas_selection.py +++ b/models/metis_atlas_selection.py @@ -40,7 +40,12 @@ class MetisAtlasSelection(Base): mean = Column(Float, default=0.0, nullable=False, doc='The mean distance value (e.g., AS-path length) we get when using all ASes up to this rank. 
This decreases with increasing rank, since lower ranks represent closer ASes.') - asn_id = Column(BigInteger, + asn = Column('asn_id',BigInteger, nullable=False, doc="Atlas probes' Autonomous System Number.") + + asn_relation = relationship('ASN', + primaryjoin='MetisAtlasSelection.asn == ASN.number', + foreign_keys=[asn], + backref='metis_selections') diff --git a/repositories/metis_atlas_selection_repository.py b/repositories/metis_atlas_selection_repository.py new file mode 100644 index 0000000..53cc5cc --- /dev/null +++ b/repositories/metis_atlas_selection_repository.py @@ -0,0 +1,54 @@ +from datetime import datetime +from sqlalchemy.orm import Session +from models.metis_atlas_selection import MetisAtlasSelection +from typing import Optional, List, Tuple +from globals import page_size + + +class MetisAtlasSelectionRepository: + def get_all( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + timebin: Optional[datetime] = None, + rank: Optional[int] = None, + rank_lte: Optional[int] = None, + rank_gte: Optional[int] = None, + metric: Optional[str] = None, + af: Optional[int] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[MetisAtlasSelection], int]: + query = db.query(MetisAtlasSelection).join( + MetisAtlasSelection.asn_relation) + + # Apply filters + if timebin: + query = query.filter(MetisAtlasSelection.timebin == timebin) + if timebin_gte: + query = query.filter(MetisAtlasSelection.timebin >= timebin_gte) + if timebin_lte: + query = query.filter(MetisAtlasSelection.timebin <= timebin_lte) + if rank: + query = query.filter(MetisAtlasSelection.rank == rank) + if rank_lte: + query = query.filter(MetisAtlasSelection.rank <= rank_lte) + if rank_gte: + query = query.filter(MetisAtlasSelection.rank >= rank_gte) + if metric: + query = query.filter(MetisAtlasSelection.metric == metric) + if af: + query = query.filter(MetisAtlasSelection.af == af) + + total_count = query.count() + + # 
Apply ordering + if order_by and hasattr(MetisAtlasSelection, order_by): + query = query.order_by(getattr(MetisAtlasSelection, order_by)) + + # Apply pagination + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/metis_service.py b/services/metis_service.py index 57476f3..e35271c 100644 --- a/services/metis_service.py +++ b/services/metis_service.py @@ -1,13 +1,16 @@ from sqlalchemy.orm import Session from repositories.metis_atlas_deployment_repository import MetisAtlasDeploymentRepository +from repositories.metis_atlas_selection_repository import MetisAtlasSelectionRepository from dtos.metis_atlas_deployment_dto import MetisAtlasDeploymentDTO +from dtos.metis_atlas_selection_dto import MetisAtlasSelectionDTO from typing import Optional, List, Tuple from datetime import datetime class MetisService: def __init__(self): - self.metis_repository = MetisAtlasDeploymentRepository() + self.metis_atlas_deployment_repository = MetisAtlasDeploymentRepository() + self.metis_atlas_selection_repository = MetisAtlasSelectionRepository() def get_metis_atlas_deployments( self, @@ -26,7 +29,7 @@ def get_metis_atlas_deployments( """ Get Metis Atlas deployment data with filtering. 
""" - deployments, total_count = self.metis_repository.get_all( + deployments, total_count = self.metis_atlas_deployment_repository.get_all( db, timebin=timebin, timebin_gte=timebin_gte, @@ -49,3 +52,43 @@ def get_metis_atlas_deployments( nbsamples=deployment.nbsamples, asn_name=deployment.asn_relation.name if deployment.asn_relation else None ) for deployment in deployments], total_count + + def get_metis_atlas_selections( + self, + db: Session, + timebin: Optional[datetime] = None, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + rank: Optional[int] = None, + rank_lte: Optional[int] = None, + rank_gte: Optional[int] = None, + metric: Optional[str] = None, + af: Optional[int] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[MetisAtlasSelectionDTO], int]: + """ + Get Metis Atlas selection data with filtering. + """ + selections, total_count = self.metis_atlas_selection_repository.get_all( + db, + timebin=timebin, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + rank=rank, + rank_lte=rank_lte, + rank_gte=rank_gte, + metric=metric, + af=af, + page=page, + order_by=order_by + ) + + return [MetisAtlasSelectionDTO( + timebin=selection.timebin, + metric=selection.metric, + rank=selection.rank, + asn=selection.asn, + af=selection.af, + asn_name=selection.asn_relation.name if selection.asn_relation else None + ) for selection in selections], total_count From 74f8bea89694890f1a08d6f4c91359cfbcfe95aa Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 14 Jul 2025 13:37:15 +0300 Subject: [PATCH 28/43] Added link/delay/alarms endpoint --- controllers/link_controller.py | 88 +++++++++++++++++++++++++ dtos/link_delay_alarms_dto.py | 18 +++++ models/delay_alarms.py | 7 +- repositories/delay_alarms_repository.py | 74 +++++++++++++++++++++ services/link_service.py | 56 ++++++++++++++++ 5 files changed, 241 insertions(+), 2 deletions(-) create mode 100644 dtos/link_delay_alarms_dto.py create mode 100644 
repositories/delay_alarms_repository.py diff --git a/controllers/link_controller.py b/controllers/link_controller.py index 9aaceb0..c4ca388 100644 --- a/controllers/link_controller.py +++ b/controllers/link_controller.py @@ -5,6 +5,7 @@ from dtos.generic_response_dto import GenericResponseDTO, build_url from dtos.link_delay_dto import LinkDelayDTO from dtos.link_forwarding_dto import LinkForwardingDTO +from dtos.link_delay_alarms_dto import LinkDelayAlarmsDTO from config.database import get_db from typing import Optional from globals import page_size @@ -128,3 +129,90 @@ async def get_link_forwardings( previous=build_url(request, prev_page), results=forwardings ) + + @staticmethod + @router.get("/delay/alarms", response_model=GenericResponseDTO[LinkDelayAlarmsDTO]) + async def get_link_delay_alarms( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Timestamp of reported alarm."), + timebin__gte: Optional[datetime] = Query( + None, description="Timestamp of reported alarm."), + timebin__lte: Optional[datetime] = Query( + None, description="Timestamp of reported alarm."), + asn: Optional[str] = Query( + None, description="ASN or IXP ID of the monitored network (see number in /network/). 
Can be a single value or a list of comma separated values."), + deviation: Optional[float] = Query( + None, description="Distance between observed delays and the past usual values normalized by median absolute deviation."), + deviation__gte: Optional[float] = Query( + None, description="Distance between observed delays and the past usual values normalized by median absolute deviation."), + deviation__lte: Optional[float] = Query( + None, description="Distance between observed delays and the past usual values normalized by median absolute deviation."), + diffmedian: Optional[float] = Query( + None, description="Difference between the link usual median RTT and the median RTT observed during the alarm."), + diffmedian__gte: Optional[float] = Query( + None, description="Difference between the link usual median RTT and the median RTT observed during the alarm."), + diffmedian__lte: Optional[float] = Query( + None, description="Difference between the link usual median RTT and the median RTT observed during the alarm."), + medianrtt: Optional[float] = Query( + None, description="Median differential RTT observed during the alarm."), + medianrtt__gte: Optional[float] = Query( + None, description="Median differential RTT observed during the alarm."), + medianrtt__lte: Optional[float] = Query( + None, description="Median differential RTT observed during the alarm."), + nbprobes: Optional[int] = Query( + None, description="Number of Atlas probes monitoring this link at the reported time window."), + nbprobes__gte: Optional[int] = Query( + None, description="Number of Atlas probes monitoring this link at the reported time window."), + nbprobes__lte: Optional[int] = Query( + None, description="Number of Atlas probes monitoring this link at the reported time window."), + link: Optional[str] = Query(None, description="Pair of IP addresses corresponding to the reported link."), + link__contains: Optional[str] = Query( + None, description="Pair of IP addresses corresponding to the 
reported link."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results.") + ) -> GenericResponseDTO[LinkDelayAlarmsDTO]: + """ + List detected link delay changes. +
      +
    • Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
    • +
    • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
    • +
    + """ + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte) + + # Convert comma-separated ASNs to list + asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None + + alarms, total_count = LinkController.service.get_link_delay_alarms( + db, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, + asn_ids=asn_list, + deviation_gte=deviation__gte, + deviation_lte=deviation__lte, + diffmedian_gte=diffmedian__gte, + diffmedian_lte=diffmedian__lte, + medianrtt_gte=medianrtt__gte, + medianrtt_lte=medianrtt__lte, + nbprobes_gte=nbprobes__gte, + nbprobes_lte=nbprobes__lte, + link=link, + link_contains=link__contains, + page=page, + order_by=ordering + ) + + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=alarms + ) diff --git a/dtos/link_delay_alarms_dto.py b/dtos/link_delay_alarms_dto.py new file mode 100644 index 0000000..bd0b771 --- /dev/null +++ b/dtos/link_delay_alarms_dto.py @@ -0,0 +1,18 @@ +from pydantic import BaseModel +from datetime import datetime +from typing import Optional, Dict, Any + + +class LinkDelayAlarmsDTO(BaseModel): + timebin: datetime + asn: int + asn_name: Optional[str] + link: str + medianrtt: float + diffmedian: float + deviation: float + nbprobes: int + msm_prb_ids: Optional[Dict[str, Any]] + + class Config: + from_attributes = True diff --git a/models/delay_alarms.py b/models/delay_alarms.py index 8b6e0a8..a3f7266 100644 --- a/models/delay_alarms.py +++ b/models/delay_alarms.py @@ -10,7 +10,6 @@ class DelayAlarms(Base): __tablename__ = 'ihr_delay_alarms' - __table_args__ = ( PrimaryKeyConstraint('id', 'timebin'), ) @@ -72,9 +71,13 @@ class DelayAlarms(Base): default=None, doc='List of Atlas measurement IDs and probe IDs used to compute this alarm.' 
) - asn_id = Column( + asn = Column( + "asn_id", BigInteger, nullable=False, doc='ASN or IXPID of the reported network.' ) + asn_relation = relationship('ASN', + primaryjoin='DelayAlarms.asn == ASN.number', + foreign_keys=[asn]) diff --git a/repositories/delay_alarms_repository.py b/repositories/delay_alarms_repository.py new file mode 100644 index 0000000..9eea97b --- /dev/null +++ b/repositories/delay_alarms_repository.py @@ -0,0 +1,74 @@ +from datetime import datetime +from sqlalchemy.orm import Session +from models.delay_alarms import DelayAlarms +from typing import Optional, List, Tuple +from globals import page_size + + +class DelayAlarmsRepository: + def get_all( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + deviation_gte: Optional[float] = None, + deviation_lte: Optional[float] = None, + diffmedian_gte: Optional[float] = None, + diffmedian_lte: Optional[float] = None, + medianrtt_gte: Optional[float] = None, + medianrtt_lte: Optional[float] = None, + nbprobes_gte: Optional[int] = None, + nbprobes_lte: Optional[int] = None, + link: Optional[str] = None, + link_contains: Optional[str] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[DelayAlarms], int]: + query = db.query(DelayAlarms).join(DelayAlarms.asn_relation) + + # Apply filters + if timebin_gte: + query = query.filter(DelayAlarms.timebin >= timebin_gte) + if timebin_lte: + query = query.filter(DelayAlarms.timebin <= timebin_lte) + if asn_ids: + query = query.filter(DelayAlarms.asn.in_(asn_ids)) + if deviation_gte: + query = query.filter(DelayAlarms.deviation >= deviation_gte) + if deviation_lte: + query = query.filter(DelayAlarms.deviation <= deviation_lte) + if diffmedian_gte: + query = query.filter(DelayAlarms.diffmedian >= diffmedian_gte) + if diffmedian_lte: + query = query.filter(DelayAlarms.diffmedian <= diffmedian_lte) + if medianrtt_gte: + query = 
query.filter(DelayAlarms.medianrtt >= medianrtt_gte) + if medianrtt_lte: + query = query.filter(DelayAlarms.medianrtt <= medianrtt_lte) + if nbprobes_gte: + query = query.filter(DelayAlarms.nbprobes >= nbprobes_gte) + if nbprobes_lte: + query = query.filter(DelayAlarms.nbprobes <= nbprobes_lte) + if link: + query = query.filter(DelayAlarms.link == link) + if link_contains: + query = query.filter(DelayAlarms.link.contains(link_contains)) + + total_count = query.count() + + # Apply ordering + if order_by and hasattr(DelayAlarms, order_by.replace('-', '')): + if order_by.startswith('-'): + query = query.order_by( + getattr(DelayAlarms, order_by[1:]).desc()) + else: + query = query.order_by(getattr(DelayAlarms, order_by)) + else: + query = query.order_by(DelayAlarms.timebin.desc()) + + # Apply pagination + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/link_service.py b/services/link_service.py index b379660..1f7da3d 100644 --- a/services/link_service.py +++ b/services/link_service.py @@ -5,12 +5,15 @@ from dtos.link_forwarding_dto import LinkForwardingDTO from typing import Optional, List, Tuple from datetime import datetime +from repositories.delay_alarms_repository import DelayAlarmsRepository +from dtos.link_delay_alarms_dto import LinkDelayAlarmsDTO class LinkService: def __init__(self): self.delay_repository = DelayRepository() self.forwarding_repository = ForwardingRepository() + self.delay_alarms_repository = DelayAlarmsRepository() def get_link_delays( self, @@ -71,3 +74,56 @@ def get_link_forwardings( magnitude=forwarding.magnitude, asn_name=forwarding.asn_relation.name if forwarding.asn_relation else None ) for forwarding in forwardings], total_count + + def get_link_delay_alarms( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + deviation_gte: Optional[float] = None, 
+ deviation_lte: Optional[float] = None, + diffmedian_gte: Optional[float] = None, + diffmedian_lte: Optional[float] = None, + medianrtt_gte: Optional[float] = None, + medianrtt_lte: Optional[float] = None, + nbprobes_gte: Optional[int] = None, + nbprobes_lte: Optional[int] = None, + link: Optional[str] = None, + link_contains: Optional[str] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[LinkDelayAlarmsDTO], int]: + """ + Get link delay alarms data with filtering. + """ + alarms, total_count = self.delay_alarms_repository.get_all( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_ids, + deviation_gte=deviation_gte, + deviation_lte=deviation_lte, + diffmedian_gte=diffmedian_gte, + diffmedian_lte=diffmedian_lte, + medianrtt_gte=medianrtt_gte, + medianrtt_lte=medianrtt_lte, + nbprobes_gte=nbprobes_gte, + nbprobes_lte=nbprobes_lte, + link=link, + link_contains=link_contains, + page=page, + order_by=order_by + ) + + return [LinkDelayAlarmsDTO( + timebin=alarm.timebin, + asn=alarm.asn, + asn_name=alarm.asn_relation.name if alarm.asn_relation else None, + link=alarm.link, + medianrtt=alarm.medianrtt, + diffmedian=alarm.diffmedian, + deviation=alarm.deviation, + nbprobes=alarm.nbprobes, + msm_prb_ids=alarm.msm_prb_ids + ) for alarm in alarms], total_count From 33a6a4cacff5666b7f564709a6939174e064b5a4 Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 14 Jul 2025 13:45:10 +0300 Subject: [PATCH 29/43] Removed link endpoints --- controllers/link_controller.py | 218 ------------------------ dtos/link_delay_alarms_dto.py | 18 -- dtos/link_delay_dto.py | 12 -- dtos/link_forwarding_dto.py | 12 -- repositories/delay_alarms_repository.py | 74 -------- repositories/delay_repository.py | 44 ----- repositories/forwarding_repository.py | 43 ----- services/link_service.py | 129 -------------- 8 files changed, 550 deletions(-) delete mode 100644 controllers/link_controller.py delete mode 100644 dtos/link_delay_alarms_dto.py delete mode 
100644 dtos/link_delay_dto.py delete mode 100644 dtos/link_forwarding_dto.py delete mode 100644 repositories/delay_alarms_repository.py delete mode 100644 repositories/delay_repository.py delete mode 100644 repositories/forwarding_repository.py delete mode 100644 services/link_service.py diff --git a/controllers/link_controller.py b/controllers/link_controller.py deleted file mode 100644 index c4ca388..0000000 --- a/controllers/link_controller.py +++ /dev/null @@ -1,218 +0,0 @@ -from fastapi import APIRouter, Depends, Query, Request, HTTPException -from datetime import datetime -from sqlalchemy.orm import Session -from services.link_service import LinkService -from dtos.generic_response_dto import GenericResponseDTO, build_url -from dtos.link_delay_dto import LinkDelayDTO -from dtos.link_forwarding_dto import LinkForwardingDTO -from dtos.link_delay_alarms_dto import LinkDelayAlarmsDTO -from config.database import get_db -from typing import Optional -from globals import page_size -from utils import validate_timebin_params - -router = APIRouter(prefix="/link", tags=["Link"]) - - -class LinkController: - service = LinkService() - - @staticmethod - @router.get("/delay", response_model=GenericResponseDTO[LinkDelayDTO]) - async def get_link_delays( - request: Request, - db: Session = Depends(get_db), - timebin: Optional[datetime] = Query( - None, description="Timestamp of reported value."), - timebin__gte: Optional[datetime] = Query( - None, description="Timestamp of reported value."), - timebin__lte: Optional[datetime] = Query( - None, description="Timestamp of reported value."), - asn: Optional[str] = Query( - None, description="ASN or IXP ID of the monitored network (see number in /network/). Can be a single value or a list of comma separated values."), - magnitude: Optional[float] = Query( - None, description="Cumulated link delay deviation. 
Values close to zero represent usual delays for the network, whereas higher values stand for significant links congestion in the monitored network."), - page: Optional[int] = Query( - 1, ge=1, description="A page number within the paginated result set"), - ordering: Optional[str] = Query( - None, description="Which field to use when ordering the results.") - ) -> GenericResponseDTO[LinkDelayDTO]: - """ - List cumulated link delay changes (magnitude) for each monitored network. Magnitude values close to zero represent usual delays for the network, whereas higher values stand for significant links congestion in the monitored network. - The details of each congested link is available in /delay/alarms/. -
      -
    • Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
    • -
    • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
    • -
    - """ - timebin__gte, timebin__lte = validate_timebin_params( - timebin, timebin__gte, timebin__lte) - - # Convert comma-separated ASNs to list - asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None - - delays, total_count = LinkController.service.get_link_delays( - db, - timebin_gte=timebin__gte, - timebin_lte=timebin__lte, - asn_ids=asn_list, - magnitude=magnitude, - page=page, - order_by=ordering - ) - - # Calculate pagination - next_page = page + 1 if (page * page_size) < total_count else None - prev_page = page - 1 if page > 1 else None - - return GenericResponseDTO( - count=total_count, - next=build_url(request, next_page), - previous=build_url(request, prev_page), - results=delays - ) - - @staticmethod - @router.get("/forwarding", response_model=GenericResponseDTO[LinkForwardingDTO]) - async def get_link_forwardings( - request: Request, - db: Session = Depends(get_db), - timebin: Optional[datetime] = Query( - None, description="Timestamp of reported value."), - timebin__gte: Optional[datetime] = Query( - None, description="Timestamp of reported value."), - timebin__lte: Optional[datetime] = Query( - None, description="Timestamp of reported value."), - asn: Optional[str] = Query( - None, description="ASN or IXP ID of the monitored network (see number in /network/). Can be a single value or a list of comma separated values."), - magnitude: Optional[float] = Query( - None, description="Cumulated forwarding anomaly deviation for each monitored network. Values close to zero represent usual forwarding paths for the network, whereas higher positive (resp. negative) values stand for an increasing (resp. 
decreasing) number of paths passing through the monitored network."), - page: Optional[int] = Query( - 1, ge=1, description="A page number within the paginated result set"), - ordering: Optional[str] = Query( - None, description="Which field to use when ordering the results.") - ) -> GenericResponseDTO[LinkForwardingDTO]: - """ - List cumulated forwarding anomaly deviation (magnitude) for each monitored network. - Magnitude values close to zero represent usual forwarding paths for the network, whereas - higher positive (resp. negative) values stand for an increasing (resp. decreasing) - number of paths passing through the monitored network. - The details of each forwarding anomaly is available in /forwarding/alarms/. -
      -
    • Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
    • -
    • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
    • -
    - """ - timebin__gte, timebin__lte = validate_timebin_params( - timebin, timebin__gte, timebin__lte) - - # Convert comma-separated ASNs to list - asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None - - forwardings, total_count = LinkController.service.get_link_forwardings( - db, - timebin_gte=timebin__gte, - timebin_lte=timebin__lte, - asn_ids=asn_list, - magnitude=magnitude, - page=page, - order_by=ordering - ) - - # Calculate pagination - next_page = page + 1 if (page * page_size) < total_count else None - prev_page = page - 1 if page > 1 else None - - return GenericResponseDTO( - count=total_count, - next=build_url(request, next_page), - previous=build_url(request, prev_page), - results=forwardings - ) - - @staticmethod - @router.get("/delay/alarms", response_model=GenericResponseDTO[LinkDelayAlarmsDTO]) - async def get_link_delay_alarms( - request: Request, - db: Session = Depends(get_db), - timebin: Optional[datetime] = Query( - None, description="Timestamp of reported alarm."), - timebin__gte: Optional[datetime] = Query( - None, description="Timestamp of reported alarm."), - timebin__lte: Optional[datetime] = Query( - None, description="Timestamp of reported alarm."), - asn: Optional[str] = Query( - None, description="ASN or IXP ID of the monitored network (see number in /network/). 
Can be a single value or a list of comma separated values."), - deviation: Optional[float] = Query( - None, description="Distance between observed delays and the past usual values normalized by median absolute deviation."), - deviation__gte: Optional[float] = Query( - None, description="Distance between observed delays and the past usual values normalized by median absolute deviation."), - deviation__lte: Optional[float] = Query( - None, description="Distance between observed delays and the past usual values normalized by median absolute deviation."), - diffmedian: Optional[float] = Query( - None, description="Difference between the link usual median RTT and the median RTT observed during the alarm."), - diffmedian__gte: Optional[float] = Query( - None, description="Difference between the link usual median RTT and the median RTT observed during the alarm."), - diffmedian__lte: Optional[float] = Query( - None, description="Difference between the link usual median RTT and the median RTT observed during the alarm."), - medianrtt: Optional[float] = Query( - None, description="Median differential RTT observed during the alarm."), - medianrtt__gte: Optional[float] = Query( - None, description="Median differential RTT observed during the alarm."), - medianrtt__lte: Optional[float] = Query( - None, description="Median differential RTT observed during the alarm."), - nbprobes: Optional[int] = Query( - None, description="Number of Atlas probes monitoring this link at the reported time window."), - nbprobes__gte: Optional[int] = Query( - None, description="Number of Atlas probes monitoring this link at the reported time window."), - nbprobes__lte: Optional[int] = Query( - None, description="Number of Atlas probes monitoring this link at the reported time window."), - link: Optional[str] = Query(None, description="Pair of IP addresses corresponding to the reported link."), - link__contains: Optional[str] = Query( - None, description="Pair of IP addresses corresponding to the 
reported link."), - page: Optional[int] = Query( - 1, ge=1, description="A page number within the paginated result set"), - ordering: Optional[str] = Query( - None, description="Which field to use when ordering the results.") - ) -> GenericResponseDTO[LinkDelayAlarmsDTO]: - """ - List detected link delay changes. -
      -
    • Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
    • -
    • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
    • -
    - """ - timebin__gte, timebin__lte = validate_timebin_params( - timebin, timebin__gte, timebin__lte) - - # Convert comma-separated ASNs to list - asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None - - alarms, total_count = LinkController.service.get_link_delay_alarms( - db, - timebin_gte=timebin__gte, - timebin_lte=timebin__lte, - asn_ids=asn_list, - deviation_gte=deviation__gte, - deviation_lte=deviation__lte, - diffmedian_gte=diffmedian__gte, - diffmedian_lte=diffmedian__lte, - medianrtt_gte=medianrtt__gte, - medianrtt_lte=medianrtt__lte, - nbprobes_gte=nbprobes__gte, - nbprobes_lte=nbprobes__lte, - link=link, - link_contains=link__contains, - page=page, - order_by=ordering - ) - - next_page = page + 1 if (page * page_size) < total_count else None - prev_page = page - 1 if page > 1 else None - - return GenericResponseDTO( - count=total_count, - next=build_url(request, next_page), - previous=build_url(request, prev_page), - results=alarms - ) diff --git a/dtos/link_delay_alarms_dto.py b/dtos/link_delay_alarms_dto.py deleted file mode 100644 index bd0b771..0000000 --- a/dtos/link_delay_alarms_dto.py +++ /dev/null @@ -1,18 +0,0 @@ -from pydantic import BaseModel -from datetime import datetime -from typing import Optional, Dict, Any - - -class LinkDelayAlarmsDTO(BaseModel): - timebin: datetime - asn: int - asn_name: Optional[str] - link: str - medianrtt: float - diffmedian: float - deviation: float - nbprobes: int - msm_prb_ids: Optional[Dict[str, Any]] - - class Config: - from_attributes = True diff --git a/dtos/link_delay_dto.py b/dtos/link_delay_dto.py deleted file mode 100644 index 7e38e48..0000000 --- a/dtos/link_delay_dto.py +++ /dev/null @@ -1,12 +0,0 @@ -from pydantic import BaseModel -from datetime import datetime - - -class LinkDelayDTO(BaseModel): - timebin: datetime - asn: int - magnitude: float - asn_name: str - - class Config: - from_attributes = True diff --git a/dtos/link_forwarding_dto.py b/dtos/link_forwarding_dto.py deleted 
file mode 100644 index 98453c6..0000000 --- a/dtos/link_forwarding_dto.py +++ /dev/null @@ -1,12 +0,0 @@ -from pydantic import BaseModel -from datetime import datetime - - -class LinkForwardingDTO(BaseModel): - timebin: datetime - asn: int - magnitude: float - asn_name: str - - class Config: - from_attributes = True diff --git a/repositories/delay_alarms_repository.py b/repositories/delay_alarms_repository.py deleted file mode 100644 index 9eea97b..0000000 --- a/repositories/delay_alarms_repository.py +++ /dev/null @@ -1,74 +0,0 @@ -from datetime import datetime -from sqlalchemy.orm import Session -from models.delay_alarms import DelayAlarms -from typing import Optional, List, Tuple -from globals import page_size - - -class DelayAlarmsRepository: - def get_all( - self, - db: Session, - timebin_gte: Optional[datetime] = None, - timebin_lte: Optional[datetime] = None, - asn_ids: Optional[List[int]] = None, - deviation_gte: Optional[float] = None, - deviation_lte: Optional[float] = None, - diffmedian_gte: Optional[float] = None, - diffmedian_lte: Optional[float] = None, - medianrtt_gte: Optional[float] = None, - medianrtt_lte: Optional[float] = None, - nbprobes_gte: Optional[int] = None, - nbprobes_lte: Optional[int] = None, - link: Optional[str] = None, - link_contains: Optional[str] = None, - page: int = 1, - order_by: Optional[str] = None - ) -> Tuple[List[DelayAlarms], int]: - query = db.query(DelayAlarms).join(DelayAlarms.asn_relation) - - # Apply filters - if timebin_gte: - query = query.filter(DelayAlarms.timebin >= timebin_gte) - if timebin_lte: - query = query.filter(DelayAlarms.timebin <= timebin_lte) - if asn_ids: - query = query.filter(DelayAlarms.asn.in_(asn_ids)) - if deviation_gte: - query = query.filter(DelayAlarms.deviation >= deviation_gte) - if deviation_lte: - query = query.filter(DelayAlarms.deviation <= deviation_lte) - if diffmedian_gte: - query = query.filter(DelayAlarms.diffmedian >= diffmedian_gte) - if diffmedian_lte: - query = 
query.filter(DelayAlarms.diffmedian <= diffmedian_lte) - if medianrtt_gte: - query = query.filter(DelayAlarms.medianrtt >= medianrtt_gte) - if medianrtt_lte: - query = query.filter(DelayAlarms.medianrtt <= medianrtt_lte) - if nbprobes_gte: - query = query.filter(DelayAlarms.nbprobes >= nbprobes_gte) - if nbprobes_lte: - query = query.filter(DelayAlarms.nbprobes <= nbprobes_lte) - if link: - query = query.filter(DelayAlarms.link == link) - if link_contains: - query = query.filter(DelayAlarms.link.contains(link_contains)) - - total_count = query.count() - - # Apply ordering - if order_by and hasattr(DelayAlarms, order_by.replace('-', '')): - if order_by.startswith('-'): - query = query.order_by( - getattr(DelayAlarms, order_by[1:]).desc()) - else: - query = query.order_by(getattr(DelayAlarms, order_by)) - else: - query = query.order_by(DelayAlarms.timebin.desc()) - - # Apply pagination - offset = (page - 1) * page_size - results = query.offset(offset).limit(page_size).all() - - return results, total_count diff --git a/repositories/delay_repository.py b/repositories/delay_repository.py deleted file mode 100644 index 3136267..0000000 --- a/repositories/delay_repository.py +++ /dev/null @@ -1,44 +0,0 @@ -from datetime import datetime -from sqlalchemy.orm import Session -from models.delay import Delay -from typing import Optional, List, Tuple -from globals import page_size -from sqlalchemy.orm import joinedload - - -class DelayRepository: - def get_all( - self, - db: Session, - timebin_gte: Optional[datetime] = None, - timebin_lte: Optional[datetime] = None, - asn_ids: Optional[List[int]] = None, - magnitude: Optional[float] = None, - page: int = 1, - order_by: Optional[str] = None - ) -> Tuple[List[Delay], int]: - query = db.query(Delay).join(Delay.asn_relation) - - # Apply filters - if timebin_gte: - query = query.filter(Delay.timebin >= timebin_gte) - if timebin_lte: - query = query.filter(Delay.timebin <= timebin_lte) - if asn_ids: - query = 
query.filter(Delay.asn.in_(asn_ids)) - if magnitude is not None: - query = query.filter(Delay.magnitude == magnitude) - - total_count = query.count() - - # Apply ordering - if order_by and hasattr(Delay, order_by): - query = query.order_by(getattr(Delay, order_by)) - else: - query = query.order_by(Delay.timebin) - - # Apply pagination - offset = (page - 1) * page_size - results = query.offset(offset).limit(page_size).all() - - return results, total_count diff --git a/repositories/forwarding_repository.py b/repositories/forwarding_repository.py deleted file mode 100644 index 46fde4e..0000000 --- a/repositories/forwarding_repository.py +++ /dev/null @@ -1,43 +0,0 @@ -from datetime import datetime -from sqlalchemy.orm import Session -from models.forwarding import Forwarding -from typing import Optional, List, Tuple -from globals import page_size - - -class ForwardingRepository: - def get_all( - self, - db: Session, - timebin_gte: Optional[datetime] = None, - timebin_lte: Optional[datetime] = None, - asn_ids: Optional[List[int]] = None, - magnitude: Optional[float] = None, - page: int = 1, - order_by: Optional[str] = None - ) -> Tuple[List[Forwarding], int]: - query = db.query(Forwarding).join(Forwarding.asn_relation) - - # Apply filters - if timebin_gte: - query = query.filter(Forwarding.timebin >= timebin_gte) - if timebin_lte: - query = query.filter(Forwarding.timebin <= timebin_lte) - if asn_ids: - query = query.filter(Forwarding.asn.in_(asn_ids)) - if magnitude is not None: - query = query.filter(Forwarding.magnitude == magnitude) - - total_count = query.count() - - # Apply ordering - if order_by and hasattr(Forwarding, order_by): - query = query.order_by(getattr(Forwarding, order_by)) - else: - query = query.order_by(Forwarding.timebin) - - # Apply pagination - offset = (page - 1) * page_size - results = query.offset(offset).limit(page_size).all() - - return results, total_count diff --git a/services/link_service.py b/services/link_service.py deleted file mode 
100644 index 1f7da3d..0000000 --- a/services/link_service.py +++ /dev/null @@ -1,129 +0,0 @@ -from sqlalchemy.orm import Session -from repositories.delay_repository import DelayRepository -from dtos.link_delay_dto import LinkDelayDTO -from repositories.forwarding_repository import ForwardingRepository -from dtos.link_forwarding_dto import LinkForwardingDTO -from typing import Optional, List, Tuple -from datetime import datetime -from repositories.delay_alarms_repository import DelayAlarmsRepository -from dtos.link_delay_alarms_dto import LinkDelayAlarmsDTO - - -class LinkService: - def __init__(self): - self.delay_repository = DelayRepository() - self.forwarding_repository = ForwardingRepository() - self.delay_alarms_repository = DelayAlarmsRepository() - - def get_link_delays( - self, - db: Session, - timebin_gte: Optional[datetime] = None, - timebin_lte: Optional[datetime] = None, - asn_ids: Optional[List[int]] = None, - magnitude: Optional[float] = None, - page: int = 1, - order_by: Optional[str] = None - ) -> Tuple[List[LinkDelayDTO], int]: - """ - Get link delay data with filtering. - """ - delays, total_count = self.delay_repository.get_all( - db, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, - asn_ids=asn_ids, - magnitude=magnitude, - page=page, - order_by=order_by - ) - - return [LinkDelayDTO( - timebin=delay.timebin, - asn=delay.asn, - magnitude=delay.magnitude, - asn_name=delay.asn_relation.name if delay.asn_relation else None - ) for delay in delays], total_count - - def get_link_forwardings( - self, - db: Session, - timebin_gte: Optional[datetime] = None, - timebin_lte: Optional[datetime] = None, - asn_ids: Optional[List[int]] = None, - magnitude: Optional[float] = None, - page: int = 1, - order_by: Optional[str] = None - ) -> Tuple[List[LinkForwardingDTO], int]: - """ - Get link forwarding data with filtering. 
- """ - forwardings, total_count = self.forwarding_repository.get_all( - db, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, - asn_ids=asn_ids, - magnitude=magnitude, - page=page, - order_by=order_by - ) - - return [LinkForwardingDTO( - timebin=forwarding.timebin, - asn=forwarding.asn, - magnitude=forwarding.magnitude, - asn_name=forwarding.asn_relation.name if forwarding.asn_relation else None - ) for forwarding in forwardings], total_count - - def get_link_delay_alarms( - self, - db: Session, - timebin_gte: Optional[datetime] = None, - timebin_lte: Optional[datetime] = None, - asn_ids: Optional[List[int]] = None, - deviation_gte: Optional[float] = None, - deviation_lte: Optional[float] = None, - diffmedian_gte: Optional[float] = None, - diffmedian_lte: Optional[float] = None, - medianrtt_gte: Optional[float] = None, - medianrtt_lte: Optional[float] = None, - nbprobes_gte: Optional[int] = None, - nbprobes_lte: Optional[int] = None, - link: Optional[str] = None, - link_contains: Optional[str] = None, - page: int = 1, - order_by: Optional[str] = None - ) -> Tuple[List[LinkDelayAlarmsDTO], int]: - """ - Get link delay alarms data with filtering. 
- """ - alarms, total_count = self.delay_alarms_repository.get_all( - db, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, - asn_ids=asn_ids, - deviation_gte=deviation_gte, - deviation_lte=deviation_lte, - diffmedian_gte=diffmedian_gte, - diffmedian_lte=diffmedian_lte, - medianrtt_gte=medianrtt_gte, - medianrtt_lte=medianrtt_lte, - nbprobes_gte=nbprobes_gte, - nbprobes_lte=nbprobes_lte, - link=link, - link_contains=link_contains, - page=page, - order_by=order_by - ) - - return [LinkDelayAlarmsDTO( - timebin=alarm.timebin, - asn=alarm.asn, - asn_name=alarm.asn_relation.name if alarm.asn_relation else None, - link=alarm.link, - medianrtt=alarm.medianrtt, - diffmedian=alarm.diffmedian, - deviation=alarm.deviation, - nbprobes=alarm.nbprobes, - msm_prb_ids=alarm.msm_prb_ids - ) for alarm in alarms], total_count From 6b17dfec1be415d9468136a92ff72897a81f3f7c Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 14 Jul 2025 16:23:31 +0300 Subject: [PATCH 30/43] Added /tr-hegemony endpoint --- controllers/tr_hegemony_controller.py | 90 ++++++++++++++++++++++++++ dtos/tr_hegemony_dto.py | 33 ++++++++++ models/tr_hegemony.py | 15 ++++- repositories/tr_hegemony_repository.py | 79 ++++++++++++++++++++++ services/tr_hegemony_service.py | 51 +++++++++++++++ 5 files changed, 265 insertions(+), 3 deletions(-) create mode 100644 controllers/tr_hegemony_controller.py create mode 100644 dtos/tr_hegemony_dto.py create mode 100644 repositories/tr_hegemony_repository.py create mode 100644 services/tr_hegemony_service.py diff --git a/controllers/tr_hegemony_controller.py b/controllers/tr_hegemony_controller.py new file mode 100644 index 0000000..15be054 --- /dev/null +++ b/controllers/tr_hegemony_controller.py @@ -0,0 +1,90 @@ +from fastapi import APIRouter, Depends, Query, Request +from sqlalchemy.orm import Session +from services.tr_hegemony_service import TRHegemonyService +from dtos.generic_response_dto import GenericResponseDTO, build_url +from dtos.tr_hegemony_dto import 
TRHegemonyDTO +from config.database import get_db +from typing import Optional +from datetime import datetime +from globals import page_size +from utils import prepare_timebin_range + +router = APIRouter(prefix="/tr_hegemony", tags=["TR Hegemony"]) + + +class TRHegemonyController: + service = TRHegemonyService() + + @staticmethod + @router.get("/", response_model=GenericResponseDTO[TRHegemonyDTO]) + async def get_hegemony( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Timestamp of reported value. The computation uses four weeks of data, hence 2022-03-28T00:00 means the values are based on data from 2022-02-28T00:00 to 2022-03-28T00:00."), + timebin__gte: Optional[datetime] = Query( + None, description="Timestamp of reported value. The computation uses four weeks of data, hence 2022-03-28T00:00 means the values are based on data from 2022-02-28T00:00 to 2022-03-28T00:00."), + timebin__lte: Optional[datetime] = Query( + None, description="Timestamp of reported value. The computation uses four weeks of data, hence 2022-03-28T00:00 means the values are based on data from 2022-02-28T00:00 to 2022-03-28T00:00."), + origin_name: Optional[str] = Query( + None, description="Origin name. It can be a single value or a list of values separated by the pipe character (i.e. | ). The meaning of values depends on the identifier type:
    • type=AS: ASN
    • type=IX: PeeringDB IX ID
    • type=MB: IXP member (format: ix_id;asn)
    • type=IP: Interface IP of an IXP member
    "), + dependency_name: Optional[str] = Query( + None, description="Dependency name. It can be a single value or a list of values separated by the pipe character (i.e. | ). The meaning of values depends on the identifier type:
    • type=AS: ASN
    • type=IX: PeeringDB IX ID
    • type=MB: IXP member (format: ix_id;asn)
    • type=IP: Interface IP of an IXP member
    "), + origin_type: Optional[str] = Query( + None, description="Type of the origin. Possible values are:
    • AS: Autonomous System
    • IX: IXP
    • MB: IXP member
    • IP: IXP member IP
    "), + dependency_type: Optional[str] = Query( + None, description="Type of the dependency. Possible values are:
    • AS: Autonomous System
    • IX: IXP
    • MB: IXP member
    • IP: IXP member IP
    "), + origin_af: Optional[int] = Query( + None, description="Address family (IP version) of the origin. Values are either 4 or 6."), + dependency_af: Optional[int] = Query( + None, description="Address family (IP version) of the dependency. Values are either 4 or 6."), + hege: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the origin. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + hege__gte: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the origin. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + hege__lte: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the origin. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + af: Optional[int] = Query( + None, description="Address family (IP version), values are either 4 or 6."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results.") + ) -> GenericResponseDTO[TRHegemonyDTO]: + """ + List AS and IXP dependencies for all ASes visible in monitored traceroute data. 
+ + """ + timebin__gte, timebin__lte = prepare_timebin_range( + timebin, timebin__gte, timebin__lte, max_days=31) + + hegemony_data, total_count = TRHegemonyController.service.get_tr_hegemony( + db, + timebin=timebin, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, + origin_names=origin_name, + dependency_names=dependency_name, + origin_type=origin_type, + dependency_type=dependency_type, + origin_af=origin_af, + dependency_af=dependency_af, + hege=hege, + hege_gte=hege__gte, + hege_lte=hege__lte, + af=af, + page=page, + order_by=ordering + ) + + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=hegemony_data + ) diff --git a/dtos/tr_hegemony_dto.py b/dtos/tr_hegemony_dto.py new file mode 100644 index 0000000..9c5c809 --- /dev/null +++ b/dtos/tr_hegemony_dto.py @@ -0,0 +1,33 @@ +from pydantic import BaseModel +from datetime import datetime + + +class TRHegemonyDTO(BaseModel): + timebin: datetime + origin_type: str + origin_name: str + origin_af: int + dependency_type: str + dependency_name: str + dependency_af: int + hege: float + af: int + nbsamples: int + + class Config: + from_attributes = True + + @staticmethod + def from_model(tr_hegemony): + return TRHegemonyDTO( + timebin=tr_hegemony.timebin, + origin_type=tr_hegemony.origin_relation.type, + origin_name=tr_hegemony.origin_relation.name, + origin_af=tr_hegemony.origin_relation.af, + dependency_type=tr_hegemony.dependency_relation.type, + dependency_name=tr_hegemony.dependency_relation.name, + dependency_af=tr_hegemony.dependency_relation.af, + hege=tr_hegemony.hege, + af=tr_hegemony.af, + nbsamples=tr_hegemony.nbsamples + ) \ No newline at end of file diff --git a/models/tr_hegemony.py b/models/tr_hegemony.py index 6be8e6c..85c6ec8 100644 --- a/models/tr_hegemony.py +++ b/models/tr_hegemony.py @@ -1,6 
+1,7 @@ +from models.tr_hegemony_identifier import TRHegemonyIdentifier from sqlalchemy import Column, BigInteger, Integer, Float, ForeignKey, PrimaryKeyConstraint from sqlalchemy.dialects.postgresql import TIMESTAMP -from sqlalchemy.orm import relationship +from sqlalchemy.orm import relationship, foreign, remote from config.database import Base @@ -8,7 +9,7 @@ class TRHegemony(Base): __tablename__ = 'ihr_tr_hegemony' __table_args__ = ( - PrimaryKeyConstraint('id','timebin'), + PrimaryKeyConstraint('id', 'timebin'), ) __hypertable__ = { @@ -49,4 +50,12 @@ class TRHegemony(Base): nullable=False, doc='Dependent network, it can be any public ASN. Retrieve all dependencies of a network by setting only this parameter and a timebin.') - \ No newline at end of file + dependency_relation = relationship('TRHegemonyIdentifier', + primaryjoin=lambda: foreign(TRHegemony.dependency_id) == remote( + TRHegemonyIdentifier.id), + foreign_keys=[dependency_id]) + + origin_relation = relationship('TRHegemonyIdentifier', + primaryjoin=lambda: foreign(TRHegemony.origin_id) == remote( + TRHegemonyIdentifier.id), + foreign_keys=[origin_id]) diff --git a/repositories/tr_hegemony_repository.py b/repositories/tr_hegemony_repository.py new file mode 100644 index 0000000..a6f802b --- /dev/null +++ b/repositories/tr_hegemony_repository.py @@ -0,0 +1,79 @@ +from sqlalchemy.orm import Session, aliased +from sqlalchemy import and_, or_ +from models.tr_hegemony import TRHegemony +from datetime import datetime +from typing import List, Optional, Tuple +from globals import page_size + + +class TRHegemonyRepository: + def get_tr_hegemony( + self, + db: Session, + timebin: Optional[datetime] = None, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + origin_names: Optional[str] = None, + dependency_names: Optional[str] = None, + origin_type: Optional[str] = None, + dependency_type: Optional[str] = None, + origin_af: Optional[int] = None, + dependency_af: Optional[int] 
= None, + hege: Optional[float] = None, + hege_gte: Optional[float] = None, + hege_lte: Optional[float] = None, + af: Optional[int] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[TRHegemony], int]: + + Origin = aliased(TRHegemony.origin_relation.property.mapper.class_) + Dependency = aliased( + TRHegemony.dependency_relation.property.mapper.class_) + + query = db.query(TRHegemony)\ + .join(Origin, TRHegemony.origin_relation)\ + .join(Dependency, TRHegemony.dependency_relation) + + if timebin: + query = query.filter(TRHegemony.timebin == timebin) + if timebin_gte: + query = query.filter(TRHegemony.timebin >= timebin_gte) + if timebin_lte: + query = query.filter(TRHegemony.timebin <= timebin_lte) + + if origin_names: + names = origin_names.split('|') + query = query.filter(Origin.name.in_(names)) + if origin_type: + query = query.filter(Origin.type == origin_type) + if origin_af: + query = query.filter(Origin.af == origin_af) + + if dependency_names: + names = dependency_names.split('|') + query = query.filter(Dependency.name.in_(names)) + if dependency_type: + query = query.filter(Dependency.type == dependency_type) + if dependency_af: + query = query.filter(Dependency.af == dependency_af) + + if hege: + query = query.filter(TRHegemony.hege == hege) + if hege_gte: + query = query.filter(TRHegemony.hege >= hege_gte) + if hege_lte: + query = query.filter(TRHegemony.hege <= hege_lte) + + if af: + query = query.filter(TRHegemony.af == af) + + total_count = query.count() + + if order_by and hasattr(TRHegemony, order_by): + query = query.order_by(getattr(TRHegemony, order_by)) + + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/tr_hegemony_service.py b/services/tr_hegemony_service.py new file mode 100644 index 0000000..a93bc5e --- /dev/null +++ b/services/tr_hegemony_service.py @@ -0,0 +1,51 @@ +from sqlalchemy.orm import Session +from 
repositories.tr_hegemony_repository import TRHegemonyRepository +from dtos.tr_hegemony_dto import TRHegemonyDTO +from typing import Optional, List, Tuple +from datetime import datetime + + +class TRHegemonyService: + def __init__(self): + self.tr_hegemony_repository = TRHegemonyRepository() + + def get_tr_hegemony( + self, + db: Session, + timebin: Optional[datetime] = None, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + origin_names: Optional[str] = None, + dependency_names: Optional[str] = None, + origin_type: Optional[str] = None, + dependency_type: Optional[str] = None, + origin_af: Optional[int] = None, + dependency_af: Optional[int] = None, + hege: Optional[float] = None, + hege_gte: Optional[float] = None, + hege_lte: Optional[float] = None, + af: Optional[int] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[TRHegemonyDTO], int]: + + hegemony_data, total_count = self.tr_hegemony_repository.get_tr_hegemony( + db, + timebin=timebin, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + origin_names=origin_names, + dependency_names=dependency_names, + origin_type=origin_type, + dependency_type=dependency_type, + origin_af=origin_af, + dependency_af=dependency_af, + hege=hege, + hege_gte=hege_gte, + hege_lte=hege_lte, + af=af, + page=page, + order_by=order_by + ) + + return [TRHegemonyDTO.from_model(hegemony) for hegemony in hegemony_data], total_count \ No newline at end of file From 3c567303454e6e255117d7bfa12c00b60bf452ac Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 14 Jul 2025 21:51:37 +0300 Subject: [PATCH 31/43] Added /disco/events endpoint --- controllers/disco_controller.py | 99 +++++++++++++++++++++++++ dtos/disco_events_dto.py | 35 +++++++++ dtos/disco_probes_dto.py | 17 +++++ models/disco_events.py | 6 +- models/disco_probes.py | 15 ++-- repositories/disco_events_repository.py | 90 ++++++++++++++++++++++ services/disco_service.py | 61 +++++++++++++++ 7 files changed, 313 
insertions(+), 10 deletions(-) create mode 100644 controllers/disco_controller.py create mode 100644 dtos/disco_events_dto.py create mode 100644 dtos/disco_probes_dto.py create mode 100644 repositories/disco_events_repository.py create mode 100644 services/disco_service.py diff --git a/controllers/disco_controller.py b/controllers/disco_controller.py new file mode 100644 index 0000000..82e5493 --- /dev/null +++ b/controllers/disco_controller.py @@ -0,0 +1,99 @@ +from fastapi import APIRouter, Depends, Query, Request +from sqlalchemy.orm import Session +from services.disco_service import DiscoService +from dtos.generic_response_dto import GenericResponseDTO, build_url +from dtos.disco_events_dto import DiscoEventsDTO +from config.database import get_db +from typing import Optional +from datetime import datetime +from globals import page_size + +router = APIRouter(prefix="/disco", tags=["Disco"]) + + +class DiscoController: + service = DiscoService() + + @staticmethod + @router.get("/events", response_model=GenericResponseDTO[DiscoEventsDTO]) + async def get_events( + request: Request, + db: Session = Depends(get_db), + streamname: Optional[str] = Query( + None, description="Name of the topological (ASN) or geographical area where the network disconnection happened."), + streamtype: Optional[str] = Query( + None, description="Granularity of the detected event. The possible values are asn, country, admin1, and admin2. Admin1 represents a wider area than admin2, the exact definition might change from one country to another. 
For example 'California, US' is an admin1 stream and 'San Francisco County, California, US' is an admin2 stream."), + starttime: Optional[datetime] = Query( + None, description="Estimated start time of the network disconnection."), + starttime__gte: Optional[datetime] = Query( + None, description="Estimated start time of the network disconnection."), + starttime__lte: Optional[datetime] = Query( + None, description="Estimated start time of the network disconnection."), + endtime: Optional[datetime] = Query( + None, description="Estimated end time of the network disconnection. Equal to starttime if the end of the event is unknown."), + endtime__gte: Optional[datetime] = Query( + None, description="Estimated end time of the network disconnection. Equal to starttime if the end of the event is unknown."), + endtime__lte: Optional[datetime] = Query( + None, description="Estimated end time of the network disconnection. Equal to starttime if the end of the event is unknown."), + avglevel: Optional[float] = Query( + None, description="Score representing the coordination of disconnected probes. Higher values stand for a large number of Atlas probes that disconnected in a very short time frame. Events with an avglevel lower than 10 are likely to be false positives detection."), + avglevel__gte: Optional[float] = Query( + None, description="Score representing the coordination of disconnected probes. Higher values stand for a large number of Atlas probes that disconnected in a very short time frame. Events with an avglevel lower than 10 are likely to be false positives detection."), + avglevel__lte: Optional[float] = Query( + None, description="Score representing the coordination of disconnected probes. Higher values stand for a large number of Atlas probes that disconnected in a very short time frame. 
Events with an avglevel lower than 10 are likely to be false positives detection."), + nbdiscoprobes: Optional[int] = Query( + None, description="Number of Atlas probes that disconnected around the reported start time."), + nbdiscoprobes__gte: Optional[int] = Query( + None, description="Number of Atlas probes that disconnected around the reported start time."), + nbdiscoprobes__lte: Optional[int] = Query( + None, description="Number of Atlas probes that disconnected around the reported start time."), + totalprobes: Optional[int] = Query( + None, description="Total number of Atlas probes active in the reported stream (ASN, Country, or geographical area)."), + totalprobes__gte: Optional[int] = Query( + None, description="Total number of Atlas probes active in the reported stream (ASN, Country, or geographical area)."), + totalprobes__lte: Optional[int] = Query( + None, description="Total number of Atlas probes active in the reported stream (ASN, Country, or geographical area)."), + ongoing: Optional[str] = Query( + None, description="Deprecated, this value is unused"), + page: Optional[int] = Query(1, ge=1, description="A page number within the paginated result set."), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results") + ) -> GenericResponseDTO[DiscoEventsDTO]: + """ + List network disconnections detected with RIPE Atlas. + These events have different levels of granularity - it can be at a network level (AS), city, or country level. 
+ """ + + events_data, total_count = DiscoController.service.get_disco_events( + db, + streamname=streamname, + streamtype=streamtype, + starttime=starttime, + starttime_gte=starttime__gte, + starttime_lte=starttime__lte, + endtime=endtime, + endtime_gte=endtime__gte, + endtime_lte=endtime__lte, + avglevel=avglevel, + avglevel_gte=avglevel__gte, + avglevel_lte=avglevel__lte, + nbdiscoprobes=nbdiscoprobes, + nbdiscoprobes_gte=nbdiscoprobes__gte, + nbdiscoprobes_lte=nbdiscoprobes__lte, + totalprobes=totalprobes, + totalprobes_gte=totalprobes__gte, + totalprobes_lte=totalprobes__lte, + ongoing=ongoing, + page=page, + order_by=ordering + ) + + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=events_data + ) diff --git a/dtos/disco_events_dto.py b/dtos/disco_events_dto.py new file mode 100644 index 0000000..f6aeaad --- /dev/null +++ b/dtos/disco_events_dto.py @@ -0,0 +1,35 @@ +from dtos.disco_probes_dto import DiscoProbesDTO +from pydantic import BaseModel +from datetime import datetime +from typing import List, Optional + +class DiscoEventsDTO(BaseModel): + id: int + streamtype: str + streamname: str + starttime: datetime + endtime: datetime + avglevel: float + nbdiscoprobes: int + totalprobes: int + ongoing: bool + discoprobes: List[DiscoProbesDTO] + + class Config: + from_attributes = True + + @staticmethod + def from_model(disco_event): + return DiscoEventsDTO( + id=disco_event.id, + streamtype=disco_event.streamtype, + streamname=disco_event.streamname, + starttime=disco_event.starttime, + endtime=disco_event.endtime, + avglevel=disco_event.avglevel, + nbdiscoprobes=disco_event.nbdiscoprobes, + totalprobes=disco_event.totalprobes, + ongoing=disco_event.ongoing, + discoprobes=[DiscoProbesDTO.from_orm( + probe) for probe in disco_event.probes] + ) diff --git 
a/dtos/disco_probes_dto.py b/dtos/disco_probes_dto.py new file mode 100644 index 0000000..030b9ff --- /dev/null +++ b/dtos/disco_probes_dto.py @@ -0,0 +1,17 @@ +from pydantic import BaseModel +from datetime import datetime + + +class DiscoProbesDTO(BaseModel): + probe_id: int + starttime: datetime + endtime: datetime + level: float + ipv4: str + prefixv4: str + event: int + lat: float + lon: float + + class Config: + from_attributes = True diff --git a/models/disco_events.py b/models/disco_events.py index 656872b..8450c65 100644 --- a/models/disco_events.py +++ b/models/disco_events.py @@ -3,6 +3,7 @@ ) from config.database import Base from sqlalchemy.dialects.postgresql import TIMESTAMP +from sqlalchemy.orm import relationship class DiscoEvents(Base): @@ -16,7 +17,6 @@ class DiscoEvents(Base): 'columns': ['streamtype', 'streamname', 'starttime', 'endtime'] }] - id = Column(Integer, primary_key=True, autoincrement=True) mongoid = Column( String(24), @@ -79,4 +79,6 @@ class DiscoEvents(Base): doc='Deprecated, this value is unused' ) - + probes = relationship("DiscoProbes", back_populates="event_relation") + +from models.disco_probes import DiscoProbes diff --git a/models/disco_probes.py b/models/disco_probes.py index 59a4a1d..b781dbf 100644 --- a/models/disco_probes.py +++ b/models/disco_probes.py @@ -38,11 +38,10 @@ class DiscoProbes(Base): lon = Column(Float, default=0.0, nullable=False, doc='Longitude of the probe during the network detection as reported by RIPE Atlas.') - event_id = Column(Integer, - ForeignKey('ihr_disco_events.id', ondelete='CASCADE', - name='fk_disco_probes_event_id'), - nullable=False, - doc='ID of the network disconnection event where this probe is reported.') - - event = relationship('DiscoEvents', foreign_keys=[ - event_id], backref='discoprobes') + event = Column('event_id', Integer, + ForeignKey('ihr_disco_events.id', ondelete='CASCADE', + name='fk_disco_probes_event_id'), + nullable=False, + doc='ID of the network disconnection event 
where this probe is reported.') + + event_relation = relationship("DiscoEvents", back_populates="probes") diff --git a/repositories/disco_events_repository.py b/repositories/disco_events_repository.py new file mode 100644 index 0000000..d6e6c00 --- /dev/null +++ b/repositories/disco_events_repository.py @@ -0,0 +1,90 @@ +from sqlalchemy.orm import Session, joinedload +from sqlalchemy import and_ +from models.disco_events import DiscoEvents +from datetime import datetime +from typing import List, Optional, Tuple +from globals import page_size + + +class DiscoEventsRepository: + def get_disco_events( + self, + db: Session, + streamname: Optional[str] = None, + streamtype: Optional[str] = None, + starttime: Optional[datetime] = None, + starttime_gte: Optional[datetime] = None, + starttime_lte: Optional[datetime] = None, + endtime: Optional[datetime] = None, + endtime_gte: Optional[datetime] = None, + endtime_lte: Optional[datetime] = None, + avglevel: Optional[float] = None, + avglevel_gte: Optional[float] = None, + avglevel_lte: Optional[float] = None, + nbdiscoprobes: Optional[int] = None, + nbdiscoprobes_gte: Optional[int] = None, + nbdiscoprobes_lte: Optional[int] = None, + totalprobes: Optional[int] = None, + totalprobes_gte: Optional[int] = None, + totalprobes_lte: Optional[int] = None, + ongoing: Optional[str] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[DiscoEvents], int]: + + query = db.query(DiscoEvents) + + if streamname: + query = query.filter(DiscoEvents.streamname == streamname) + if streamtype: + query = query.filter(DiscoEvents.streamtype == streamtype) + + if starttime: + query = query.filter(DiscoEvents.starttime == starttime) + if starttime_gte: + query = query.filter(DiscoEvents.starttime >= starttime_gte) + if starttime_lte: + query = query.filter(DiscoEvents.starttime <= starttime_lte) + + if endtime: + query = query.filter(DiscoEvents.endtime == endtime) + if endtime_gte: + query = query.filter(DiscoEvents.endtime 
>= endtime_gte) + if endtime_lte: + query = query.filter(DiscoEvents.endtime <= endtime_lte) + + if avglevel: + query = query.filter(DiscoEvents.avglevel == avglevel) + if avglevel_gte: + query = query.filter(DiscoEvents.avglevel >= avglevel_gte) + if avglevel_lte: + query = query.filter(DiscoEvents.avglevel <= avglevel_lte) + + if nbdiscoprobes: + query = query.filter(DiscoEvents.nbdiscoprobes == nbdiscoprobes) + if nbdiscoprobes_gte: + query = query.filter( + DiscoEvents.nbdiscoprobes >= nbdiscoprobes_gte) + if nbdiscoprobes_lte: + query = query.filter( + DiscoEvents.nbdiscoprobes <= nbdiscoprobes_lte) + + if totalprobes: + query = query.filter(DiscoEvents.totalprobes == totalprobes) + if totalprobes_gte: + query = query.filter(DiscoEvents.totalprobes >= totalprobes_gte) + if totalprobes_lte: + query = query.filter(DiscoEvents.totalprobes <= totalprobes_lte) + + if ongoing: + query = query.filter(DiscoEvents.ongoing == ongoing) + + total_count = query.count() + + if order_by and hasattr(DiscoEvents, order_by): + query = query.order_by(getattr(DiscoEvents, order_by)) + + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/disco_service.py b/services/disco_service.py new file mode 100644 index 0000000..8a965dc --- /dev/null +++ b/services/disco_service.py @@ -0,0 +1,61 @@ +from sqlalchemy.orm import Session +from repositories.disco_events_repository import DiscoEventsRepository +from dtos.disco_events_dto import DiscoEventsDTO +from typing import List, Optional, Tuple +from datetime import datetime + + +class DiscoService: + def __init__(self): + self.disco_events_repository = DiscoEventsRepository() + + def get_disco_events( + self, + db: Session, + streamname: Optional[str] = None, + streamtype: Optional[str] = None, + starttime: Optional[datetime] = None, + starttime_gte: Optional[datetime] = None, + starttime_lte: Optional[datetime] = None, + endtime: Optional[datetime] = 
None, + endtime_gte: Optional[datetime] = None, + endtime_lte: Optional[datetime] = None, + avglevel: Optional[float] = None, + avglevel_gte: Optional[float] = None, + avglevel_lte: Optional[float] = None, + nbdiscoprobes: Optional[int] = None, + nbdiscoprobes_gte: Optional[int] = None, + nbdiscoprobes_lte: Optional[int] = None, + totalprobes: Optional[int] = None, + totalprobes_gte: Optional[int] = None, + totalprobes_lte: Optional[int] = None, + ongoing: Optional[str] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[DiscoEventsDTO], int]: + + events_data, total_count = self.disco_events_repository.get_disco_events( + db, + streamname=streamname, + streamtype=streamtype, + starttime=starttime, + starttime_gte=starttime_gte, + starttime_lte=starttime_lte, + endtime=endtime, + endtime_gte=endtime_gte, + endtime_lte=endtime_lte, + avglevel=avglevel, + avglevel_gte=avglevel_gte, + avglevel_lte=avglevel_lte, + nbdiscoprobes=nbdiscoprobes, + nbdiscoprobes_gte=nbdiscoprobes_gte, + nbdiscoprobes_lte=nbdiscoprobes_lte, + totalprobes=totalprobes, + totalprobes_gte=totalprobes_gte, + totalprobes_lte=totalprobes_lte, + ongoing=ongoing, + page=page, + order_by=order_by + ) + + return [DiscoEventsDTO.from_model(event) for event in events_data], total_count From a62fda6dc5d991b9b9aa2aea2561b3a4cdd695e5 Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 14 Jul 2025 23:27:31 +0300 Subject: [PATCH 32/43] Added /hegemony/alarms endpoints --- controllers/hegemony_cone_controller.py | 70 ----------- controllers/hegemony_controller.py | 137 +++++++++++++++++++++ dtos/hegemony_alarms_dto.py | 15 +++ models/hegemony_alarms.py | 16 ++- repositories/hegemony_alarms_repository.py | 51 ++++++++ services/hegemony_cone_service.py | 40 ------ services/hegemony_service.py | 82 ++++++++++++ 7 files changed, 297 insertions(+), 114 deletions(-) delete mode 100644 controllers/hegemony_cone_controller.py create mode 100644 controllers/hegemony_controller.py create mode 100644 
dtos/hegemony_alarms_dto.py create mode 100644 repositories/hegemony_alarms_repository.py delete mode 100644 services/hegemony_cone_service.py create mode 100644 services/hegemony_service.py diff --git a/controllers/hegemony_cone_controller.py b/controllers/hegemony_cone_controller.py deleted file mode 100644 index 7a7b316..0000000 --- a/controllers/hegemony_cone_controller.py +++ /dev/null @@ -1,70 +0,0 @@ -from fastapi import APIRouter, Depends, Query, Request, Response, HTTPException -from datetime import datetime, timedelta -from sqlalchemy.orm import Session -from services.hegemony_cone_service import HegemonyConeService -from dtos.generic_response_dto import GenericResponseDTO, build_url -from dtos.hegemony_cone_dto import HegemonyConeDTO -from config.database import get_db -from typing import Optional, List -from globals import page_size -from utils import * - -router = APIRouter(prefix="/hegemony/cones", tags=["Hegemony Cones"]) - - -class HegemonyConeController: - service = HegemonyConeService() - - @staticmethod - @router.get("/", response_model=GenericResponseDTO[HegemonyConeDTO]) - async def get_hegemony_cones( - request: Request, - db: Session = Depends(get_db), - timebin: Optional[datetime] = Query( - None, description="Get results for exact timestamp"), - timebin__gte: Optional[datetime] = Query( - None, description="Get results after or equal to this timestamp"), - timebin__lte: Optional[datetime] = Query( - None, description="Get results before or equal to this timestamp"), - asn: Optional[str] = Query( - None, description="Autonomous System Number (ASN). 
Can be a single value or a list of comma separated values."), - af: Optional[int] = Query( - None, description="Address Family (IP version) either 4 or 6"), - page: Optional[int] = Query( - 1, ge=1, description="A page number within the paginated result set"), - ordering: Optional[str] = Query( - None, description="Which field to use when ordering the results") - ) -> GenericResponseDTO[HegemonyConeDTO]: - """ - The number of networks that depend on a given network. This is similar to CAIDA's customer cone size. -
      -
    • Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
    • -
    • Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
    • -
    - networks). - """ - timebin__gte, timebin__lte = validate_timebin_params(timebin, timebin__gte, timebin__lte) - - # Convert comma-separated ASNs to list - asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None - - cones, total_count = HegemonyConeController.service.get_hegemony_cones( - db, - timebin_gte=timebin__gte, - timebin_lte=timebin__lte, - asn_ids=asn_list, - af=af, - page=page, - order_by=ordering - ) - - # Calculate pagination - next_page = page + 1 if (page * page_size) < total_count else None - prev_page = page - 1 if page > 1 else None - - return GenericResponseDTO( - count=total_count, - next=build_url(request, next_page), - previous=build_url(request, prev_page), - results=cones - ) diff --git a/controllers/hegemony_controller.py b/controllers/hegemony_controller.py new file mode 100644 index 0000000..8e95ee6 --- /dev/null +++ b/controllers/hegemony_controller.py @@ -0,0 +1,137 @@ +from fastapi import APIRouter, Depends, Query, Request, Response, HTTPException +from datetime import datetime, timedelta +from sqlalchemy.orm import Session +from services.hegemony_service import HegemonyService +from dtos.generic_response_dto import GenericResponseDTO, build_url +from dtos.hegemony_cone_dto import HegemonyConeDTO +from dtos.hegemony_alarms_dto import HegemonyAlarmsDTO +from config.database import get_db +from typing import Optional, List +from globals import page_size +from utils import * + +router = APIRouter(prefix="/hegemony", tags=["Hegemony"]) + + +class HegemonyController: + service = HegemonyService() + + @staticmethod + @router.get("/cones", response_model=GenericResponseDTO[HegemonyConeDTO]) + async def get_hegemony_cones( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Get results for exact timestamp"), + timebin__gte: Optional[datetime] = Query( + None, description="Get results after or equal to this timestamp"), + timebin__lte: Optional[datetime] = Query( + 
None, description="Get results before or equal to this timestamp"), + asn: Optional[str] = Query( + None, description="Autonomous System Number (ASN). Can be a single value or a list of comma separated values."), + af: Optional[int] = Query( + None, description="Address Family (IP version) either 4 or 6"), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results") + ) -> GenericResponseDTO[HegemonyConeDTO]: + """ + The number of networks that depend on a given network. This is similar to CAIDA's customer cone size. +
+
+    - Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
+
+    - Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
+
    + networks). + """ + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte) + + # Convert comma-separated ASNs to list + asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None + + cones, total_count = HegemonyController.service.get_hegemony_cones( + db, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, + asn_ids=asn_list, + af=af, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=cones + ) + + @staticmethod + @router.get("/alarms", response_model=GenericResponseDTO[HegemonyAlarmsDTO]) + async def get_hegemony_alarms( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Timestamp of reported alarm."), + timebin__gte: Optional[datetime] = Query( + None, description="Timestamp of reported alarm."), + timebin__lte: Optional[datetime] = Query( + None, description="Timestamp of reported alarm."), + asn: Optional[str] = Query( + None, description="ASN of the anomalous dependency (transit network). Can be a single value or a list of comma separated values."), + originasn: Optional[str] = Query( + None, description="ASN of the reported dependent network. 
Can be a single value or a list of comma separated values."), + af: Optional[int] = Query( + None, description="Address Family (IP version), values are either 4 or 6."), + deviation__gte: Optional[float] = Query( + None, description="Significance of the AS Hegemony change."), + deviation__lte: Optional[float] = Query( + None, description="Significance of the AS Hegemony change."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results") + ) -> GenericResponseDTO[HegemonyAlarmsDTO]: + """ + List significant AS dependency changes detected by IHR anomaly detector. +
+
+    - Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
+
+    - Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
+
    + """ + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte) + + # Convert comma-separated ASNs to lists + asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None + originasn_list = [int(x.strip()) + for x in originasn.split(",")] if originasn else None + + alarms, total_count = HegemonyController.service.get_hegemony_alarms( + db, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, + asn_ids=asn_list, + originasn_ids=originasn_list, + af=af, + deviation_gte=deviation__gte, + deviation_lte=deviation__lte, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=alarms + ) diff --git a/dtos/hegemony_alarms_dto.py b/dtos/hegemony_alarms_dto.py new file mode 100644 index 0000000..c9469eb --- /dev/null +++ b/dtos/hegemony_alarms_dto.py @@ -0,0 +1,15 @@ +from pydantic import BaseModel +from datetime import datetime + + +class HegemonyAlarmsDTO(BaseModel): + timebin: datetime + originasn: int + asn: int + deviation: float + af: int + asn_name: str + originasn_name: str + + class Config: + from_attributes = True diff --git a/models/hegemony_alarms.py b/models/hegemony_alarms.py index e2261d5..5e69ef0 100644 --- a/models/hegemony_alarms.py +++ b/models/hegemony_alarms.py @@ -38,10 +38,18 @@ class HegemonyAlarms(Base): af = Column(Integer, nullable=False, doc='Address Family (IP version), values are either 4 or 6.') - asn_id = Column(BigInteger, - nullable=False, - doc='ASN of the anomalous dependency (transit network).') + asn = Column('asn_id', BigInteger, + nullable=False, + doc='ASN of the anomalous dependency (transit network).') - originasn_id = Column(BigInteger, + originasn= Column('originasn_id',BigInteger, nullable=False, doc='ASN of the reported 
dependent network.') + + asn_relation = relationship('ASN', + primaryjoin='HegemonyAlarms.asn == ASN.number', + foreign_keys=[asn]) + + originasn_relation = relationship('ASN', + primaryjoin='HegemonyAlarms.originasn == ASN.number', + foreign_keys=[originasn]) diff --git a/repositories/hegemony_alarms_repository.py b/repositories/hegemony_alarms_repository.py new file mode 100644 index 0000000..bc4c957 --- /dev/null +++ b/repositories/hegemony_alarms_repository.py @@ -0,0 +1,51 @@ +from datetime import datetime +from sqlalchemy.orm import Session +from models.hegemony_alarms import HegemonyAlarms +from typing import Optional, List, Tuple +from globals import page_size + + +class HegemonyAlarmsRepository: + def get_all( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + originasn_ids: Optional[List[int]] = None, + af: Optional[int] = None, + deviation_gte: Optional[float] = None, + deviation_lte: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyAlarms], int]: + query = db.query(HegemonyAlarms) + + # Apply filters + if timebin_gte: + query = query.filter(HegemonyAlarms.timebin >= timebin_gte) + if timebin_lte: + query = query.filter(HegemonyAlarms.timebin <= timebin_lte) + if asn_ids: + query = query.filter(HegemonyAlarms.asn.in_(asn_ids)) + if originasn_ids: + query = query.filter( + HegemonyAlarms.originasn.in_(originasn_ids)) + if af is not None: + query = query.filter(HegemonyAlarms.af == af) + if deviation_gte: + query = query.filter(HegemonyAlarms.deviation >= deviation_gte) + if deviation_lte: + query = query.filter(HegemonyAlarms.deviation <= deviation_lte) + + total_count = query.count() + + # Apply ordering + if order_by and hasattr(HegemonyAlarms, order_by): + query = query.order_by(getattr(HegemonyAlarms, order_by)) + + # Apply pagination + offset = (page - 1) * page_size + results = 
query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/hegemony_cone_service.py b/services/hegemony_cone_service.py deleted file mode 100644 index 4ac278d..0000000 --- a/services/hegemony_cone_service.py +++ /dev/null @@ -1,40 +0,0 @@ -from sqlalchemy.orm import Session -from repositories.hegemony_cone_repository import HegemonyConeRepository -from dtos.hegemony_cone_dto import HegemonyConeDTO -from typing import Optional, List, Tuple -from datetime import datetime - - -class HegemonyConeService: - def __init__(self): - self.repository = HegemonyConeRepository() - - def get_hegemony_cones( - self, - db: Session, - timebin_gte: Optional[datetime] = None, - timebin_lte: Optional[datetime] = None, - asn_ids: Optional[List[int]] = None, - af: Optional[int] = None, - page: int = 1, - order_by: Optional[str] = None - ) -> Tuple[List[HegemonyConeDTO], int]: - """ - Get hegemony cone data with time-based filtering. - """ - cones, total_count = self.repository.get_all( - db, - timebin_gte=timebin_gte, - timebin_lte=timebin_lte, - asn_ids=asn_ids, - af=af, - page=page, - order_by=order_by - ) - - return [HegemonyConeDTO( - timebin=cone.timebin, - asn=cone.asn, - conesize=cone.conesize, - af=cone.af - ) for cone in cones], total_count diff --git a/services/hegemony_service.py b/services/hegemony_service.py new file mode 100644 index 0000000..1e3bf89 --- /dev/null +++ b/services/hegemony_service.py @@ -0,0 +1,82 @@ +from sqlalchemy.orm import Session +from repositories.hegemony_cone_repository import HegemonyConeRepository +from dtos.hegemony_cone_dto import HegemonyConeDTO +from repositories.hegemony_alarms_repository import HegemonyAlarmsRepository +from dtos.hegemony_alarms_dto import HegemonyAlarmsDTO +from typing import Optional, List, Tuple +from datetime import datetime + + +class HegemonyService: + def __init__(self): + self.hegemony_cone_repository = HegemonyConeRepository() + self.hegemony_alarms_repository = 
HegemonyAlarmsRepository() + + def get_hegemony_cones( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + af: Optional[int] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyConeDTO], int]: + """ + Get hegemony cone data with time-based filtering. + """ + cones, total_count = self.hegemony_cone_repository.get_all( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_ids, + af=af, + page=page, + order_by=order_by + ) + + return [HegemonyConeDTO( + timebin=cone.timebin, + asn=cone.asn, + conesize=cone.conesize, + af=cone.af + ) for cone in cones], total_count + + def get_hegemony_alarms( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + originasn_ids: Optional[List[int]] = None, + af: Optional[int] = None, + deviation_gte: Optional[float] = None, + deviation_lte: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyAlarmsDTO], int]: + """ + Get hegemony alarms data with filtering. 
+ """ + alarms, total_count = self.hegemony_alarms_repository.get_all( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_ids, + originasn_ids=originasn_ids, + af=af, + deviation_gte=deviation_gte, + deviation_lte=deviation_lte, + page=page, + order_by=order_by + ) + + return [HegemonyAlarmsDTO( + timebin=alarm.timebin, + originasn=alarm.originasn, + asn=alarm.asn, + deviation=alarm.deviation, + af=alarm.af, + asn_name=alarm.asn_relation.name if alarm.asn_relation else None, + originasn_name=alarm.originasn_relation.name if alarm.originasn_relation else None + ) for alarm in alarms], total_count From b96500da1ec7c08fbdd885d464dc5f9b9af70414 Mon Sep 17 00:00:00 2001 From: ibraam Date: Tue, 15 Jul 2025 13:09:22 +0300 Subject: [PATCH 33/43] Added /hegemony/countries endpoint --- controllers/hegemony_controller.py | 84 ++++++++++++++++++++- dtos/hegemony_country_dto.py | 17 +++++ models/hegemony_country.py | 24 +++--- repositories/hegemony_country_repository.py | 59 +++++++++++++++ services/hegemony_service.py | 50 ++++++++++++ 5 files changed, 224 insertions(+), 10 deletions(-) create mode 100644 dtos/hegemony_country_dto.py create mode 100644 repositories/hegemony_country_repository.py diff --git a/controllers/hegemony_controller.py b/controllers/hegemony_controller.py index 8e95ee6..abdb908 100644 --- a/controllers/hegemony_controller.py +++ b/controllers/hegemony_controller.py @@ -1,4 +1,5 @@ -from fastapi import APIRouter, Depends, Query, Request, Response, HTTPException +from dtos.hegemony_country_dto import HegemonyCountryDTO +from fastapi import APIRouter, Depends, Query, Request, Response, HTTPException, status from datetime import datetime, timedelta from sqlalchemy.orm import Session from services.hegemony_service import HegemonyService @@ -135,3 +136,84 @@ async def get_hegemony_alarms( previous=build_url(request, prev_page), results=alarms ) + + @staticmethod + @router.get("/countries", 
response_model=GenericResponseDTO[HegemonyCountryDTO]) + async def get_hegemony_countries( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin__gte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin__lte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + asn: Optional[str] = Query( + None, description="Dependency. Network commonly seen in BGP paths towards monitored country. Can be a single value or a list of comma separated values."), + country: Optional[str] = Query( + None, description="Monitored country or region (e.g. EU and AP) as defined by its set of ASes registered in registeries delegated files. Can be a single value or a list of comma separated values. Retrieve all dependencies of a country by setting a single value and a timebin."), + af: Optional[int] = Query( + None, description="Address Family (IP version), values are either 4 or 6."), + weightscheme: Optional[str] = Query( + None, description="Scheme used to aggregate AS Hegemony scores. 'as' gives equal weight to each AS, 'eyeball' put emphasis on large eyeball networks."), + transitonly: Optional[bool] = Query( + None, description="True means that the last AS (origin AS) in BGP paths is ignored, thus focusing only on transit ASes."), + hege: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the monitored country. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + hege__gte: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the monitored country. 
The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + hege__lte: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the monitored country. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results") + ) -> GenericResponseDTO[HegemonyCountryDTO]: + """ + List AS dependencies of countries. A country infrastructure is defined by its ASes registed in RIRs delegated files. Emphasis can be put on eyeball users with the eyeball weighting scheme (i.e. weightscheme='eyeball'). +
+
+    - Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
+
+    - Limitations: At most 31 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
+
    + """ + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte, max_days=31) + + # Ensure either `asn` or `country` is provided + if not asn and not country: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Required parameter missing. Please provide one of the following parameters: ['country', 'asn']" + ) + + # Convert comma-separated values to lists + asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None + country_list = [x.strip() + for x in country.split(",")] if country else None + + countries, total_count = HegemonyController.service.get_hegemony_countries( + db, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, + asn_ids=asn_list, + countries=country_list, + af=af, + weightscheme=weightscheme, + transitonly=transitonly, + hege=hege, + hege_gte=hege__gte, + hege_lte=hege__lte, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=countries + ) diff --git a/dtos/hegemony_country_dto.py b/dtos/hegemony_country_dto.py new file mode 100644 index 0000000..43d6717 --- /dev/null +++ b/dtos/hegemony_country_dto.py @@ -0,0 +1,17 @@ +from pydantic import BaseModel +from datetime import datetime + + +class HegemonyCountryDTO(BaseModel): + timebin: datetime + country: str + asn: int + hege: float + af: int + asn_name: str + weight: float + weightscheme: str + transitonly: bool + + class Config: + from_attributes = True diff --git a/models/hegemony_country.py b/models/hegemony_country.py index 38c5c96..543a08d 100644 --- a/models/hegemony_country.py +++ b/models/hegemony_country.py @@ -1,4 +1,4 @@ -from sqlalchemy import Column, Integer, Float, String, Boolean, ForeignKey, BigInteger,PrimaryKeyConstraint +from sqlalchemy 
import Column, Integer, Float, String, Boolean, ForeignKey, BigInteger, PrimaryKeyConstraint from sqlalchemy.dialects.postgresql import TIMESTAMP from sqlalchemy.orm import relationship from config.database import Base @@ -8,7 +8,7 @@ class HegemonyCountry(Base): __tablename__ = 'ihr_hegemony_country' __table_args__ = ( - PrimaryKeyConstraint('id','timebin'), + PrimaryKeyConstraint('id', 'timebin'), ) __hypertable__ = { @@ -47,12 +47,18 @@ class HegemonyCountry(Base): transitonly = Column(Boolean, default=False, nullable=False, doc='If True, then origin ASNs of BGP path are ignored (focus only on transit networks).') - asn_id = Column(BigInteger, - nullable=False, - doc='Dependency. Network commonly seen in BGP paths towards monitored country.') + asn = Column('asn_id', BigInteger, + nullable=False, + doc='Dependency. Network commonly seen in BGP paths towards monitored country.') - country_id = Column(String(4), - nullable=False, - doc='Monitored country. Retrieve all dependencies of a country by setting only this parameter and a timebin.') + country = Column('country_id', String(4), + nullable=False, + doc='Monitored country. 
Retrieve all dependencies of a country by setting only this parameter and a timebin.') - \ No newline at end of file + asn_relation = relationship('ASN', + primaryjoin='HegemonyCountry.asn == ASN.number', + foreign_keys=[asn]) + + country_relation = relationship('Country', + primaryjoin='HegemonyCountry.country == Country.code', + foreign_keys=[country]) diff --git a/repositories/hegemony_country_repository.py b/repositories/hegemony_country_repository.py new file mode 100644 index 0000000..075a664 --- /dev/null +++ b/repositories/hegemony_country_repository.py @@ -0,0 +1,59 @@ +from datetime import datetime +from sqlalchemy.orm import Session +from models.hegemony_country import HegemonyCountry +from typing import Optional, List, Tuple +from globals import page_size + + +class HegemonyCountryRepository: + def get_all( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + countries: Optional[List[str]] = None, + af: Optional[int] = None, + weightscheme: Optional[str] = None, + transitonly: Optional[bool] = None, + hege: Optional[float] = None, + hege_gte: Optional[float] = None, + hege_lte: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyCountry], int]: + query = db.query(HegemonyCountry) + + # Apply filters + if timebin_gte: + query = query.filter(HegemonyCountry.timebin >= timebin_gte) + if timebin_lte: + query = query.filter(HegemonyCountry.timebin <= timebin_lte) + if asn_ids: + query = query.filter(HegemonyCountry.asn.in_(asn_ids)) + if countries: + query = query.filter(HegemonyCountry.country.in_(countries)) + if af is not None: + query = query.filter(HegemonyCountry.af == af) + if weightscheme is not None: + query = query.filter(HegemonyCountry.weightscheme == weightscheme) + if transitonly is not None: + query = query.filter(HegemonyCountry.transitonly == transitonly) + if hege is not None: + query = 
query.filter(HegemonyCountry.hege == hege) + if hege_gte is not None: + query = query.filter(HegemonyCountry.hege >= hege_gte) + if hege_lte is not None: + query = query.filter(HegemonyCountry.hege <= hege_lte) + + total_count = query.count() + + # Apply ordering + if order_by and hasattr(HegemonyCountry, order_by): + query = query.order_by(getattr(HegemonyCountry, order_by)) + + # Apply pagination + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/hegemony_service.py b/services/hegemony_service.py index 1e3bf89..aad008a 100644 --- a/services/hegemony_service.py +++ b/services/hegemony_service.py @@ -3,6 +3,8 @@ from dtos.hegemony_cone_dto import HegemonyConeDTO from repositories.hegemony_alarms_repository import HegemonyAlarmsRepository from dtos.hegemony_alarms_dto import HegemonyAlarmsDTO +from repositories.hegemony_country_repository import HegemonyCountryRepository +from dtos.hegemony_country_dto import HegemonyCountryDTO from typing import Optional, List, Tuple from datetime import datetime @@ -11,6 +13,7 @@ class HegemonyService: def __init__(self): self.hegemony_cone_repository = HegemonyConeRepository() self.hegemony_alarms_repository = HegemonyAlarmsRepository() + self.hegemony_country_repository = HegemonyCountryRepository() def get_hegemony_cones( self, @@ -80,3 +83,50 @@ def get_hegemony_alarms( asn_name=alarm.asn_relation.name if alarm.asn_relation else None, originasn_name=alarm.originasn_relation.name if alarm.originasn_relation else None ) for alarm in alarms], total_count + + def get_hegemony_countries( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + countries: Optional[List[str]] = None, + af: Optional[int] = None, + weightscheme: Optional[str] = None, + transitonly: Optional[bool] = None, + hege: Optional[float] = None, + hege_gte: Optional[float] = None, 
+ hege_lte: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyCountryDTO], int]: + """ + Get hegemony country data with filtering. + """ + countries_data, total_count = self.hegemony_country_repository.get_all( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_ids, + countries=countries, + af=af, + weightscheme=weightscheme, + transitonly=transitonly, + hege=hege, + hege_gte=hege_gte, + hege_lte=hege_lte, + page=page, + order_by=order_by + ) + + return [HegemonyCountryDTO( + timebin=country.timebin, + country=country.country, + asn=country.asn, + hege=country.hege, + af=country.af, + asn_name=country.asn_relation.name if country.asn_relation else None, + weight=country.weight, + weightscheme=country.weightscheme, + transitonly=country.transitonly + ) for country in countries_data], total_count From 8905a2dea288c6952ec7487b2a003fba17d67fe9 Mon Sep 17 00:00:00 2001 From: ibraam Date: Tue, 15 Jul 2025 14:01:29 +0300 Subject: [PATCH 34/43] Added /hegemony endpoint --- controllers/hegemony_controller.py | 76 +++++++++++++++++++++++++++++ dtos/hegemony_dto.py | 15 ++++++ models/hegemony.py | 19 +++++--- repositories/hegemony_repository.py | 53 ++++++++++++++++++++ services/hegemony_service.py | 44 +++++++++++++++++ 5 files changed, 200 insertions(+), 7 deletions(-) create mode 100644 dtos/hegemony_dto.py create mode 100644 repositories/hegemony_repository.py diff --git a/controllers/hegemony_controller.py b/controllers/hegemony_controller.py index abdb908..8364428 100644 --- a/controllers/hegemony_controller.py +++ b/controllers/hegemony_controller.py @@ -1,4 +1,5 @@ from dtos.hegemony_country_dto import HegemonyCountryDTO +from dtos.hegemony_dto import HegemonyDTO from fastapi import APIRouter, Depends, Query, Request, Response, HTTPException, status from datetime import datetime, timedelta from sqlalchemy.orm import Session @@ -17,6 +18,81 @@ class HegemonyController: service = HegemonyService() 
+ @staticmethod + @router.get("/", response_model=GenericResponseDTO[HegemonyDTO]) + async def get_hegemony( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin__gte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin__lte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + asn: Optional[str] = Query( + None, description="Dependency. Transit network commonly seen in BGP paths towards originasn. Can be a single value or a list of comma separated values."), + originasn: Optional[str] = Query( + None, description="Dependent network, it can be any public ASN. Can be a single value or a list of comma separated values. Retrieve all dependencies of a network by setting a single value and a timebin."), + af: Optional[int] = Query( + None, description="Address Family (IP version), values are either 4 or 6."), + hege: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the originasn. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + hege__gte: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the originasn. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + hege__lte: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the originasn. 
The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results") + ) -> GenericResponseDTO[HegemonyDTO]: + """ + List AS dependencies for all ASes visible in monitored BGP data. This endpoint also provides the AS dependency to the entire IP space (a.k.a. global graph) which is available by setting the originasn parameter to 0. +
      +
+        Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte).
+
+        Limitations: At most 7 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
+
    + """ + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte) + + # Convert comma-separated ASNs to lists + asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None + originasn_list = [int(x.strip()) + for x in originasn.split(",")] if originasn else None + + # Ensure either asn or originasn is provided + if not asn and not originasn: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Required parameter missing. Please provide one of the following parameters: ['originasn', 'asn']" + ) + + hegemony_data, total_count = HegemonyController.service.get_hegemony( + db, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, + asn_ids=asn_list, + originasn_ids=originasn_list, + af=af, + hege=hege, + hege_gte=hege__gte, + hege_lte=hege__lte, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=hegemony_data + ) + @staticmethod @router.get("/cones", response_model=GenericResponseDTO[HegemonyConeDTO]) async def get_hegemony_cones( diff --git a/dtos/hegemony_dto.py b/dtos/hegemony_dto.py new file mode 100644 index 0000000..efb4d51 --- /dev/null +++ b/dtos/hegemony_dto.py @@ -0,0 +1,15 @@ +from pydantic import BaseModel +from datetime import datetime + + +class HegemonyDTO(BaseModel): + timebin: datetime + originasn: int + asn: int + hege: float + af: int + asn_name: str + originasn_name: str + + class Config: + from_attributes = True diff --git a/models/hegemony.py b/models/hegemony.py index db3f22b..07ce68d 100644 --- a/models/hegemony.py +++ b/models/hegemony.py @@ -1,4 +1,4 @@ -from sqlalchemy import Column, BigInteger, Float, Integer, ForeignKey, PrimaryKeyConstraint +from sqlalchemy import Column, BigInteger, Float, Integer, 
PrimaryKeyConstraint from sqlalchemy.dialects.postgresql import TIMESTAMP from sqlalchemy.orm import relationship from config.database import Base @@ -47,13 +47,18 @@ class Hegemony(Base): af = Column(Integer, default=0, nullable=False, doc='Address Family (IP version), values are either 4 or 6.') - asn_id = Column(BigInteger, - nullable=False, - doc='Dependency. Transit network commonly seen in BGP paths towards originasn.') + asn = Column('asn_id', BigInteger, + nullable=False, + doc='Dependency. Transit network commonly seen in BGP paths towards originasn.') - originasn_id = Column(BigInteger, + originasn = Column('originasn_id',BigInteger, nullable=False, doc='Dependent network, it can be any public ASN. Retrieve all dependencies of a network by setting only this parameter and a timebin.') - - + asn_relation = relationship('ASN', + primaryjoin='Hegemony.asn == ASN.number', + foreign_keys=[asn]) + + originasn_relation = relationship('ASN', + primaryjoin='Hegemony.originasn == ASN.number', + foreign_keys=[originasn]) diff --git a/repositories/hegemony_repository.py b/repositories/hegemony_repository.py new file mode 100644 index 0000000..4014eb5 --- /dev/null +++ b/repositories/hegemony_repository.py @@ -0,0 +1,53 @@ +from datetime import datetime +from sqlalchemy.orm import Session +from models.hegemony import Hegemony +from typing import Optional, List, Tuple +from globals import page_size + + +class HegemonyRepository: + def get_all( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + originasn_ids: Optional[List[int]] = None, + af: Optional[int] = None, + hege: Optional[float] = None, + hege_gte: Optional[float] = None, + hege_lte: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[Hegemony], int]: + query = db.query(Hegemony) + + # Apply filters + if timebin_gte: + query = query.filter(Hegemony.timebin >= timebin_gte) + if 
timebin_lte: + query = query.filter(Hegemony.timebin <= timebin_lte) + if asn_ids: + query = query.filter(Hegemony.asn.in_(asn_ids)) + if originasn_ids: + query = query.filter(Hegemony.originasn.in_(originasn_ids)) + if af is not None: + query = query.filter(Hegemony.af == af) + if hege is not None: + query = query.filter(Hegemony.hege == hege) + if hege_gte: + query = query.filter(Hegemony.hege >= hege_gte) + if hege_lte: + query = query.filter(Hegemony.hege <= hege_lte) + + total_count = query.count() + + # Apply ordering + if order_by and hasattr(Hegemony, order_by): + query = query.order_by(getattr(Hegemony, order_by)) + + # Apply pagination + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/hegemony_service.py b/services/hegemony_service.py index aad008a..aa4e8ee 100644 --- a/services/hegemony_service.py +++ b/services/hegemony_service.py @@ -5,6 +5,8 @@ from dtos.hegemony_alarms_dto import HegemonyAlarmsDTO from repositories.hegemony_country_repository import HegemonyCountryRepository from dtos.hegemony_country_dto import HegemonyCountryDTO +from repositories.hegemony_repository import HegemonyRepository +from dtos.hegemony_dto import HegemonyDTO from typing import Optional, List, Tuple from datetime import datetime @@ -14,6 +16,7 @@ def __init__(self): self.hegemony_cone_repository = HegemonyConeRepository() self.hegemony_alarms_repository = HegemonyAlarmsRepository() self.hegemony_country_repository = HegemonyCountryRepository() + self.hegemony_repository = HegemonyRepository() def get_hegemony_cones( self, @@ -130,3 +133,44 @@ def get_hegemony_countries( weightscheme=country.weightscheme, transitonly=country.transitonly ) for country in countries_data], total_count + + def get_hegemony( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + asn_ids: Optional[List[int]] = None, + originasn_ids: 
Optional[List[int]] = None, + af: Optional[int] = None, + hege: Optional[float] = None, + hege_gte: Optional[float] = None, + hege_lte: Optional[float] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyDTO], int]: + """ + Get hegemony data with filtering. + """ + hegemony_data, total_count = self.hegemony_repository.get_all( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + asn_ids=asn_ids, + originasn_ids=originasn_ids, + af=af, + hege=hege, + hege_gte=hege_gte, + hege_lte=hege_lte, + page=page, + order_by=order_by + ) + + return [HegemonyDTO( + timebin=hegemony.timebin, + originasn=hegemony.originasn, + asn=hegemony.asn, + hege=hegemony.hege, + af=hegemony.af, + asn_name=hegemony.asn_relation.name if hegemony.asn_relation else None, + originasn_name=hegemony.originasn_relation.name if hegemony.originasn_relation else None + ) for hegemony in hegemony_data], total_count From 52337436487ce43abe8a3394a94f8a59309518ac Mon Sep 17 00:00:00 2001 From: ibraam Date: Tue, 15 Jul 2025 16:01:20 +0300 Subject: [PATCH 35/43] Added /hegemony/prefixes endpoint --- controllers/hegemony_controller.py | 102 +++++++++++++++++++++ dtos/hegemony_prefix_dto.py | 24 +++++ models/hegemony_prefix.py | 37 +++++--- repositories/hegemony_prefix_repository.py | 79 ++++++++++++++++ services/hegemony_service.py | 67 ++++++++++++++ 5 files changed, 297 insertions(+), 12 deletions(-) create mode 100644 dtos/hegemony_prefix_dto.py create mode 100644 repositories/hegemony_prefix_repository.py diff --git a/controllers/hegemony_controller.py b/controllers/hegemony_controller.py index 8364428..67e2963 100644 --- a/controllers/hegemony_controller.py +++ b/controllers/hegemony_controller.py @@ -1,5 +1,6 @@ from dtos.hegemony_country_dto import HegemonyCountryDTO from dtos.hegemony_dto import HegemonyDTO +from dtos.hegemony_prefix_dto import HegemonyPrefixDTO from fastapi import APIRouter, Depends, Query, Request, Response, HTTPException, status from datetime 
import datetime, timedelta from sqlalchemy.orm import Session @@ -293,3 +294,104 @@ async def get_hegemony_countries( previous=build_url(request, prev_page), results=countries ) + + @staticmethod + @router.get("/prefixes", response_model=GenericResponseDTO[HegemonyPrefixDTO]) + async def get_hegemony_prefixes( + request: Request, + db: Session = Depends(get_db), + timebin: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin__gte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + timebin__lte: Optional[datetime] = Query( + None, description="Timestamp of reported value."), + prefix: Optional[str] = Query( + None, description="Monitored prefix, it can be any globally reachable prefix. Can be a single value or a list of comma separated values."), + asn: Optional[str] = Query( + None, description="Dependency. Network commonly seen in BGP paths towards monitored prefix. Can be a single value or a list of comma separated values."), + originasn: Optional[str] = Query( + None, description="Origin network, it can be any public ASN. Can be a single value or a list of comma separated values."), + country: Optional[str] = Query( + None, description="Country code for prefixes as reported by Maxmind's Geolite2 geolocation database. Can be a single value or a list of comma separated values. Retrieve all dependencies of a country by setting a single value and a timebin."), + rpki_status: Optional[str] = Query( + None, description="Route origin validation state for the monitored prefix and origin AS using RPKI."), + irr_status: Optional[str] = Query( + None, description="Route origin validation state for the monitored prefix and origin AS using IRR."), + delegated_prefix_status: Optional[str] = Query( + None, description="Status of the monitored prefix in the RIR's delegated stats. 
Status other than 'assigned' are usually considered as bogons."), + delegated_asn_status: Optional[str] = Query( + None, description="Status of the origin ASN in the RIR's delegated stats. Status other than 'assigned' are usually considered as bogons."), + af: Optional[int] = Query( + None, description="Address Family (IP version), values are either 4 or 6."), + hege: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the monitored prefix. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + hege__gte: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the monitored prefix. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + hege__lte: Optional[float] = Query( + None, description="AS Hegemony is the estimated fraction of paths towards the monitored prefix. The values range between 0 and 1, low values represent a small number of path (low dependency) and values close to 1 represent strong dependencies."), + origin_only: Optional[bool] = Query( + None, description="Filter out dependency results and provide only prefix/origin ASN results"), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set"), + ordering: Optional[str] = Query( + None, description="Which field to use when ordering the results") + ) -> GenericResponseDTO[HegemonyPrefixDTO]: + """ + List AS dependencies of prefixes. +
      +
+        Required parameters: timebin or a range of timebins (using the two parameters timebin__lte and timebin__gte). And one of the following: prefix, originasn, country, rpki_status, irr_status, delegated_prefix_status, delegated_asn_status.
+
+        Limitations: At most 3 days of data can be fetched per request. For bulk downloads see: https://ihr-archive.iijlab.net/.
+
    + """ + # Ensure at least one filter is provided + if not any([prefix, originasn, country, rpki_status, irr_status, + delegated_prefix_status, delegated_asn_status]): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Required parameter missing. Please provide one of the following parameter: ['prefix', 'originasn', 'country', 'rpki_status', 'irr_status', 'delegated_prefix_status', 'delegated_asn_status']" + ) + + timebin__gte, timebin__lte = validate_timebin_params( + timebin, timebin__gte, timebin__lte, max_days=3) + + # Convert comma-separated values to lists + prefix_list = [x.strip() + for x in prefix.split(",")] if prefix else None + asn_list = [int(x.strip()) for x in asn.split(",")] if asn else None + originasn_list = [int(x.strip()) + for x in originasn.split(",")] if originasn else None + country_list = [x.strip() + for x in country.split(",")] if country else None + + prefixes, total_count = HegemonyController.service.get_hegemony_prefixes( + db, + timebin_gte=timebin__gte, + timebin_lte=timebin__lte, + prefixes=prefix_list, + asn_ids=asn_list, + originasn_ids=originasn_list, + countries=country_list, + rpki_status=rpki_status, + irr_status=irr_status, + delegated_prefix_status=delegated_prefix_status, + delegated_asn_status=delegated_asn_status, + af=af, + hege=hege, + hege_gte=hege__gte, + hege_lte=hege__lte, + origin_only=origin_only, + page=page, + order_by=ordering + ) + + # Calculate pagination + next_page = page + 1 if (page * page_size) < total_count else None + prev_page = page - 1 if page > 1 else None + + return GenericResponseDTO( + count=total_count, + next=build_url(request, next_page), + previous=build_url(request, prev_page), + results=prefixes + ) diff --git a/dtos/hegemony_prefix_dto.py b/dtos/hegemony_prefix_dto.py new file mode 100644 index 0000000..0264ce7 --- /dev/null +++ b/dtos/hegemony_prefix_dto.py @@ -0,0 +1,24 @@ +from pydantic import BaseModel +from datetime import datetime + + +class 
HegemonyPrefixDTO(BaseModel): + timebin: datetime + prefix: str + originasn: int + country: str + asn: int + hege: float + af: int + visibility: float + rpki_status: str + irr_status: str + delegated_prefix_status: str + delegated_asn_status: str + descr: str + moas: bool + originasn_name: str + asn_name: str + + class Config: + from_attributes = True diff --git a/models/hegemony_prefix.py b/models/hegemony_prefix.py index 62df125..1487916 100644 --- a/models/hegemony_prefix.py +++ b/models/hegemony_prefix.py @@ -8,7 +8,7 @@ class HegemonyPrefix(Base): __tablename__ = 'ihr_hegemony_prefix' __table_args__ = ( - PrimaryKeyConstraint('id','timebin'), + PrimaryKeyConstraint('id', 'timebin'), ) __hypertable__ = { @@ -62,7 +62,7 @@ class HegemonyPrefix(Base): doc="Status of the monitored prefix in the RIR's delegated stats. Status other than 'assigned' are usually considered as bogons.") delegated_asn_status = Column(String(32), nullable=False, - doc="Status of the origin ASN in the RIR's delegated stats. Status other than 'assigned' are usually considered as bogons.") + doc="Status of the origin ASN in the RIR's delegated stats. Status other than 'assigned' are usually considered as bogons.") descr = Column(String(64), nullable=False, doc='Prefix description from IRR (maximum 64 characters).') @@ -70,13 +70,26 @@ class HegemonyPrefix(Base): moas = Column(Boolean, default=False, nullable=False, doc='True if the prefix is originated by multiple ASNs.') - asn_id = Column(BigInteger, - nullable=False, - doc='Dependency. Network commonly seen in BGP paths towards monitored prefix.') - originasn_id = Column(BigInteger, - nullable=False, - doc='Network seen as originating the monitored prefix.') - - country_id = Column(String(4), - nullable=False, - doc="Country for the monitored prefix identified by Maxmind's Geolite2 geolocation database.") + asn = Column('asn_id', BigInteger, + nullable=False, + doc='Dependency. 
Network commonly seen in BGP paths towards monitored prefix.') + + originasn = Column('originasn_id', BigInteger, + nullable=False, + doc='Network seen as originating the monitored prefix.') + + country = Column('country_id', String(4), + nullable=False, + doc="Country for the monitored prefix identified by Maxmind's Geolite2 geolocation database.") + + asn_relation = relationship('ASN', + primaryjoin='HegemonyPrefix.asn == ASN.number', + foreign_keys=[asn]) + + originasn_relation = relationship('ASN', + primaryjoin='HegemonyPrefix.originasn == ASN.number', + foreign_keys=[originasn]) + + country_relation = relationship('Country', + primaryjoin='HegemonyPrefix.country == Country.code', + foreign_keys=[country]) diff --git a/repositories/hegemony_prefix_repository.py b/repositories/hegemony_prefix_repository.py new file mode 100644 index 0000000..8a30099 --- /dev/null +++ b/repositories/hegemony_prefix_repository.py @@ -0,0 +1,79 @@ +from datetime import datetime +from sqlalchemy.orm import Session +from models.hegemony_prefix import HegemonyPrefix +from typing import Optional, List, Tuple +from globals import page_size + + +class HegemonyPrefixRepository: + def get_all( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + prefixes: Optional[List[str]] = None, + asn_ids: Optional[List[int]] = None, + originasn_ids: Optional[List[int]] = None, + countries: Optional[List[str]] = None, + rpki_status: Optional[str] = None, + irr_status: Optional[str] = None, + delegated_prefix_status: Optional[str] = None, + delegated_asn_status: Optional[str] = None, + af: Optional[int] = None, + hege: Optional[float] = None, + hege_gte: Optional[float] = None, + hege_lte: Optional[float] = None, + origin_only: Optional[bool] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyPrefix], int]: + query = db.query(HegemonyPrefix) + + # Apply filters + if timebin_gte: + query = 
query.filter(HegemonyPrefix.timebin >= timebin_gte) + if timebin_lte: + query = query.filter(HegemonyPrefix.timebin <= timebin_lte) + if prefixes: + query = query.filter(HegemonyPrefix.prefix.in_(prefixes)) + if asn_ids: + query = query.filter(HegemonyPrefix.asn.in_(asn_ids)) + if originasn_ids: + query = query.filter(HegemonyPrefix.originasn.in_(originasn_ids)) + if countries: + query = query.filter(HegemonyPrefix.country.in_(countries)) + if rpki_status: + query = query.filter( + HegemonyPrefix.rpki_status.contains(rpki_status)) + if irr_status: + query = query.filter( + HegemonyPrefix.irr_status.contains(irr_status)) + if delegated_prefix_status: + query = query.filter( + HegemonyPrefix.delegated_prefix_status.contains(delegated_prefix_status)) + if delegated_asn_status: + query = query.filter( + HegemonyPrefix.delegated_asn_status.contains(delegated_asn_status)) + if af is not None: + query = query.filter(HegemonyPrefix.af == af) + if hege is not None: + query = query.filter(HegemonyPrefix.hege == hege) + if hege_gte is not None: + query = query.filter(HegemonyPrefix.hege >= hege_gte) + if hege_lte is not None: + query = query.filter(HegemonyPrefix.hege <= hege_lte) + if origin_only: + query = query.filter( + HegemonyPrefix.originasn == HegemonyPrefix.asn) + + total_count = query.count() + + # Apply ordering + if order_by and hasattr(HegemonyPrefix, order_by): + query = query.order_by(getattr(HegemonyPrefix, order_by)) + + # Apply pagination + offset = (page - 1) * page_size + results = query.offset(offset).limit(page_size).all() + + return results, total_count diff --git a/services/hegemony_service.py b/services/hegemony_service.py index aa4e8ee..6b1eef4 100644 --- a/services/hegemony_service.py +++ b/services/hegemony_service.py @@ -7,6 +7,8 @@ from dtos.hegemony_country_dto import HegemonyCountryDTO from repositories.hegemony_repository import HegemonyRepository from dtos.hegemony_dto import HegemonyDTO +from repositories.hegemony_prefix_repository import 
HegemonyPrefixRepository +from dtos.hegemony_prefix_dto import HegemonyPrefixDTO from typing import Optional, List, Tuple from datetime import datetime @@ -17,6 +19,7 @@ def __init__(self): self.hegemony_alarms_repository = HegemonyAlarmsRepository() self.hegemony_country_repository = HegemonyCountryRepository() self.hegemony_repository = HegemonyRepository() + self.hegemony_prefix_repository = HegemonyPrefixRepository() def get_hegemony_cones( self, @@ -174,3 +177,67 @@ def get_hegemony( asn_name=hegemony.asn_relation.name if hegemony.asn_relation else None, originasn_name=hegemony.originasn_relation.name if hegemony.originasn_relation else None ) for hegemony in hegemony_data], total_count + + def get_hegemony_prefixes( + self, + db: Session, + timebin_gte: Optional[datetime] = None, + timebin_lte: Optional[datetime] = None, + prefixes: Optional[List[str]] = None, + asn_ids: Optional[List[int]] = None, + originasn_ids: Optional[List[int]] = None, + countries: Optional[List[str]] = None, + rpki_status: Optional[str] = None, + irr_status: Optional[str] = None, + delegated_prefix_status: Optional[str] = None, + delegated_asn_status: Optional[str] = None, + af: Optional[int] = None, + hege: Optional[float] = None, + hege_gte: Optional[float] = None, + hege_lte: Optional[float] = None, + origin_only: Optional[bool] = None, + page: int = 1, + order_by: Optional[str] = None + ) -> Tuple[List[HegemonyPrefixDTO], int]: + """ + Get hegemony prefix data with filtering. 
+ """ + prefixes_data, total_count = self.hegemony_prefix_repository.get_all( + db, + timebin_gte=timebin_gte, + timebin_lte=timebin_lte, + prefixes=prefixes, + asn_ids=asn_ids, + originasn_ids=originasn_ids, + countries=countries, + rpki_status=rpki_status, + irr_status=irr_status, + delegated_prefix_status=delegated_prefix_status, + delegated_asn_status=delegated_asn_status, + af=af, + hege=hege, + hege_gte=hege_gte, + hege_lte=hege_lte, + origin_only=origin_only, + page=page, + order_by=order_by + ) + + return [HegemonyPrefixDTO( + timebin=prefix.timebin, + prefix=prefix.prefix, + originasn=prefix.originasn, + country=prefix.country, + asn=prefix.asn, + hege=prefix.hege, + af=prefix.af, + visibility=prefix.visibility, + rpki_status=prefix.rpki_status, + irr_status=prefix.irr_status, + delegated_prefix_status=prefix.delegated_prefix_status, + delegated_asn_status=prefix.delegated_asn_status, + descr=prefix.descr, + moas=prefix.moas, + originasn_name=prefix.originasn_relation.name if prefix.originasn_relation else None, + asn_name=prefix.asn_relation.name if prefix.asn_relation else None + ) for prefix in prefixes_data], total_count From b321230a154f92956392702c5b65f23a4a9fba95 Mon Sep 17 00:00:00 2001 From: ibraam Date: Fri, 25 Jul 2025 13:50:11 +0300 Subject: [PATCH 36/43] Added page size ti .env file --- README.md | 3 ++- controllers/country_controller.py | 4 ++-- controllers/disco_controller.py | 5 +++-- controllers/hegemony_controller.py | 2 +- controllers/metis_controller.py | 2 +- controllers/network_delay_controller.py | 2 +- controllers/networks_controller.py | 5 +++-- controllers/tr_hegemony_controller.py | 4 ++-- docs/project_structure.md | 2 +- globals.py | 2 -- repositories/atlas_delay_alarms_repository.py | 2 +- repositories/atlas_delay_repository.py | 2 +- repositories/atlas_location_repository.py | 3 +-- repositories/country_repository.py | 4 ++-- repositories/disco_events_repository.py | 2 +- repositories/hegemony_alarms_repository.py | 2 +- 
repositories/hegemony_cone_repository.py | 2 +- repositories/hegemony_country_repository.py | 2 +- repositories/hegemony_prefix_repository.py | 2 +- repositories/hegemony_repository.py | 2 +- repositories/metis_atlas_deployment_repository.py | 4 ++-- repositories/metis_atlas_selection_repository.py | 2 +- repositories/networks_repository.py | 6 +++--- repositories/tr_hegemony_repository.py | 2 +- utils.py | 9 +++++++++ 25 files changed, 43 insertions(+), 34 deletions(-) delete mode 100644 globals.py diff --git a/README.md b/README.md index 36c68da..177baec 100644 --- a/README.md +++ b/README.md @@ -14,13 +14,14 @@ git clone https://github.com/InternetHealthReport/ihr-api.git ### 2. Create a `.env` File -In the project root directory, create a new `.env` file to define your specific database connection string. +In the project root directory, create a new `.env` file to define your specific database connection string, proxy path and page size (number of results to return per page.). `.env` content: ```env DATABASE_URL=postgresql://:@:/ PROXY_PATH=api-dev +PAGE_SIZE = 100000 ``` diff --git a/controllers/country_controller.py b/controllers/country_controller.py index 2dbdeff..b9e1b7d 100644 --- a/controllers/country_controller.py +++ b/controllers/country_controller.py @@ -5,7 +5,7 @@ from dtos.country_dto import CountryDTO from config.database import get_db from typing import Optional -from globals import page_size +from utils import page_size # Define a router for all endpoints under /countries router = APIRouter(prefix="/countries", tags=["Countries"]) @@ -29,7 +29,7 @@ def get_all_countries( None, description="Which field to use when ordering the results") ) -> GenericResponseDTO[CountryDTO]: """Retrieves paginated countries with optional filters.""" - + page = page or 1 countries, total_count = CountryController.service.get_all_countries( db, diff --git a/controllers/disco_controller.py b/controllers/disco_controller.py index 82e5493..182a7a9 100644 --- 
a/controllers/disco_controller.py +++ b/controllers/disco_controller.py @@ -6,7 +6,7 @@ from config.database import get_db from typing import Optional from datetime import datetime -from globals import page_size +from utils import page_size router = APIRouter(prefix="/disco", tags=["Disco"]) @@ -55,7 +55,8 @@ async def get_events( None, description="Total number of Atlas probes active in the reported stream (ASN, Country, or geographical area)."), ongoing: Optional[str] = Query( None, description="Deprecated, this value is unused"), - page: Optional[int] = Query(1, ge=1, description="A page number within the paginated result set."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set."), ordering: Optional[str] = Query( None, description="Which field to use when ordering the results") ) -> GenericResponseDTO[DiscoEventsDTO]: diff --git a/controllers/hegemony_controller.py b/controllers/hegemony_controller.py index 67e2963..ccfaf88 100644 --- a/controllers/hegemony_controller.py +++ b/controllers/hegemony_controller.py @@ -10,7 +10,7 @@ from dtos.hegemony_alarms_dto import HegemonyAlarmsDTO from config.database import get_db from typing import Optional, List -from globals import page_size +from utils import page_size from utils import * router = APIRouter(prefix="/hegemony", tags=["Hegemony"]) diff --git a/controllers/metis_controller.py b/controllers/metis_controller.py index 4a5365e..57266b2 100644 --- a/controllers/metis_controller.py +++ b/controllers/metis_controller.py @@ -7,7 +7,7 @@ from dtos.metis_atlas_selection_dto import MetisAtlasSelectionDTO from config.database import get_db from typing import Optional -from globals import page_size +from utils import page_size from utils import validate_timebin_params, prepare_timebin_range router = APIRouter(prefix="/metis/atlas", tags=["Metis"]) diff --git a/controllers/network_delay_controller.py b/controllers/network_delay_controller.py index cd25572..e4e2a57 
100644 --- a/controllers/network_delay_controller.py +++ b/controllers/network_delay_controller.py @@ -8,7 +8,7 @@ from config.database import get_db from typing import Optional, List from datetime import datetime -from globals import page_size +from utils import page_size from utils import * router = APIRouter(prefix="/network_delay", tags=["Network Delay"]) diff --git a/controllers/networks_controller.py b/controllers/networks_controller.py index 3c1200e..b2c2a4b 100644 --- a/controllers/networks_controller.py +++ b/controllers/networks_controller.py @@ -5,7 +5,7 @@ from dtos.networks_dto import NetworksDTO from dtos.generic_response_dto import GenericResponseDTO, build_url from config.database import get_db -from globals import page_size +from utils import page_size router = APIRouter(prefix="/networks", tags=["Networks"]) @@ -28,7 +28,8 @@ async def get_networks( None, description="Autonomous System Number (ASN) or IXP ID. Note that IXP ID are negative to avoid colision."), search: Optional[str] = Query( None, description="Search for both ASN/IXPID and substring in names"), - page: Optional[int] = Query(1, ge=1, description="A page number within the paginated result set."), + page: Optional[int] = Query( + 1, ge=1, description="A page number within the paginated result set."), ordering: Optional[str] = Query( None, description="Which field to use when ordering the results.") ) -> GenericResponseDTO[NetworksDTO]: diff --git a/controllers/tr_hegemony_controller.py b/controllers/tr_hegemony_controller.py index 15be054..2bafb52 100644 --- a/controllers/tr_hegemony_controller.py +++ b/controllers/tr_hegemony_controller.py @@ -6,7 +6,7 @@ from config.database import get_db from typing import Optional from datetime import datetime -from globals import page_size +from utils import page_size from utils import prepare_timebin_range router = APIRouter(prefix="/tr_hegemony", tags=["TR Hegemony"]) @@ -59,7 +59,7 @@ async def get_hegemony( """ timebin__gte, timebin__lte = 
prepare_timebin_range( timebin, timebin__gte, timebin__lte, max_days=31) - + hegemony_data, total_count = TRHegemonyController.service.get_tr_hegemony( db, timebin=timebin, diff --git a/docs/project_structure.md b/docs/project_structure.md index 3a23999..8d7c317 100644 --- a/docs/project_structure.md +++ b/docs/project_structure.md @@ -18,7 +18,7 @@ This document provides an overview of the project's file and folder structure. E ├── .gitignore # Specifies intentionally untracked files to ignore ├── alembic.ini # Alembic configuration file ├── dockerfile # Docker image instructions to build the app container -├── globals.py # Global constants +├── utils.py # Utils ├── main.py # FastAPI entry point (starts the app) ├── README.md # Project documentation ├── requirements.txt # Python dependencies list for pip installation diff --git a/globals.py b/globals.py deleted file mode 100644 index 113770b..0000000 --- a/globals.py +++ /dev/null @@ -1,2 +0,0 @@ -# page size represents the number of objects returned by "results" field of GenericResponseDTO -page_size = 5 \ No newline at end of file diff --git a/repositories/atlas_delay_alarms_repository.py b/repositories/atlas_delay_alarms_repository.py index af6473b..782419a 100644 --- a/repositories/atlas_delay_alarms_repository.py +++ b/repositories/atlas_delay_alarms_repository.py @@ -3,7 +3,7 @@ from models.atlas_delay_alarms import AtlasDelayAlarms from datetime import datetime from typing import List, Optional, Tuple -from globals import page_size +from utils import page_size class AtlasDelayAlarmsRepository: diff --git a/repositories/atlas_delay_repository.py b/repositories/atlas_delay_repository.py index 4454db6..40c1ae6 100644 --- a/repositories/atlas_delay_repository.py +++ b/repositories/atlas_delay_repository.py @@ -3,7 +3,7 @@ from models.atlas_delay import AtlasDelay from datetime import datetime from typing import List, Optional, Tuple -from globals import page_size +from utils import page_size class 
AtlasDelayRepository: diff --git a/repositories/atlas_location_repository.py b/repositories/atlas_location_repository.py index 79e503d..4d30db9 100644 --- a/repositories/atlas_location_repository.py +++ b/repositories/atlas_location_repository.py @@ -1,7 +1,7 @@ from sqlalchemy.orm import Session from models.atlas_location import AtlasLocation from typing import Optional, List, Tuple -from globals import page_size +from utils import page_size class AtlasLocationRepository: @@ -29,7 +29,6 @@ def get_all( # Apply ordering if order_by and hasattr(AtlasLocation, order_by): query = query.order_by(getattr(AtlasLocation, order_by)) - # Apply pagination offset = (page - 1) * page_size diff --git a/repositories/country_repository.py b/repositories/country_repository.py index 9f56401..76a3b4a 100644 --- a/repositories/country_repository.py +++ b/repositories/country_repository.py @@ -2,7 +2,7 @@ from models.country import Country from typing import Optional, List, Tuple # Added Tuple for return type from sqlalchemy import asc -from globals import page_size +from utils import page_size class CountryRepository: @@ -27,7 +27,7 @@ def get_all( if name: query = query.filter(Country.name.ilike(f"%{name}%")) - #Executes getting total count of countries + # Executes getting total count of countries total_count = query.count() # Apply ordering if specified diff --git a/repositories/disco_events_repository.py b/repositories/disco_events_repository.py index d6e6c00..39d5a82 100644 --- a/repositories/disco_events_repository.py +++ b/repositories/disco_events_repository.py @@ -3,7 +3,7 @@ from models.disco_events import DiscoEvents from datetime import datetime from typing import List, Optional, Tuple -from globals import page_size +from utils import page_size class DiscoEventsRepository: diff --git a/repositories/hegemony_alarms_repository.py b/repositories/hegemony_alarms_repository.py index bc4c957..e628104 100644 --- a/repositories/hegemony_alarms_repository.py +++ 
b/repositories/hegemony_alarms_repository.py @@ -2,7 +2,7 @@ from sqlalchemy.orm import Session from models.hegemony_alarms import HegemonyAlarms from typing import Optional, List, Tuple -from globals import page_size +from utils import page_size class HegemonyAlarmsRepository: diff --git a/repositories/hegemony_cone_repository.py b/repositories/hegemony_cone_repository.py index 8e2b067..1eed074 100644 --- a/repositories/hegemony_cone_repository.py +++ b/repositories/hegemony_cone_repository.py @@ -2,7 +2,7 @@ from sqlalchemy.orm import Session from models.hegemony_cone import HegemonyCone from typing import Optional, List, Tuple -from globals import page_size +from utils import page_size class HegemonyConeRepository: diff --git a/repositories/hegemony_country_repository.py b/repositories/hegemony_country_repository.py index 075a664..906cb68 100644 --- a/repositories/hegemony_country_repository.py +++ b/repositories/hegemony_country_repository.py @@ -2,7 +2,7 @@ from sqlalchemy.orm import Session from models.hegemony_country import HegemonyCountry from typing import Optional, List, Tuple -from globals import page_size +from utils import page_size class HegemonyCountryRepository: diff --git a/repositories/hegemony_prefix_repository.py b/repositories/hegemony_prefix_repository.py index 8a30099..ee2b472 100644 --- a/repositories/hegemony_prefix_repository.py +++ b/repositories/hegemony_prefix_repository.py @@ -2,7 +2,7 @@ from sqlalchemy.orm import Session from models.hegemony_prefix import HegemonyPrefix from typing import Optional, List, Tuple -from globals import page_size +from utils import page_size class HegemonyPrefixRepository: diff --git a/repositories/hegemony_repository.py b/repositories/hegemony_repository.py index 4014eb5..ed7f902 100644 --- a/repositories/hegemony_repository.py +++ b/repositories/hegemony_repository.py @@ -2,7 +2,7 @@ from sqlalchemy.orm import Session from models.hegemony import Hegemony from typing import Optional, List, Tuple -from 
globals import page_size +from utils import page_size class HegemonyRepository: diff --git a/repositories/metis_atlas_deployment_repository.py b/repositories/metis_atlas_deployment_repository.py index df944c1..2e4edcf 100644 --- a/repositories/metis_atlas_deployment_repository.py +++ b/repositories/metis_atlas_deployment_repository.py @@ -2,7 +2,7 @@ from sqlalchemy.orm import Session from models.metis_atlas_deployment import MetisAtlasDeployment from typing import Optional, List, Tuple -from globals import page_size +from utils import page_size class MetisAtlasDeploymentRepository: @@ -46,7 +46,7 @@ def get_all( # Apply ordering if order_by and hasattr(MetisAtlasDeployment, order_by): query = query.order_by(getattr(MetisAtlasDeployment, order_by)) - + # Apply pagination offset = (page - 1) * page_size results = query.offset(offset).limit(page_size).all() diff --git a/repositories/metis_atlas_selection_repository.py b/repositories/metis_atlas_selection_repository.py index 53cc5cc..0d9d882 100644 --- a/repositories/metis_atlas_selection_repository.py +++ b/repositories/metis_atlas_selection_repository.py @@ -2,7 +2,7 @@ from sqlalchemy.orm import Session from models.metis_atlas_selection import MetisAtlasSelection from typing import Optional, List, Tuple -from globals import page_size +from utils import page_size class MetisAtlasSelectionRepository: diff --git a/repositories/networks_repository.py b/repositories/networks_repository.py index 9117875..d7add3e 100644 --- a/repositories/networks_repository.py +++ b/repositories/networks_repository.py @@ -1,8 +1,8 @@ from sqlalchemy.orm import Session -from sqlalchemy import or_,String +from sqlalchemy import or_, String from models.asn import ASN from typing import Optional, List, Tuple -from globals import page_size +from utils import page_size class NetworksRepository: @@ -47,7 +47,7 @@ def get_all( # Apply ordering if order_by and hasattr(ASN, order_by): query = query.order_by(getattr(ASN, order_by)) - + # Apply 
pagination offset = (page - 1) * page_size results = query.offset(offset).limit(page_size).all() diff --git a/repositories/tr_hegemony_repository.py b/repositories/tr_hegemony_repository.py index a6f802b..1a6f008 100644 --- a/repositories/tr_hegemony_repository.py +++ b/repositories/tr_hegemony_repository.py @@ -3,7 +3,7 @@ from models.tr_hegemony import TRHegemony from datetime import datetime from typing import List, Optional, Tuple -from globals import page_size +from utils import page_size class TRHegemonyRepository: diff --git a/utils.py b/utils.py index 90ee61a..f29bcde 100644 --- a/utils.py +++ b/utils.py @@ -3,7 +3,16 @@ from fastapi import HTTPException from datetime import datetime, timedelta from typing import Optional +from dotenv import load_dotenv +import os +# Load environment variables from .env file +try: + load_dotenv() +except: + pass + +page_size = int(os.getenv("PAGE_SIZE")) def validate_timebin_params( timebin: Optional[datetime], From d418b75d5427ad7264f9d002fb6cf6c310e2cd5b Mon Sep 17 00:00:00 2001 From: ibraam Date: Wed, 6 Aug 2025 11:49:05 +0300 Subject: [PATCH 37/43] Updated readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 743592f..d52d215 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,7 @@ In the project root directory, create a new `.env` file to define your specific ```env DATABASE_URL=postgresql://:@:/ PROXY_PATH=api-dev +PAGE_SIZE=100000 ``` @@ -129,4 +130,3 @@ Details how to manage database migrations using Alembic, including TimescaleDB-s ### 4. [Adding a New Endpoint](docs/add_new_endpoint.md) A step-by-step guide on how to add a new endpoint to the application. 
- From a49e79b68eef5a571d14717b69f2bb3a0e6bebaf Mon Sep 17 00:00:00 2001 From: ibraam Date: Tue, 19 Aug 2025 12:24:21 +0300 Subject: [PATCH 38/43] Updated allowed CORS origins --- main.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/main.py b/main.py index f47818f..a8edb44 100644 --- a/main.py +++ b/main.py @@ -1,6 +1,7 @@ import importlib import pkgutil from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware from controllers import __path__ as controllers_path from dotenv import load_dotenv import os @@ -35,6 +36,16 @@ redoc_url=None ) +origins = ["http://localhost", "http://www.ihr.live", "https://www.ihr.live"] + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + # Automatically import and register all routers inside "controllers" for _, module_name, _ in pkgutil.iter_modules(controllers_path): module = importlib.import_module(f"controllers.{module_name}") From bcd8e8c8a5e259378c6e55586e6612b8872426e8 Mon Sep 17 00:00:00 2001 From: ibraam Date: Tue, 19 Aug 2025 12:26:35 +0300 Subject: [PATCH 39/43] Increase version --- main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.py b/main.py index a8edb44..921a8b8 100644 --- a/main.py +++ b/main.py @@ -32,7 +32,7 @@ root_path="" if PROXY_PATH is None else f"/{PROXY_PATH}", title="IHR API", description=description, - version="v1.1", + version="v1.2", redoc_url=None ) From f07135fc5d132d2d3a23e3be2214ded9cd93b9ba Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 25 Aug 2025 16:47:37 +0300 Subject: [PATCH 40/43] Added data-migration.md --- docs/data-migration.md | 153 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 153 insertions(+) create mode 100644 docs/data-migration.md diff --git a/docs/data-migration.md b/docs/data-migration.md new file mode 100644 index 0000000..b7e518f --- /dev/null +++ b/docs/data-migration.md @@ -0,0 +1,153 @@ +## 1. 
Create the target database + +First, create the target database `ihr-fastapi` where you want the tables and data to reside: + + +## 2. Run Alembic migrations + +Run Alembic to create the schema and tables inside `ihr-fastapi`: + +```bash +alembic upgrade head +``` + +This ensures the target database has all the required tables ready. + +--- + +## 3. Connect to `ihr-fastapi` using psql + +Open a `psql` session to your target database: + +```bash +psql -U -d ihr-fastapi +``` + +Replace `` with your PostgreSQL username. + +--- +## 4. Configure FDW to access the source database (`ihr`) + +Inside the `psql` session, run: + +```sql +-- Enable the FDW extension +CREATE EXTENSION IF NOT EXISTS postgres_fdw; + +-- Create a connection to the ihr DB +CREATE SERVER "" +FOREIGN DATA WRAPPER postgres_fdw +OPTIONS (host '', dbname '', port ''); + +-- Map your Postgres user to the remote DB credentials +CREATE USER MAPPING FOR CURRENT_USER +SERVER "" +OPTIONS (user '', password ''); + +-- Import only the needed tables into a schema called ihr_src +CREATE SCHEMA IF NOT EXISTS ihr_src; +IMPORT FOREIGN SCHEMA public +FROM SERVER "" +INTO ihr_src; +``` + +Replace the following placeholders with your actual values: +- ``: A name for your foreign server +- ``: The hostname where your source database is running +- ``: The name of your source database +- ``: The PostgreSQL port +- ``: Your PostgreSQL username for the source database +- ``: Your PostgreSQL password for the source database + +### Explanation + +* **`CREATE EXTENSION`** enables FDW support. +* **`CREATE SERVER`** defines the connection to the source DB (`ihr`). +* **`USER MAPPING`** maps your local PostgreSQL user to the remote DB user/password. +* **`IMPORT FOREIGN SCHEMA`** pulls in table definitions into a local schema (`ihr_src`). These are **foreign tables** that point to the source DB. + +--- + +## 5. Prepare to export migration SQL + +We’ll generate a file (`migration.sql`) with `INSERT ... 
SELECT` statements to copy the data. + +In `psql`, run: + +```sql +\x off +\pset format unaligned +\pset pager off +\pset tuples_only on +\o migration.sql +``` + +### Explanation + +* **`\x off`** → disables expanded display. +* **`\pset format unaligned`** → outputs raw SQL without table formatting. +* **`\pset pager off`** → prevents paging. +* **`\pset tuples_only on`** → suppresses headers/footers. +* **`\o migration.sql`** → redirects all output into a file named `migration.sql`. + +--- + +## 6. Generate the data migration SQL + +Run this query in `psql`: + +```sql +SELECT + 'INSERT INTO public.' || table_name || ' (' || + string_agg(column_name, ', ' ORDER BY ordinal_position) || + ') SELECT ' || + string_agg(column_name, ', ' ORDER BY ordinal_position) || + ' FROM ihr_src.' || table_name || + CASE + WHEN EXISTS ( + SELECT 1 FROM information_schema.columns c2 + WHERE c2.table_schema = 'public' + AND c2.table_name = c.table_name + AND c2.column_name = 'timebin' + ) + THEN ' WHERE timebin >= NOW() - INTERVAL ''3 months'';' + ELSE ';' + END || + E'\n' || + '\echo ''SUCCESS: ' || table_name || ' data inserted''' +FROM information_schema.columns c +WHERE table_schema = 'public' +AND table_name LIKE 'ihr_%' +GROUP BY table_name +ORDER BY table_name; +``` + +This builds a series of `INSERT INTO ... SELECT ...` statements that: + +* Copy all data from `ihr_src.table` to `public.table`. +* If a `timebin` column exists, only copy rows from the **last 3 months**. +* Print a confirmation message (`SUCCESS: ...`) after each table insert. + +--- + +## 7. Exit `psql` + +Type: + +```sql +\q +``` + +Now you’ll have a file called `migration.sql` in your current directory. + +--- + +## 8. Run the migration + +Finally, execute the migration script against the `ihr-fastapi` database: + +```bash +psql -U -d ihr-fastapi -f migration.sql +``` + +This will insert data into all your new tables. 
From 1a69bb937b4dc0b7b6c9af418caedb2b737a20a1 Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 25 Aug 2025 16:51:29 +0300 Subject: [PATCH 41/43] Fixed main.py --- main.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/main.py b/main.py index b1d947c..3b4b007 100644 --- a/main.py +++ b/main.py @@ -36,16 +36,6 @@ redoc_url=None ) -origins = ["http://localhost", "http://www.ihr.live", "https://www.ihr.live"] - -app.add_middleware( - CORSMiddleware, - allow_origins=origins, - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) - # Automatically import and register all routers inside "controllers" for _, module_name, _ in pkgutil.iter_modules(controllers_path): module = importlib.import_module(f"controllers.{module_name}") @@ -63,4 +53,4 @@ allow_origins=origins, allow_methods=["*"], allow_headers=["*"], -) +) \ No newline at end of file From 075d4b441c29aab144219ca3b78eeadcfa2731bd Mon Sep 17 00:00:00 2001 From: ibraam Date: Mon, 25 Aug 2025 16:57:03 +0300 Subject: [PATCH 42/43] Modified data_migration.md --- docs/{data-migration.md => data_migration.md} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docs/{data-migration.md => data_migration.md} (100%) diff --git a/docs/data-migration.md b/docs/data_migration.md similarity index 100% rename from docs/data-migration.md rename to docs/data_migration.md From a0c3185c2dfcc5734bc80c0d3880117528a46cf5 Mon Sep 17 00:00:00 2001 From: ibraam Date: Sat, 4 Oct 2025 18:49:37 +0300 Subject: [PATCH 43/43] Optimized indexes and performed eager loading of data --- alembic/env.py | 2 +- models/atlas_delay.py | 2 +- models/atlas_delay_alarms.py | 2 +- models/atlas_location.py | 2 +- models/delay.py | 2 +- models/delay_alarms.py | 2 +- models/forwarding.py | 2 +- models/forwarding_alarms.py | 2 +- models/hegemony.py | 2 +- models/hegemony_alarms.py | 2 +- models/hegemony_cone.py | 2 +- models/hegemony_country.py | 2 +- models/hegemony_prefix.py | 2 +- 
models/metis_atlas_deployment.py | 2 +- models/metis_atlas_selection.py | 2 +- models/tr_hegemony.py | 2 +- repositories/atlas_delay_alarms_repository.py | 8 ++++++-- repositories/atlas_delay_repository.py | 8 ++++++-- repositories/hegemony_alarms_repository.py | 13 +++++++++++-- repositories/hegemony_country_repository.py | 10 ++++++++-- repositories/hegemony_prefix_repository.py | 13 +++++++++++-- repositories/hegemony_repository.py | 16 +++++++++++++--- .../metis_atlas_deployment_repository.py | 7 ++++--- repositories/metis_atlas_selection_repository.py | 7 ++++--- repositories/tr_hegemony_repository.py | 8 ++++++-- 25 files changed, 85 insertions(+), 37 deletions(-) diff --git a/alembic/env.py b/alembic/env.py index 50ac4f9..e566d3d 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -100,7 +100,7 @@ def create_hypertable_ops(table_name, hypertable_meta, is_existing=False): # Create hypertable SQL hypertable_sql = ( - f"SELECT create_hypertable('{table_name}', by_range('{time_col}', INTERVAL '{chunk_interval}'));" + f"SELECT create_hypertable('{table_name}', by_range('{time_col}', INTERVAL '{chunk_interval}'),create_default_indexes => false);" ) upgrade_ops.append(ops.ExecuteSQLOp(hypertable_sql)) diff --git a/models/atlas_delay.py b/models/atlas_delay.py index 14b0d0d..366eeca 100644 --- a/models/atlas_delay.py +++ b/models/atlas_delay.py @@ -10,7 +10,7 @@ class AtlasDelay(Base): __tablename__ = 'ihr_atlas_delay' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/atlas_delay_alarms.py b/models/atlas_delay_alarms.py index ef71bf7..5f66174 100644 --- a/models/atlas_delay_alarms.py +++ b/models/atlas_delay_alarms.py @@ -8,7 +8,7 @@ class AtlasDelayAlarms(Base): __tablename__ = 'ihr_atlas_delay_alarms' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/atlas_location.py 
b/models/atlas_location.py index a2e54f1..df55176 100644 --- a/models/atlas_location.py +++ b/models/atlas_location.py @@ -7,7 +7,7 @@ class AtlasLocation(Base): __indexes__ = [ { 'name': 'ihr_atlas_location_af_name_type_idx', - 'columns': ['af', 'name','type'], + 'columns': ['af','type'], },] id = Column(Integer, primary_key=True, autoincrement=True) name = Column( diff --git a/models/delay.py b/models/delay.py index 8a3a70a..847edc4 100644 --- a/models/delay.py +++ b/models/delay.py @@ -8,7 +8,7 @@ class Delay(Base): __tablename__ = 'ihr_delay' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/delay_alarms.py b/models/delay_alarms.py index a3f7266..ccf2776 100644 --- a/models/delay_alarms.py +++ b/models/delay_alarms.py @@ -11,7 +11,7 @@ class DelayAlarms(Base): __tablename__ = 'ihr_delay_alarms' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __indexes__ = [ diff --git a/models/forwarding.py b/models/forwarding.py index c2b085d..bf29825 100644 --- a/models/forwarding.py +++ b/models/forwarding.py @@ -8,7 +8,7 @@ class Forwarding(Base): __tablename__ = 'ihr_forwarding' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/forwarding_alarms.py b/models/forwarding_alarms.py index 8adec29..50cb33b 100644 --- a/models/forwarding_alarms.py +++ b/models/forwarding_alarms.py @@ -8,7 +8,7 @@ class ForwardingAlarms(Base): __tablename__ = 'ihr_forwarding_alarms' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __indexes__ = [ diff --git a/models/hegemony.py b/models/hegemony.py index 07ce68d..be1a70b 100644 --- a/models/hegemony.py +++ b/models/hegemony.py @@ -8,7 +8,7 @@ class Hegemony(Base): __tablename__ = 'ihr_hegemony' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + 
PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/hegemony_alarms.py b/models/hegemony_alarms.py index 5e69ef0..7c65a20 100644 --- a/models/hegemony_alarms.py +++ b/models/hegemony_alarms.py @@ -8,7 +8,7 @@ class HegemonyAlarms(Base): __tablename__ = 'ihr_hegemony_alarms' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/hegemony_cone.py b/models/hegemony_cone.py index 253d1ec..90546f8 100644 --- a/models/hegemony_cone.py +++ b/models/hegemony_cone.py @@ -9,7 +9,7 @@ class HegemonyCone(Base): __tablename__ = 'ihr_hegemonycone' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __indexes__ = [ diff --git a/models/hegemony_country.py b/models/hegemony_country.py index 543a08d..f6c239e 100644 --- a/models/hegemony_country.py +++ b/models/hegemony_country.py @@ -8,7 +8,7 @@ class HegemonyCountry(Base): __tablename__ = 'ihr_hegemony_country' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/hegemony_prefix.py b/models/hegemony_prefix.py index 1487916..45f9d4b 100644 --- a/models/hegemony_prefix.py +++ b/models/hegemony_prefix.py @@ -8,7 +8,7 @@ class HegemonyPrefix(Base): __tablename__ = 'ihr_hegemony_prefix' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/metis_atlas_deployment.py b/models/metis_atlas_deployment.py index b10503e..f22a5a6 100644 --- a/models/metis_atlas_deployment.py +++ b/models/metis_atlas_deployment.py @@ -8,7 +8,7 @@ class MetisAtlasDeployment(Base): __tablename__ = 'ihr_metis_atlas_deployment' __table_args__ = ( - PrimaryKeyConstraint('id','timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/metis_atlas_selection.py b/models/metis_atlas_selection.py index 
a75c21d..f526347 100644 --- a/models/metis_atlas_selection.py +++ b/models/metis_atlas_selection.py @@ -8,7 +8,7 @@ class MetisAtlasSelection(Base): __tablename__ = 'ihr_metis_atlas_selection' __table_args__ = ( - PrimaryKeyConstraint('id','timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/models/tr_hegemony.py b/models/tr_hegemony.py index 85c6ec8..62f7e54 100644 --- a/models/tr_hegemony.py +++ b/models/tr_hegemony.py @@ -9,7 +9,7 @@ class TRHegemony(Base): __tablename__ = 'ihr_tr_hegemony' __table_args__ = ( - PrimaryKeyConstraint('id', 'timebin'), + PrimaryKeyConstraint('timebin','id'), ) __hypertable__ = { diff --git a/repositories/atlas_delay_alarms_repository.py b/repositories/atlas_delay_alarms_repository.py index 1520bd3..694c04d 100644 --- a/repositories/atlas_delay_alarms_repository.py +++ b/repositories/atlas_delay_alarms_repository.py @@ -1,4 +1,4 @@ -from sqlalchemy.orm import Session, aliased +from sqlalchemy.orm import Session, aliased, contains_eager from sqlalchemy import and_, or_ from models.atlas_delay_alarms import AtlasDelayAlarms from datetime import datetime @@ -37,7 +37,11 @@ def get_alarms( query = db.query(AtlasDelayAlarms)\ .join(Startpoint, AtlasDelayAlarms.startpoint_relation)\ - .join(Endpoint, AtlasDelayAlarms.endpoint_relation) + .join(Endpoint, AtlasDelayAlarms.endpoint_relation)\ + .options( + contains_eager(AtlasDelayAlarms.startpoint_relation, alias=Startpoint), + contains_eager(AtlasDelayAlarms.endpoint_relation, alias=Endpoint) + ) # If no time filters specified, get rows with max timebin diff --git a/repositories/atlas_delay_repository.py b/repositories/atlas_delay_repository.py index f2c76a3..0cac3ca 100644 --- a/repositories/atlas_delay_repository.py +++ b/repositories/atlas_delay_repository.py @@ -1,4 +1,4 @@ -from sqlalchemy.orm import Session, aliased +from sqlalchemy.orm import Session, aliased, contains_eager from sqlalchemy import and_, or_ from models.atlas_delay import AtlasDelay 
from datetime import datetime @@ -40,7 +40,11 @@ def get_delays( query = db.query(AtlasDelay)\ .join(Startpoint, AtlasDelay.startpoint_relation)\ - .join(Endpoint, AtlasDelay.endpoint_relation) + .join(Endpoint, AtlasDelay.endpoint_relation)\ + .options( + contains_eager(AtlasDelay.startpoint_relation, alias=Startpoint), + contains_eager(AtlasDelay.endpoint_relation, alias=Endpoint) + ) # If no time filters specified, get rows with max timebin diff --git a/repositories/hegemony_alarms_repository.py b/repositories/hegemony_alarms_repository.py index b3883f1..070c466 100644 --- a/repositories/hegemony_alarms_repository.py +++ b/repositories/hegemony_alarms_repository.py @@ -1,5 +1,5 @@ from datetime import datetime -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, aliased, contains_eager from models.hegemony_alarms import HegemonyAlarms from typing import Optional, List, Tuple from utils import page_size @@ -20,7 +20,16 @@ def get_all( page: int = 1, order_by: Optional[str] = None ) -> Tuple[List[HegemonyAlarms], int]: - query = db.query(HegemonyAlarms) + ASN = aliased(HegemonyAlarms.asn_relation.property.mapper.class_) + OriginASN = aliased(HegemonyAlarms.originasn_relation.property.mapper.class_) + + query = db.query(HegemonyAlarms)\ + .join(ASN, HegemonyAlarms.asn_relation)\ + .join(OriginASN, HegemonyAlarms.originasn_relation)\ + .options( + contains_eager(HegemonyAlarms.asn_relation, alias=ASN), + contains_eager(HegemonyAlarms.originasn_relation, alias=OriginASN) + ) # If no time filters specified, get rows with max timebin if not timebin_gte and not timebin_lte: diff --git a/repositories/hegemony_country_repository.py b/repositories/hegemony_country_repository.py index 691cb12..200b0a8 100644 --- a/repositories/hegemony_country_repository.py +++ b/repositories/hegemony_country_repository.py @@ -1,5 +1,5 @@ from datetime import datetime -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, contains_eager, aliased from 
models.hegemony_country import HegemonyCountry from typing import Optional, List, Tuple from utils import page_size @@ -23,7 +23,13 @@ def get_all( page: int = 1, order_by: Optional[str] = None ) -> Tuple[List[HegemonyCountry], int]: - query = db.query(HegemonyCountry) + ASN = aliased(HegemonyCountry.asn_relation.property.mapper.class_) + + query = db.query(HegemonyCountry)\ + .join(ASN, HegemonyCountry.asn_relation)\ + .options( + contains_eager(HegemonyCountry.asn_relation, alias=ASN), + ) # If no time filters specified, get rows with max timebin if not timebin_gte and not timebin_lte: diff --git a/repositories/hegemony_prefix_repository.py b/repositories/hegemony_prefix_repository.py index 5c2a363..589f8cd 100644 --- a/repositories/hegemony_prefix_repository.py +++ b/repositories/hegemony_prefix_repository.py @@ -1,5 +1,5 @@ from datetime import datetime -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, contains_eager, aliased from models.hegemony_prefix import HegemonyPrefix from typing import Optional, List, Tuple from utils import page_size @@ -28,8 +28,17 @@ def get_all( page: int = 1, order_by: Optional[str] = None ) -> Tuple[List[HegemonyPrefix], int]: - query = db.query(HegemonyPrefix) + ASN = aliased(HegemonyPrefix.asn_relation.property.mapper.class_) + OriginASN = aliased(HegemonyPrefix.originasn_relation.property.mapper.class_) + + query = db.query(HegemonyPrefix)\ + .join(ASN, HegemonyPrefix.asn_relation)\ + .join(OriginASN, HegemonyPrefix.originasn_relation)\ + .options( + contains_eager(HegemonyPrefix.asn_relation, alias=ASN), + contains_eager(HegemonyPrefix.originasn_relation, alias=OriginASN) + ) # If no time filters specified, get rows with max timebin if not timebin_gte and not timebin_lte: max_timebin = db.query(func.max(HegemonyPrefix.timebin)).scalar() diff --git a/repositories/hegemony_repository.py b/repositories/hegemony_repository.py index 2f2ca48..7a13a24 100644 --- a/repositories/hegemony_repository.py +++ 
b/repositories/hegemony_repository.py @@ -1,5 +1,5 @@ from datetime import datetime -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, contains_eager, aliased from models.hegemony import Hegemony from typing import Optional, List, Tuple from utils import page_size @@ -21,8 +21,18 @@ def get_all( page: int = 1, order_by: Optional[str] = None ) -> Tuple[List[Hegemony], int]: - query = db.query(Hegemony) - + + ASN = aliased(Hegemony.asn_relation.property.mapper.class_) + OriginASN = aliased(Hegemony.originasn_relation.property.mapper.class_) + + query = db.query(Hegemony)\ + .join(ASN, Hegemony.asn_relation)\ + .join(OriginASN, Hegemony.originasn_relation)\ + .options( + contains_eager(Hegemony.asn_relation, alias=ASN), + contains_eager(Hegemony.originasn_relation, alias=OriginASN) + ) + # If no time filters specified, get rows with max timebin if not timebin_gte and not timebin_lte: max_timebin = db.query(func.max(Hegemony.timebin)).scalar() diff --git a/repositories/metis_atlas_deployment_repository.py b/repositories/metis_atlas_deployment_repository.py index abe9967..8e525f1 100644 --- a/repositories/metis_atlas_deployment_repository.py +++ b/repositories/metis_atlas_deployment_repository.py @@ -1,5 +1,5 @@ from datetime import datetime -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, contains_eager from models.metis_atlas_deployment import MetisAtlasDeployment from typing import Optional, List, Tuple from utils import page_size @@ -21,8 +21,9 @@ def get_all( page: int = 1, order_by: Optional[str] = None ) -> Tuple[List[MetisAtlasDeployment], int]: - query = db.query(MetisAtlasDeployment).join( - MetisAtlasDeployment.asn_relation) + query = db.query(MetisAtlasDeployment)\ + .join(MetisAtlasDeployment.asn_relation)\ + .options(contains_eager(MetisAtlasDeployment.asn_relation)) # If no time filters specified, get rows with max timebin if not timebin and not timebin_gte and not timebin_lte: diff --git 
a/repositories/metis_atlas_selection_repository.py b/repositories/metis_atlas_selection_repository.py index abd6e99..348af87 100644 --- a/repositories/metis_atlas_selection_repository.py +++ b/repositories/metis_atlas_selection_repository.py @@ -1,5 +1,5 @@ from datetime import datetime -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, contains_eager from models.metis_atlas_selection import MetisAtlasSelection from typing import Optional, List, Tuple from utils import page_size @@ -21,8 +21,9 @@ def get_all( page: int = 1, order_by: Optional[str] = None ) -> Tuple[List[MetisAtlasSelection], int]: - query = db.query(MetisAtlasSelection).join( - MetisAtlasSelection.asn_relation) + query = db.query(MetisAtlasSelection)\ + .join(MetisAtlasSelection.asn_relation)\ + .options(contains_eager(MetisAtlasSelection.asn_relation)) # If no time filters specified, get rows with max timebin if not timebin and not timebin_gte and not timebin_lte: diff --git a/repositories/tr_hegemony_repository.py b/repositories/tr_hegemony_repository.py index 5f711c3..27a5462 100644 --- a/repositories/tr_hegemony_repository.py +++ b/repositories/tr_hegemony_repository.py @@ -1,4 +1,4 @@ -from sqlalchemy.orm import Session, aliased +from sqlalchemy.orm import Session, aliased, contains_eager from sqlalchemy import and_, or_ from models.tr_hegemony import TRHegemony from datetime import datetime @@ -34,7 +34,11 @@ def get_tr_hegemony( query = db.query(TRHegemony)\ .join(Origin, TRHegemony.origin_relation)\ - .join(Dependency, TRHegemony.dependency_relation) + .join(Dependency, TRHegemony.dependency_relation)\ + .options( + contains_eager(TRHegemony.origin_relation, alias=Origin), + contains_eager(TRHegemony.dependency_relation, alias=Dependency) + ) # If no time filters specified, get rows with max timebin if not timebin and not timebin_gte and not timebin_lte: