Changes from all commits (25 commits):
54840c2: Add a view to export datapackage as zip files (Bachibouzouk, Dec 8, 2025)
f00fd3b: Add option to download as datapackage from progression bar (Bachibouzouk, Dec 8, 2025)
5528215: Move the to_datapackage of a scenario to the Model definition (Bachibouzouk, Dec 8, 2025)
2ae4acf: Prevent export of scenario not belonging to user (Bachibouzouk, Dec 8, 2025)
9dcf1bc: Save project.csv in data folder (Bachibouzouk, Dec 9, 2025)
f671c2f: Add the project name to the asset csv (Bachibouzouk, Dec 9, 2025)
d05ea26: Generate a datapackage.json file (Bachibouzouk, Dec 9, 2025)
9ea5c48: Change oemof-tabular version (Bachibouzouk, Dec 15, 2025)
a3bd5ea: Install git in docker container (Bachibouzouk, Dec 15, 2025)
df65e2e: Update oemof-thermal (Bachibouzouk, Dec 15, 2025)
7878b2b: Add datapackage.json file (Bachibouzouk, Dec 15, 2025)
3f401d3: Look for foreign keys to resource 'project' (Bachibouzouk, Dec 15, 2025)
30d595e: Do not associate demand with project_data (Bachibouzouk, Dec 15, 2025)
26741ca: Update to_datapackage method of Bus Model class (Bachibouzouk, Dec 15, 2025)
f527a37: Fix typo (Bachibouzouk, Dec 15, 2025)
b8ee258: Do not provide project_data for dso either (Bachibouzouk, Dec 15, 2025)
bdeccaa: WIP need to fix bus column label (Bachibouzouk, Dec 15, 2025)
ba8137a: Adapt local export to datapackage via command (Bachibouzouk, Dec 15, 2025)
b3d917e: Fix initial assignment in forms (Bachibouzouk, Jan 22, 2026)
2c1ee53: Update bus port names (Bachibouzouk, Feb 2, 2026)
8a7a2fd: Adapt datapackage export for storage (Bachibouzouk, Feb 2, 2026)
c247c9d: Map efficiency and efficiency multiple for chp_fixed_ratio (Bachibouzouk, Feb 3, 2026)
cb8a296: Change oemof-tabular for oemof-datapackage dependency (Bachibouzouk, Feb 3, 2026)
49b5c9a: Add outpath option for datapackage export command (Bachibouzouk, Feb 3, 2026)
34ada21: Enable chp and electrolyzer to be exported to datapackage (Bachibouzouk, Feb 9, 2026)
app/compose/production/app_postgres/Dockerfile (8 changes: 4 additions & 4 deletions)
@@ -10,16 +10,16 @@ RUN mkdir ${CONFIG_ROOT}
 COPY app/requirements/base.txt ${CONFIG_ROOT}/base.txt
 COPY app/requirements/production.txt ${CONFIG_ROOT}/production.txt

+# Install gettext utilities for translations
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends gettext-base gettext git && \
+    rm -rf /var/lib/apt/lists/*

 RUN pip install --upgrade pip \
     && pip install --no-cache-dir -r ${CONFIG_ROOT}/production.txt

 WORKDIR ${APP_ROOT}

-# Install gettext utilities for translations
-RUN apt-get update && \
-    apt-get install -y --no-install-recommends gettext-base gettext && \
-    rm -rf /var/lib/apt/lists/*

 ADD app/ ${APP_ROOT}

app/projects/forms.py (10 changes: 5 additions & 5 deletions)
@@ -834,7 +834,7 @@ def __init__(self, *args, **kwargs):
         """
         for field in self.fields:
             if field == "renewable_asset" and self.asset_type_name in RENEWABLE_ASSETS:
-                self.fields[field].initial = True
+                self.initial[field] = True
                 self.fields[field].widget.attrs.update({f"df-{field}": ""})
             if field == "input_timeseries":
                 self.fields[field].required = self.is_input_timeseries_empty()
@@ -1130,15 +1130,15 @@ def __init__(self, *args, **kwargs):
         asset_type_name = kwargs.pop("asset_type", None)
         super(StorageForm, self).__init__(*args, asset_type="capacity", **kwargs)
         self.fields["dispatchable"].widget = forms.HiddenInput()
-        self.fields["dispatchable"].initial = True
+        self.initial["dispatchable"] = True

         if asset_type_name != "hess":
             self.fields["fixed_thermal_losses_relative"].widget = forms.HiddenInput()
-            self.fields["fixed_thermal_losses_relative"].initial = 0
+            self.initial["fixed_thermal_losses_relative"] = 0
             self.fields["fixed_thermal_losses_absolute"].widget = forms.HiddenInput()
-            self.fields["fixed_thermal_losses_absolute"].initial = 0
+            self.initial["fixed_thermal_losses_absolute"] = 0
             self.fields["thermal_loss_rate"].widget = forms.HiddenInput()
-            self.fields["thermal_loss_rate"].initial = 0
+            self.initial["thermal_loss_rate"] = 0
         else:
             field_name = "fixed_thermal_losses_relative"
             help_text = self.fields[field_name].help_text
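
The forms.py change replaces assignments to self.fields[field].initial with entries in self.initial. A minimal sketch of the pattern with a hypothetical ExampleAssetForm (not a class from this repo), relying on Django's documented precedence of form-level initial data:

from django import forms


class ExampleAssetForm(forms.Form):
    # Hypothetical form, only to illustrate the pattern changed above
    dispatchable = forms.BooleanField(required=False)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Form.get_initial_for_field() looks in self.initial before falling
        # back to Field.initial, so per-instance defaults set in __init__
        # belong in self.initial.
        self.initial["dispatchable"] = True
        # By contrast, self.fields["dispatchable"].initial = True is ignored
        # whenever self.initial already holds a value for the field, e.g. one
        # coming from the initial kwarg or from a ModelForm instance.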
app/projects/management/commands/datapackage.py (115 changes: 25 additions & 90 deletions)
@@ -1,112 +1,47 @@
 from django.core.management.base import BaseCommand, CommandError
-from projects.models import (
-    Asset,
-    AssetType,
-    TopologyNode,
-    Scenario,
-    Timeseries,
-    ConnectionLink,
-    Bus,
-)
-import pandas as pd
+from projects.models import Scenario
 from pathlib import Path
-import numpy as np
 import shutil
+from oemof.datapackage.datapackage import building
+import datapackage as dp


 class Command(BaseCommand):
     help = "Convert the given scenarios to datapackages"

     def add_arguments(self, parser):
         parser.add_argument("scen_id", nargs="+", type=int)
-
-        parser.add_argument(
-            "--overwrite", action="store_true", help="Overwrite the datapackage"
-        )
+        parser.add_argument("-o", "--outfile", type=str, nargs="?", const="")

     def handle(self, *args, **options):
-        overwrite = options["overwrite"]
-
         for scen_id in options["scen_id"]:
             try:
                 scenario = Scenario.objects.get(pk=scen_id)
             except Scenario.DoesNotExist:
                 raise CommandError('Scenario "%s" does not exist' % scen_id)

-            destination_path = Path(__file__).resolve().parents[4]
-
-            # Create a folder with a datapackage structure
-            scenario_folder = destination_path / f"scenario_{scen_id}"
-            create_folder = True
-
-            if scenario_folder.exists():
-                if not overwrite:
-                    create_folder = False
-                else:
-                    shutil.rmtree(scenario_folder)
-
-            elements_folder = scenario_folder / "data" / "elements"
-            sequences_folder = scenario_folder / "data" / "sequences"
-
-            if create_folder:
-                # create subfolders
-                (scenario_folder / "scripts").mkdir(parents=True)
-                elements_folder.mkdir(parents=True)
-                sequences_folder.mkdir(parents=True)
-
-            # List all components of the scenario (except the busses)
-            qs_assets = Asset.objects.filter(scenario=scenario)
-            # List all distinct components' assettypes (or facade name)
-            facade_names = qs_assets.distinct().values_list(
-                "asset_type__asset_type", flat=True
+            destination_path = options["outfile"]
+            if destination_path == "":
+                destination_path = Path(__file__).resolve().parents[4]
+            else:
+                destination_path = Path(destination_path)
+
+            scenario_folder = destination_path / f"scenario_{scenario.name}".replace(
+                " ", "_"
             )
+            if scenario_folder.exists():
+                shutil.rmtree(scenario_folder)

-            bus_resource_records = []
-            profile_resource_records = {}
-            for facade_name in facade_names:
-                resource_records = []
-                for i, asset in enumerate(
-                    qs_assets.filter(asset_type__asset_type=facade_name)
-                ):
-                    resource_rec, bus_resource_rec, profile_resource_rec = (
-                        asset.to_datapackage()
-                    )
-                    resource_records.append(resource_rec)
-                    # those constitute the busses and sequences used by this asset
-                    bus_resource_records.extend(bus_resource_rec)
-                    profile_resource_records.update(profile_resource_rec)
-
-                if resource_records:
-                    out_path = elements_folder / f"{facade_name}.csv"
-                    Path(out_path).parent.mkdir(parents=True, exist_ok=True)
-                    df = pd.DataFrame(resource_records)
-                    df.to_csv(out_path, index=False)
+            dp_json = scenario_folder / "datapackage.json"

-            # Save all unique busses to a elements resource
-            if bus_resource_records:
-                out_path = elements_folder / f"bus.csv"
-                Path(out_path).parent.mkdir(parents=True, exist_ok=True)
-                df = pd.DataFrame(bus_resource_records)
-                df.drop_duplicates("name").to_csv(out_path, index=False)
+            if dp_json.exists():
+                print("Only inferring metadata")
+                p = dp.Package(str(dp_json))
+                building.infer_package_foreign_keys(p, fk_targets=["project"])
+                p.descriptor["resources"].sort(key=lambda x: (x["path"], x["name"]))
+                p.commit()
+                p.save(dp_json)

-            # Save all profiles to a sequences resource
-            if profile_resource_records:
-                out_path = sequences_folder / f"profiles.csv"
-                Path(out_path).parent.mkdir(parents=True, exist_ok=True)
-                # add timestamps to the profiles
-                profile_resource_records["timeindex"] = scenario.get_timestamps()
-                try:
-                    df = pd.DataFrame(profile_resource_records)
-                except ValueError as e:
-                    # If not all profiles have the same length we pad the shorter profiles with np.nan
-                    max_len = max(len(v) for v in profile_resource_records.values())
-                    profile_resource_records = {
-                        k: v + [np.nan] * (max_len - len(v))
-                        for k, v in profile_resource_records.items()
-                    }
-                    df = pd.DataFrame(profile_resource_records)
-                    print(
-                        f"Some profiles have more timesteps that other profiles in scenario {scenario.name}({scen_id}) --> the shorter profiles will be expanded with NaN values"
-                    )
-                # TODO check if there are column duplicates
-                df.set_index("timeindex").to_csv(out_path, index=True)
+            else:
+                print("Creating datapackage.json")
+                scenario.to_datapackage(destination_path)
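
A usage sketch for the rewritten command; the scenario ids and output directory are placeholders, not values from this PR:

from django.core.management import call_command

# Shell equivalent: python manage.py datapackage 1 2 -o /tmp/datapackage_exports
# Writes each scenario to <outfile>/scenario_<scenario name>/, either creating
# datapackage.json via scenario.to_datapackage() or, when one is already
# present, only re-inferring its foreign keys to the 'project' resource.
call_command("datapackage", 1, 2, outfile="/tmp/datapackage_exports")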