Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
83 changes: 55 additions & 28 deletions LocalFeeder/FeederSimulator.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,7 @@
from pydantic import BaseModel
from scipy.sparse import coo_matrix, csc_matrix

logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)
logger = logging.getLogger("uvicorn.error")


def permutation(from_list, to_list):
Expand Down Expand Up @@ -117,6 +115,7 @@ class FeederSimulator(object):

def __init__(self, config: FeederConfig):
"""Create a ``FeederSimulator`` object."""
logger.info("Creating an instance of feeder simulator")
self._state = OpenDSSState.UNLOADED
self._opendss_location = config.opendss_location
self._profile_location = config.profile_location
Expand All @@ -139,37 +138,47 @@ def __init__(self, config: FeederConfig):

self.tap_setting = config.tap_setting

if config.existing_feeder_file is None or not os.path.exists(
config.existing_feeder_file
):
self._simulation_time_step = "15m"
if config.existing_feeder_file is None:
if self._use_smartds:
logger.info("Downloading Opendss model from OEDI data lake")
self._feeder_file = os.path.join("opendss", "Master.dss")
self.download_data("oedi-data-lake", update_loadshape_location=True)
elif not self._use_smartds and not self._user_uploads_model:
logger.info("Defaulting to OpenDSS master file 'Master.dss'")
self._feeder_file = os.path.join("opendss", "master.dss")
self.download_data("gadal")
else:
logger.Error("Usere should have uploaded model using endpoint before running the simulation")
# User should have uploaded model using endpoint
raise Exception("Set existing_feeder_file when uploading data")
else:
logger.info(f"Using user defined OpenDSS master file: '{config.existing_feeder_file}'")
self._feeder_file = config.existing_feeder_file

self.open_lines = config.open_lines
logger.info(f"Loading OpenDSS model...")
self.load_feeder()

logger.info(f"complete")
logger.info(f"Locating sensor information")
if self._sensor_location is None:
logger.info(f"No sensor info provided. Creating sensors...")
self.create_measurement_lists()
logger.info(f"complete")

logger.info(f"Running snapshop simulation...")
self.snapshot_run()
logger.info(f"Running snapshop simulation")
assert self._state == OpenDSSState.SNAPSHOT_RUN, f"{self._state}"
logger.info(f"complete")

def forcast_pv(self, steps: int) -> list:
"""
Forecasts day ahead PV generation for the OpenDSS feeder. The OpenDSS file is run and the
average irradiance is computed over all PV systems for each time step. This average irradiance
is used to compute the individual PV system power output
"""
cmd = f'Set stepsize={self._run_freq_sec} Number=1'
cmd = f"Set stepsize={self._simulation_time_step} Number=1"
dss.Text.Command(cmd)
forecast = []
for k in range(steps):
Expand Down Expand Up @@ -218,24 +227,30 @@ def reenable(self):

def download_data(self, bucket_name, update_loadshape_location=False):
"""Download data from bucket path."""
logging.info(f"Downloading from bucket {bucket_name}")
logger.info(f"Downloading from bucket {bucket_name}")
# Equivalent to --no-sign-request
s3_resource = boto3.resource("s3", config=Config(signature_version=UNSIGNED))
bucket = s3_resource.Bucket(bucket_name)
opendss_location = self._opendss_location
profile_location = self._profile_location
sensor_location = self._sensor_location

logger.info(f"Downloading the OpenDSS model")

for obj in bucket.objects.filter(Prefix=opendss_location):
output_location = os.path.join(
"opendss", obj.key.replace(opendss_location, "").strip("/")
)
os.makedirs(os.path.dirname(output_location), exist_ok=True)
bucket.download_file(obj.key, output_location)

logger.info(f"Downloading the OpenDSS profiles")

modified_loadshapes = ""
os.makedirs(os.path.join("profiles"), exist_ok=True)
if update_loadshape_location:
logger.info(f"Downloading the OpenDSS loapshape files")

all_profiles = set()
with open(os.path.join("opendss", "LoadShapes.dss"), "r") as fp_loadshapes:
for row in fp_loadshapes.readlines():
Expand All @@ -253,8 +268,9 @@ def download_data(self, bucket_name, update_loadshape_location=False):
for profile in all_profiles:
s3_location = f"{profile_location}/{profile}"
bucket.download_file(s3_location, os.path.join("profiles", profile))

else:
logger.info(f"Downloading load and generation profiles (csv files)")
for obj in bucket.objects.filter(Prefix=profile_location):
output_location = os.path.join(
"profiles", obj.key.replace(profile_location, "").strip("/")
Expand All @@ -263,11 +279,13 @@ def download_data(self, bucket_name, update_loadshape_location=False):
bucket.download_file(obj.key, output_location)

if sensor_location is not None:
logger.info(f"Downloading sensor files")
output_location = os.path.join("sensors", os.path.basename(sensor_location))
if not os.path.exists(os.path.dirname(output_location)):
os.makedirs(os.path.dirname(output_location))
bucket.download_file(sensor_location, output_location)

logger.info(f"Downloading complete")

def create_measurement_lists(
self,
percent_voltage=75,
Expand Down Expand Up @@ -328,7 +346,7 @@ def get_bus_coords(self) -> Dict[str, Tuple[float, float]] | None:
identifier, x, y = row
bus_coords[identifier] = (float(x), float(y))
except ValueError as e:
logging.warning(f"Unable to parse row in bus coords: {row}, {e}")
logger.warning(f"Unable to parse row in bus coords: {row}, {e}")
return None
return bus_coords

Expand Down Expand Up @@ -461,7 +479,7 @@ def initial_disabled_solve(self):
self._state = OpenDSSState.DISABLED_SOLVE

def just_solve(self):
"""Solvesolve without setting time or anything. Useful for commands."""
"""Solve without setting time or anything. Useful for commands."""
assert (
self._state != OpenDSSState.UNLOADED
and self._state != OpenDSSState.DISABLED_RUN
Expand Down Expand Up @@ -501,16 +519,17 @@ def get_PQs_load(self, static=False):
for ld in get_loads(dss, self._circuit):
self._circuit.SetActiveElement("Load." + ld["name"])
current_pq_name = dss.CktElement.Name()
for i, node_name in enumerate(ld["node_names"]):
for ii in range(len(ld["phases"])):
node_name = ld["bus1"].upper() + "." + ld["phases"][ii]
assert (
node_name in all_node_names
), f"{node_name} for {current_pq_name} not found"
if static:
power = complex(ld["kW"], ld["kVar"])
PQs.append(power / ld["numPhases"])
PQs.append(power / len(ld["phases"]))
else:
power = dss.CktElement.Powers()
PQs.append(complex(power[2 * i], power[2 * i + 1]))
PQs.append(complex(power[2 * ii], power[2 * ii + 1]))
pq_names.append(current_pq_name)
node_names.append(node_name)
pq_xr = xr.DataArray(
Expand All @@ -532,20 +551,24 @@ def get_PQs_pv(self, static=False):
node_names: List[str] = []
pq_names: List[str] = []
for PV in get_pvsystems(dss):
bus = PV["bus"].split(".")
if len(bus) == 1:
bus = bus + ["1", "2", "3"]
self._circuit.SetActiveElement("PVSystem." + PV["name"])
current_pq_name = dss.CktElement.Name()
for i, node_name in enumerate(PV["node_names"]):
for ii in range(len(bus) - 1):
node_name = bus[0].upper() + "." + bus[ii + 1]
assert (
node_name in all_node_names
), f"{node_name} for {current_pq_name} not found"
if static:
power = complex(
-1 * PV["kW"], -1 * PV["kVar"]
) # -1 because injecting
PQs.append(power / PV["numPhases"])
PQs.append(power / (len(bus) - 1))
else:
power = dss.CktElement.Powers()
PQs.append(complex(power[2 * i], power[2 * i + 1]))
PQs.append(complex(power[2 * ii], power[2 * ii + 1]))
pq_names.append(current_pq_name)
node_names.append(node_name)
pq_xr = xr.DataArray(
Expand All @@ -567,21 +590,24 @@ def get_PQs_gen(self, static=False):
node_names: List[str] = []
pq_names: List[str] = []
for gen in get_generators(dss):
bus = gen["bus"].split(".")
if len(bus) == 1:
bus = bus + ["1", "2", "3"]
self._circuit.SetActiveElement("Generator." + gen["name"])
current_pq_name = dss.CktElement.Name()

for i, node_name in enumerate(gen["node_names"]):
for ii in range(len(bus) - 1):
node_name = bus[0].upper() + "." + bus[ii + 1]
assert (
node_name in all_node_names
), f"{node_name} for {current_pq_name} not found"
if static:
power = complex(
-1 * gen["kW"], -1 * gen["kVar"]
) # -1 because injecting
PQs.append(power / gen["numPhases"])
PQs.append(power / (len(bus) - 1))
else:
power = dss.CktElement.Powers()
PQs.append(complex(power[2 * i], power[2 * i + 1]))
PQs.append(complex(power[2 * ii], power[2 * ii + 1]))
pq_names.append(current_pq_name)
node_names.append(node_name)
pq_xr = xr.DataArray(
Expand Down Expand Up @@ -609,7 +635,8 @@ def get_PQs_cap(self, static=False):
pq_names: List[str] = []
for cap in get_capacitors(dss):
current_pq_name = cap["name"]
for i, node_name in enumerate(cap["node_names"]):
for ii in range(cap["numPhases"]):
node_name = cap["busname"].upper() + "." + cap["busphase"][ii]
assert (
node_name in all_node_names
), f"{node_name} for {current_pq_name} not found"
Expand All @@ -619,7 +646,7 @@ def get_PQs_cap(self, static=False):
) # -1 because it's injected into the grid
PQs.append(power / cap["numPhases"])
else:
PQs.append(complex(0, cap["power"][2 * i + 1]))
PQs.append(complex(0, cap["power"][2 * ii + 1]))
pq_names.append(current_pq_name)
node_names.append(node_name)
pq_xr = xr.DataArray(
Expand Down Expand Up @@ -731,7 +758,7 @@ def create_inverter(self, pvsystem_set: Set[str]):
pvlist = ""
else:
if len(pvsystem_set) != 1:
logging.error(
logger.error(
"""Controlling mulitple pvsystems manually results in unstable
behavior when the number of phases differ"""
)
Expand Down Expand Up @@ -900,7 +927,7 @@ def get_incidences(self) -> IncidenceList:
# dicts are insert-ordered in >=3.7
names = list(dict.fromkeys(bus_names))
if len(names) != 2:
logging.info(
logger.info(
f"Line {line} has {len(names)} terminals, skipping in incidence matrix"
)
continue
Expand All @@ -916,7 +943,7 @@ def get_incidences(self) -> IncidenceList:
bus_names = map(lambda x: x.split(".")[0], names)
names = list(dict.fromkeys(bus_names))
if len(names) != 2:
logging.info(
logger.info(
f"Transformer {transformer} has {len(names)} terminals, skipping in incidence matrix"
)
continue
Expand Down
38 changes: 8 additions & 30 deletions LocalFeeder/dss_functions.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
"""OpenDSS functions. Mutates global state, originally from GO-Solar project."""

import math


Expand All @@ -19,23 +18,21 @@ def get_loads(dss, circuit):
}
_ = circuit.SetActiveElement("Load.%s" % datum["name"])
cktElement = dss.CktElement
buses = cktElement.BusNames()
bus = buses[0].split(".")
bus = cktElement.BusNames()[0].split(".")
datum["kVar"] = (
float(datum["kW"])
/ float(datum["PF"])
* math.sqrt(1 - float(datum["PF"]) * float(datum["PF"]))
)
datum["bus1"] = bus[0]
datum["numPhases"] = dss.CktElement.NumPhases()
datum["numPhases"] = len(bus[1:])
datum["phases"] = bus[1:]
if not datum["numPhases"]:
datum["numPhases"] = 3
datum["phases"] = ["1", "2", "3"]
datum["voltageMag"] = cktElement.VoltagesMagAng()[0]
datum["voltageAng"] = cktElement.VoltagesMagAng()[1]
datum["power"] = dss.CktElement.Powers()[:2]
datum["node_names"] = get_all_nodes(buses)

data.append(datum)
load_flag = dss.Loads.Next()
Expand All @@ -58,8 +55,7 @@ def get_pvsystems(dss):
PVkvar = dss.PVsystems.kvar()

NumPhase = dss.CktElement.NumPhases()
buses = dss.CktElement.BusNames()
bus = buses[0].split(".")
bus = dss.CktElement.BusNames()[0]
# PVkV = dss.run_command('? ' + PVname + '.kV')
# Not included in PVsystems commands for some reason

Expand All @@ -74,29 +70,12 @@ def get_pvsystems(dss):
datum["numPhase"] = NumPhase
datum["numPhases"] = NumPhase
datum["power"] = dss.CktElement.Powers()[: 2 * NumPhase]
datum["node_names"] = get_all_nodes(buses)

data.append(datum)
PV_flag = dss.PVsystems.Next()
return data


def get_all_nodes(buses: list[str]):
    """Return fully-qualified node names ("BUS.phase") for each bus entry.

    Each entry in *buses* is "name" or "name.p1.p2...". The bus name is
    upper-cased; a bus listed with no phases expands to phases 1, 2 and 3,
    and an explicit "0" (neutral) phase is dropped.
    """
    nodes: list[str] = []
    for entry in buses:
        name, *phase_list = entry.split(".")
        upper_name = name.upper()
        if not phase_list:
            # No explicit phases: assume a full three-phase connection.
            phase_list = ["1", "2", "3"]
        nodes.extend(
            f"{upper_name}.{phase}" for phase in phase_list if phase != "0"
        )
    return nodes


def get_generators(dss):
"""Get list of Generator dicts from OpenDSS circuit."""
data = []
Expand All @@ -105,11 +84,13 @@ def get_generators(dss):
while gen_flag:
GENname = dss.Generators.Name()
NumPhase = dss.CktElement.NumPhases()
buses = dss.CktElement.BusNames()
bus = buses[0].split(".")
bus = dss.CktElement.BusNames()[0]
GENkW = dss.Generators.kW()
GENpf = dss.Generators.PF()
GENkV = dss.Generators.kV()
bus = bus.split(".")
if len(bus) == 1:
bus = bus + ["1", "2", "3"]
datum = {
"name": GENname,
"bus": bus,
Expand All @@ -121,7 +102,6 @@ def get_generators(dss):
"kV": GENkV,
"numPhase": NumPhase,
"numPhases": NumPhase,
"node_names": get_all_nodes(buses),
}
data.append(datum)
gen_flag = dss.Generators.Next()
Expand All @@ -137,8 +117,7 @@ def get_capacitors(dss):
datum = {}
capname = dss.CktElement.Name()
NumPhase = dss.CktElement.NumPhases()
buses = dss.CktElement.BusNames()
bus = buses[0]
bus = dss.CktElement.BusNames()[0]
kvar = dss.Capacitors.kvar()
datum["name"] = capname
temp = bus.split(".")
Expand All @@ -149,7 +128,6 @@ def get_capacitors(dss):
datum["kVar"] = kvar
datum["numPhases"] = NumPhase
datum["power"] = dss.CktElement.Powers()[: 2 * NumPhase]
datum["node_names"] = get_all_nodes(buses) # second is 0
data.append(datum)
cap_flag = dss.Capacitors.Next()
return data
Expand Down
5 changes: 1 addition & 4 deletions LocalFeeder/sender_cosim.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,7 @@
)
from scipy.sparse import coo_matrix

logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.DEBUG)

logger = logging.getLogger("uvicorn.error")

def numpy_to_y_matrix(array: npt.NDArray[np.complex64]):
"""Convert 2d numpy array to list of lists."""
Expand Down
Loading