diff --git a/LocalFeeder/FeederSimulator.py b/LocalFeeder/FeederSimulator.py index b35ba97..9427d6d 100644 --- a/LocalFeeder/FeederSimulator.py +++ b/LocalFeeder/FeederSimulator.py @@ -34,9 +34,7 @@ from pydantic import BaseModel from scipy.sparse import coo_matrix, csc_matrix -logger = logging.getLogger(__name__) -logger.addHandler(logging.StreamHandler()) -logger.setLevel(logging.INFO) +logger = logging.getLogger("uvicorn.error") def permutation(from_list, to_list): @@ -117,6 +115,7 @@ class FeederSimulator(object): def __init__(self, config: FeederConfig): """Create a ``FeederSimulator`` object.""" + logger.info("Creating an instance of feeder simulator") self._state = OpenDSSState.UNLOADED self._opendss_location = config.opendss_location self._profile_location = config.profile_location @@ -139,29 +138,39 @@ def __init__(self, config: FeederConfig): self.tap_setting = config.tap_setting - if config.existing_feeder_file is None or not os.path.exists( - config.existing_feeder_file - ): + self._simulation_time_step = "15m" + if config.existing_feeder_file is None: if self._use_smartds: + logger.info("Downloading Opendss model from OEDI data lake") self._feeder_file = os.path.join("opendss", "Master.dss") self.download_data("oedi-data-lake", update_loadshape_location=True) elif not self._use_smartds and not self._user_uploads_model: + logger.info("Defaulting to OpenDSS master file 'Master.dss'") self._feeder_file = os.path.join("opendss", "master.dss") self.download_data("gadal") else: + logger.error("User should have uploaded model using endpoint before running the simulation") # User should have uploaded model using endpoint raise Exception("Set existing_feeder_file when uploading data") else: + logger.info(f"Using user defined OpenDSS master file: '{config.existing_feeder_file}'") self._feeder_file = config.existing_feeder_file self.open_lines = config.open_lines + logger.info(f"Loading OpenDSS model...") self.load_feeder() - + logger.info(f"complete") + 
logger.info(f"Locating sensor information") if self._sensor_location is None: + logger.info(f"No sensor info provided. Creating sensors...") self.create_measurement_lists() + logger.info(f"complete") + logger.info(f"Running snapshot simulation...") self.snapshot_run() + logger.info(f"Running snapshot simulation") assert self._state == OpenDSSState.SNAPSHOT_RUN, f"{self._state}" + logger.info(f"complete") def forcast_pv(self, steps: int) -> list: """ @@ -169,7 +178,7 @@ def forcast_pv(self, steps: int) -> list: average irradiance is computed over all PV systems for each time step. This average irradiance is used to compute the individual PV system power output """ - cmd = f'Set stepsize={self._run_freq_sec} Number=1' + cmd = f"Set stepsize={self._simulation_time_step} Number=1" dss.Text.Command(cmd) forecast = [] for k in range(steps): @@ -218,7 +227,7 @@ def reenable(self): def download_data(self, bucket_name, update_loadshape_location=False): """Download data from bucket path.""" - logging.info(f"Downloading from bucket {bucket_name}") + logger.info(f"Downloading from bucket {bucket_name}") # Equivalent to --no-sign-request s3_resource = boto3.resource("s3", config=Config(signature_version=UNSIGNED)) bucket = s3_resource.Bucket(bucket_name) @@ -226,6 +235,8 @@ def download_data(self, bucket_name, update_loadshape_location=False): profile_location = self._profile_location sensor_location = self._sensor_location + logger.info(f"Downloading the OpenDSS model") + for obj in bucket.objects.filter(Prefix=opendss_location): output_location = os.path.join( "opendss", obj.key.replace(opendss_location, "").strip("/") ) os.makedirs(os.path.dirname(output_location), exist_ok=True) bucket.download_file(obj.key, output_location) + logger.info(f"Downloading the OpenDSS profiles") + modified_loadshapes = "" os.makedirs(os.path.join("profiles"), exist_ok=True) if update_loadshape_location: + 
logger.info(f"Downloading the OpenDSS loadshape files") + all_profiles = set() with open(os.path.join("opendss", "LoadShapes.dss"), "r") as fp_loadshapes: for row in fp_loadshapes.readlines(): @@ -253,8 +268,9 @@ def download_data(self, bucket_name, update_loadshape_location=False): for profile in all_profiles: s3_location = f"{profile_location}/{profile}" bucket.download_file(s3_location, os.path.join("profiles", profile)) - + else: + logger.info(f"Downloading load and generation profiles (csv files)") for obj in bucket.objects.filter(Prefix=profile_location): output_location = os.path.join( "profiles", obj.key.replace(profile_location, "").strip("/") @@ -263,11 +279,13 @@ def download_data(self, bucket_name, update_loadshape_location=False): bucket.download_file(obj.key, output_location) if sensor_location is not None: + logger.info(f"Downloading sensor files") output_location = os.path.join("sensors", os.path.basename(sensor_location)) if not os.path.exists(os.path.dirname(output_location)): os.makedirs(os.path.dirname(output_location)) bucket.download_file(sensor_location, output_location) - + logger.info(f"Downloading complete") + def create_measurement_lists( self, percent_voltage=75, @@ -328,7 +346,7 @@ def get_bus_coords(self) -> Dict[str, Tuple[float, float]] | None: identifier, x, y = row bus_coords[identifier] = (float(x), float(y)) except ValueError as e: - logging.warning(f"Unable to parse row in bus coords: {row}, {e}") + logger.warning(f"Unable to parse row in bus coords: {row}, {e}") return None return bus_coords @@ -461,7 +479,7 @@ def initial_disabled_solve(self): self._state = OpenDSSState.DISABLED_SOLVE def just_solve(self): - """Solvesolve without setting time or anything. Useful for commands.""" + """Solve without setting time or anything. 
Useful for commands.""" assert ( self._state != OpenDSSState.UNLOADED and self._state != OpenDSSState.DISABLED_RUN @@ -501,16 +519,17 @@ def get_PQs_load(self, static=False): for ld in get_loads(dss, self._circuit): self._circuit.SetActiveElement("Load." + ld["name"]) current_pq_name = dss.CktElement.Name() - for i, node_name in enumerate(ld["node_names"]): + for ii in range(len(ld["phases"])): + node_name = ld["bus1"].upper() + "." + ld["phases"][ii] assert ( node_name in all_node_names ), f"{node_name} for {current_pq_name} not found" if static: power = complex(ld["kW"], ld["kVar"]) - PQs.append(power / ld["numPhases"]) + PQs.append(power / len(ld["phases"])) else: power = dss.CktElement.Powers() - PQs.append(complex(power[2 * i], power[2 * i + 1])) + PQs.append(complex(power[2 * ii], power[2 * ii + 1])) pq_names.append(current_pq_name) node_names.append(node_name) pq_xr = xr.DataArray( @@ -532,9 +551,13 @@ def get_PQs_pv(self, static=False): node_names: List[str] = [] pq_names: List[str] = [] for PV in get_pvsystems(dss): + bus = PV["bus"].split(".") + if len(bus) == 1: + bus = bus + ["1", "2", "3"] self._circuit.SetActiveElement("PVSystem." + PV["name"]) current_pq_name = dss.CktElement.Name() - for i, node_name in enumerate(PV["node_names"]): + for ii in range(len(bus) - 1): + node_name = bus[0].upper() + "." 
+ bus[ii + 1] assert ( node_name in all_node_names ), f"{node_name} for {current_pq_name} not found" @@ -542,10 +565,10 @@ def get_PQs_pv(self, static=False): power = complex( -1 * PV["kW"], -1 * PV["kVar"] ) # -1 because injecting - PQs.append(power / PV["numPhases"]) + PQs.append(power / (len(bus) - 1)) else: power = dss.CktElement.Powers() - PQs.append(complex(power[2 * i], power[2 * i + 1])) + PQs.append(complex(power[2 * ii], power[2 * ii + 1])) pq_names.append(current_pq_name) node_names.append(node_name) pq_xr = xr.DataArray( @@ -567,10 +590,13 @@ def get_PQs_gen(self, static=False): node_names: List[str] = [] pq_names: List[str] = [] for gen in get_generators(dss): + bus = gen["bus"].split(".") + if len(bus) == 1: + bus = bus + ["1", "2", "3"] self._circuit.SetActiveElement("Generator." + gen["name"]) current_pq_name = dss.CktElement.Name() - - for i, node_name in enumerate(gen["node_names"]): + for ii in range(len(bus) - 1): + node_name = bus[0].upper() + "." + bus[ii + 1] assert ( node_name in all_node_names ), f"{node_name} for {current_pq_name} not found" @@ -578,10 +604,10 @@ def get_PQs_gen(self, static=False): power = complex( -1 * gen["kW"], -1 * gen["kVar"] ) # -1 because injecting - PQs.append(power / gen["numPhases"]) + PQs.append(power / (len(bus) - 1)) else: power = dss.CktElement.Powers() - PQs.append(complex(power[2 * i], power[2 * i + 1])) + PQs.append(complex(power[2 * ii], power[2 * ii + 1])) pq_names.append(current_pq_name) node_names.append(node_name) pq_xr = xr.DataArray( @@ -609,7 +635,8 @@ def get_PQs_cap(self, static=False): pq_names: List[str] = [] for cap in get_capacitors(dss): current_pq_name = cap["name"] - for i, node_name in enumerate(cap["node_names"]): + for ii in range(cap["numPhases"]): + node_name = cap["busname"].upper() + "." 
+ cap["busphase"][ii] assert ( node_name in all_node_names ), f"{node_name} for {current_pq_name} not found" @@ -619,7 +646,7 @@ def get_PQs_cap(self, static=False): ) # -1 because it's injected into the grid PQs.append(power / cap["numPhases"]) else: - PQs.append(complex(0, cap["power"][2 * i + 1])) + PQs.append(complex(0, cap["power"][2 * ii + 1])) pq_names.append(current_pq_name) node_names.append(node_name) pq_xr = xr.DataArray( @@ -731,7 +758,7 @@ def create_inverter(self, pvsystem_set: Set[str]): pvlist = "" else: if len(pvsystem_set) != 1: - logging.error( + logger.error( """Controlling mulitple pvsystems manually results in unstable behavior when the number of phases differ""" ) @@ -900,7 +927,7 @@ def get_incidences(self) -> IncidenceList: # dicts are insert-ordered in >=3.7 names = list(dict.fromkeys(bus_names)) if len(names) != 2: - logging.info( + logger.info( f"Line {line} has {len(names)} terminals, skipping in incidence matrix" ) continue @@ -916,7 +943,7 @@ def get_incidences(self) -> IncidenceList: bus_names = map(lambda x: x.split(".")[0], names) names = list(dict.fromkeys(bus_names)) if len(names) != 2: - logging.info( + logger.info( f"Transformer {transformer} has {len(names)} terminals, skipping in incidence matrix" ) continue diff --git a/LocalFeeder/dss_functions.py b/LocalFeeder/dss_functions.py index 619735e..3b04219 100644 --- a/LocalFeeder/dss_functions.py +++ b/LocalFeeder/dss_functions.py @@ -1,5 +1,4 @@ """OpenDSS functions. 
Mutates global state, originally from GO-Solar project.""" - import math @@ -19,15 +18,14 @@ def get_loads(dss, circuit): } _ = circuit.SetActiveElement("Load.%s" % datum["name"]) cktElement = dss.CktElement - buses = cktElement.BusNames() - bus = buses[0].split(".") + bus = cktElement.BusNames()[0].split(".") datum["kVar"] = ( float(datum["kW"]) / float(datum["PF"]) * math.sqrt(1 - float(datum["PF"]) * float(datum["PF"])) ) datum["bus1"] = bus[0] - datum["numPhases"] = dss.CktElement.NumPhases() + datum["numPhases"] = len(bus[1:]) datum["phases"] = bus[1:] if not datum["numPhases"]: datum["numPhases"] = 3 @@ -35,7 +33,6 @@ def get_loads(dss, circuit): datum["voltageMag"] = cktElement.VoltagesMagAng()[0] datum["voltageAng"] = cktElement.VoltagesMagAng()[1] datum["power"] = dss.CktElement.Powers()[:2] - datum["node_names"] = get_all_nodes(buses) data.append(datum) load_flag = dss.Loads.Next() @@ -58,8 +55,7 @@ def get_pvsystems(dss): PVkvar = dss.PVsystems.kvar() NumPhase = dss.CktElement.NumPhases() - buses = dss.CktElement.BusNames() - bus = buses[0].split(".") + bus = dss.CktElement.BusNames()[0] # PVkV = dss.run_command('? ' + PVname + '.kV') # Not included in PVsystems commands for some reason @@ -74,29 +70,12 @@ def get_pvsystems(dss): datum["numPhase"] = NumPhase datum["numPhases"] = NumPhase datum["power"] = dss.CktElement.Powers()[: 2 * NumPhase] - datum["node_names"] = get_all_nodes(buses) data.append(datum) PV_flag = dss.PVsystems.Next() return data -def get_all_nodes(buses: list[str]): - """Get all nodes from list of buses.""" - all_nodes = [] - for bus in buses: - sub_bus = bus.split(".") - core_name = sub_bus[0].upper() - phases = sub_bus[1:] - if len(phases) == 0: - all_nodes += [core_name + ".1", core_name + ".2", core_name + ".3"] - continue - phases = filter(lambda x: x != "0", phases) - all_nodes += [core_name + "." 
+ phase for phase in phases] - - return all_nodes - - def get_generators(dss): """Get list of Generator dicts from OpenDSS circuit.""" data = [] @@ -105,11 +84,13 @@ def get_generators(dss): while gen_flag: GENname = dss.Generators.Name() NumPhase = dss.CktElement.NumPhases() - buses = dss.CktElement.BusNames() - bus = buses[0].split(".") + bus = dss.CktElement.BusNames()[0] GENkW = dss.Generators.kW() GENpf = dss.Generators.PF() GENkV = dss.Generators.kV() + bus = bus.split(".") + if len(bus) == 1: + bus = bus + ["1", "2", "3"] datum = { "name": GENname, "bus": bus, @@ -121,7 +102,6 @@ def get_generators(dss): "kV": GENkV, "numPhase": NumPhase, "numPhases": NumPhase, - "node_names": get_all_nodes(buses), } data.append(datum) gen_flag = dss.Generators.Next() @@ -137,8 +117,7 @@ def get_capacitors(dss): datum = {} capname = dss.CktElement.Name() NumPhase = dss.CktElement.NumPhases() - buses = dss.CktElement.BusNames() - bus = buses[0] + bus = dss.CktElement.BusNames()[0] kvar = dss.Capacitors.kvar() datum["name"] = capname temp = bus.split(".") @@ -149,7 +128,6 @@ def get_capacitors(dss): datum["kVar"] = kvar datum["numPhases"] = NumPhase datum["power"] = dss.CktElement.Powers()[: 2 * NumPhase] - datum["node_names"] = get_all_nodes(buses) # second is 0 data.append(datum) cap_flag = dss.Capacitors.Next() return data diff --git a/LocalFeeder/sender_cosim.py b/LocalFeeder/sender_cosim.py index fd07f3b..f5bc35d 100644 --- a/LocalFeeder/sender_cosim.py +++ b/LocalFeeder/sender_cosim.py @@ -30,10 +30,7 @@ ) from scipy.sparse import coo_matrix -logger = logging.getLogger(__name__) -logger.addHandler(logging.StreamHandler()) -logger.setLevel(logging.DEBUG) - +logger = logging.getLogger("uvicorn.error") def numpy_to_y_matrix(array: npt.NDArray[np.complex64]): """Convert 2d numpy array to list of lists.""" diff --git a/LocalFeeder/server.py b/LocalFeeder/server.py index 85032fc..e440509 100644 --- a/LocalFeeder/server.py +++ b/LocalFeeder/server.py @@ -17,12 +17,16 @@ from 
oedisi.types.common import ServerReply, HeathCheck, DefaultFileNames from oedisi.types.common import BrokerConfig + +logger = logging.getLogger('uvicorn.error') +logger.setLevel(logging.DEBUG) + REQUEST_TIMEOUT_SEC = 1200 app = FastAPI() base_path = os.getcwd() - +params = None @app.middleware("http") async def timeout_middleware(request: Request, call_next): @@ -53,22 +57,35 @@ def read_root(): @app.get("/sensor") async def sensor(): - logging.info(os.getcwd()) - sensor_path = os.path.join(base_path, "sensors", "sensors.json") - while not os.path.exists(sensor_path): - time.sleep(1) - logging.info(f"waiting {sensor_path}") - logging.info("success") - data = json.load(open(sensor_path, "r")) - return data - + logger.info(os.getcwd()) + global params + if params: + sensor_path = os.path.join(base_path, "sensors") + + if "sensor_location" not in params: + required_files = ["voltage_ids", "real_ids", "reactive_ids"] + else: + required_files = ["sensors"] + + while sum([os.path.exists(os.path.join(sensor_path, f"{r}.json")) for r in required_files]) != len(required_files): + time.sleep(1) + logger.info(f"waiting for sensor file: {required_files}") + + logger.info("sensor file available") + sensor_data ={} + for r in required_files: + sensor_data[r] = json.load(open(os.path.join(sensor_path, f"{r}.json"), "r")) + return sensor_data + else: + err = "'params' not defined. This endpoint can only be used after the federate has been configured and is in run mode" + raise HTTPException(500, err) @app.post("/profiles") async def upload_profiles(file: UploadFile): try: data = file.file.read() if not file.filename.endswith(".zip"): - HTTPException(400, "Invalid file type. Only zipped profiles are accepted.") + raise HTTPException(400, "Invalid file type. 
Only zipped profiles are accepted.") profile_path = "./profiles" @@ -86,12 +103,12 @@ async def upload_profiles(file: UploadFile): ).dict() return JSONResponse(response, 200) else: - HTTPException( + raise HTTPException( 400, "Invalid user defined profile structure. See OEDISI documentation." ) except Exception as e: - HTTPException( + raise HTTPException( 500, "Unknown error while uploading userdefined opendss profiles." ) @@ -120,16 +137,16 @@ async def upload_model(file: UploadFile): return JSONResponse(response, 200) else: - HTTPException(400, "A valid opendss model should have a master.dss file.") + raise HTTPException(400, "A valid opendss model should have a master.dss file.") except Exception as e: - HTTPException(500, "Unknown error while uploading userdefined opendss model.") + raise HTTPException(500, "Unknown error while uploading userdefined opendss model.") @app.post("/run") async def run_feeder( broker_config: BrokerConfig, background_tasks: BackgroundTasks ): # :BrokerConfig - logging.info(broker_config) + logger.info(broker_config) try: background_tasks.add_task(run_simulator, broker_config) response = ServerReply(detail="Task sucessfully added.").dict() @@ -137,11 +154,12 @@ async def run_feeder( return JSONResponse(response, 200) except Exception as e: err = traceback.format_exc() - HTTPException(500, str(err)) + raise HTTPException(500, str(err)) @app.post("/configure") async def configure(component_struct:ComponentStruct): + global params component = component_struct.component params = component.parameters params["name"] = component.name diff --git a/LocalFeeder/tests/config for api test.json b/LocalFeeder/tests/config for api test.json new file mode 100644 index 0000000..d669e1a --- /dev/null +++ b/LocalFeeder/tests/config for api test.json @@ -0,0 +1,29 @@ +{ + "component": { + "name": "feeder", + "type": "LocalFeeder", + "host": "feeder", + "container_port": 5678, + "parameters": { + "use_smartds": true, + "user_uploads_model": false, + 
"profile_location": "SMART-DS/v1.0/2017/SFO/P9U/profiles", + "opendss_location": "SMART-DS/v1.0/2017/SFO/P9U/scenarios/solar_medium_batteries_none_timeseries/opendss/p9uhs16_1247/p9uhs16_1247--p9udt12866", + "start_date": "2017-05-01 00:00:00", + "number_of_timesteps": 3, + "run_freq_sec": 900, + "topology_output": "topology.json", + "use_sparse_admittance": true + } + }, + "links": [] +} + + +{ + "broker_port": 23404, + "broker_ip": "127.0.0.1", + "api_port": 8766, + "feeder_host": "string", + "feeder_port": 0 +} \ No newline at end of file diff --git a/LocalFeeder/tests/test_feeder.py b/LocalFeeder/tests/test_feeder.py index a9623af..4afffb6 100644 --- a/LocalFeeder/tests/test_feeder.py +++ b/LocalFeeder/tests/test_feeder.py @@ -60,24 +60,6 @@ def federate_config(): ) -@pytest.fixture() -def edge_cases_config(): - return FeederSimulator.FeederConfig( - **{ - "use_smartds": False, - "profile_location": "", - "opendss_location": "", - "sensor_location": "", - "existing_feeder_file": "tests/test_data/master.dss", - "start_date": "2017-01-01 00:00:00", - "number_of_timesteps": 1, - "run_freq_sec": 900, - "topology_output": "topology.json", - "name": "feeder", - } - ) - - def plot_y_matrix(Y): Y_max = np.max(np.abs(Y)) @@ -676,12 +658,3 @@ def test_incidence_matrix(federate_config): assert len(incidences.equipment_type) == len(incidences.from_equipment) if incidences.ids is not None: assert len(incidences.ids) == len(incidences.from_equipment) - - -def test_edge_case(edge_cases_config): - sim = FeederSimulator.FeederSimulator(edge_cases_config) - sim.snapshot_run() - sim.get_PQs_gen(static=True) - sim.solve(0, 0) - pq = sim.get_PQs_gen() - assert len(pq) == 4 diff --git a/measuring_federate/measuring_federate.py b/measuring_federate/measuring_federate.py index e5742ac..ebe703c 100644 --- a/measuring_federate/measuring_federate.py +++ b/measuring_federate/measuring_federate.py @@ -8,7 +8,7 @@ from oedisi.types.data_types import MeasurementArray, EquipmentNodeArray from 
oedisi.types.common import BrokerConfig -logger = logging.getLogger(__name__) +logger = logging.getLogger("measuring_federate") logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) diff --git a/measuring_federate/server.py b/measuring_federate/server.py index f3632d3..d6ff72f 100644 --- a/measuring_federate/server.py +++ b/measuring_federate/server.py @@ -14,6 +14,11 @@ from oedisi.types.common import ServerReply, HeathCheck, DefaultFileNames from oedisi.types.common import BrokerConfig +sensor_logger = logging.getLogger("measuring_federate") +logger = logging.getLogger('uvicorn.error') +for handler in sensor_logger.handlers: logger.addHandler(handler) +logger.setLevel(logging.DEBUG) + app = FastAPI() is_kubernetes_env = os.environ['SERVICE_NAME'] if 'SERVICE_NAME' in os.environ else None @@ -39,20 +44,23 @@ async def read_root(): @app.post("/run") async def run_model(broker_config:BrokerConfig, background_tasks: BackgroundTasks): - logging.info(broker_config) + logger.info(f"{broker_config=}") feeder_host = broker_config.feeder_host feeder_port = broker_config.feeder_port url = build_url(feeder_host, feeder_port, ['sensor']) - logging.info(url) + logger.info(url) try: + logger.info("Requesting sensor information. 
This might take a while") reply = requests.get(url) - sensor_data = reply.json() - if not sensor_data: + sensor_dict = reply.json() + if not sensor_dict: msg = "empty sensor list" raise HTTPException(404, msg) - logging.info(sensor_data) - with open("sensors.json", "w") as outfile: - json.dump(sensor_data, outfile) + logger.info(f"Sensors types available {list(sensor_dict.keys())}", ) + + for sensor_type, sensorlist in sensor_dict.items(): + with open(f"{sensor_type}.json", "w") as outfile: + json.dump(sensorlist, outfile) background_tasks.add_task(run_simulator, broker_config) response = ServerReply( @@ -80,4 +88,3 @@ async def configure(component_struct:ComponentStruct): if __name__ == "__main__": uvicorn.run(app, host="0.0.0.0", port=int(os.environ['PORT'])) - diff --git a/system.json b/system.json new file mode 100644 index 0000000..978ea79 --- /dev/null +++ b/system.json @@ -0,0 +1,203 @@ +{ + "name": "docker_test", + "components": [ + { + "name": "feeder", + "type": "LocalFeeder", + "host": "feeder", + "container_port": 5678, + "parameters": { + "use_smartds": false, + "user_uploads_model": false, + "profile_location": "gadal_ieee123/profiles", + "opendss_location": "gadal_ieee123/qsts", + "sensor_location": "gadal_ieee123/sensors.json", + "start_date": "2017-01-01 00:00:00", + "number_of_timesteps": 3, + "run_freq_sec": 900, + "topology_output": "topology.json" + } + }, + { + "name": "recorder_voltage_real", + "type": "Recorder", + "host": "recorder-voltage-real", + "container_port": 5679, + "parameters": {"feather_filename": "voltage_real.feather", + "csv_filename": "voltage_real.csv" + } + }, + { + "name": "recorder_voltage_imag", + "type": "Recorder", + "host": "recorder-voltage-imag", + "container_port": 5680, + "parameters": {"feather_filename": "voltage_imag.feather", + "csv_filename": "voltage_imag.csv" + } + }, + { + "name": "recorder_voltage_mag", + "type": "Recorder", + "host": "recorder-voltage-mag", + "container_port": 5681, + "parameters": 
{"feather_filename": "voltage_mag.feather", + "csv_filename": "voltage_mag.csv" + } + }, + { + "name": "recorder_voltage_angle", + "type": "Recorder", + "host": "recorder-voltage-angle", + "container_port": 5682, + "parameters": {"feather_filename": "voltage_angle.feather", + "csv_filename": "voltage_angle.csv" + } + }, + { + "name": "state_estimator", + "type": "StateEstimatorComponent", + "host": "state-estimator", + "container_port": 5683, + "parameters": { + "algorithm_parameters": {"tol": 1e-5} + } + }, + { + "name": "sensor_voltage_real", + "type": "MeasurementComponent", + "host": "sensor-voltage-real", + "container_port": 5684, + "parameters": { + "gaussian_variance": 0.0, + "random_percent": 0.0, + "measurement_file": "sensors.json" + } + }, + { + "name": "sensor_voltage_magnitude", + "type": "MeasurementComponent", + "host": "sensor-voltage-magnitude", + "container_port": 5685, + "parameters": { + "gaussian_variance": 0.0, + "random_percent": 0.0, + "measurement_file": "sensors.json" + } + }, + { + "name": "sensor_voltage_imaginary", + "type": "MeasurementComponent", + "host": "sensor-voltage-imaginary", + "container_port": 5686, + "parameters": { + "gaussian_variance": 0.0, + "random_percent": 0.0, + "measurement_file": "sensors.json" + } + }, + { + "name": "sensor_power_real", + "type": "MeasurementComponent", + "host": "sensor-power-real", + "container_port": 5687, + "parameters": { + "gaussian_variance": 0.0, + "random_percent": 0.0, + "measurement_file": "sensors.json" + } + }, + { + "name": "sensor_power_imaginary", + "type": "MeasurementComponent", + "host": "sensor-power-imaginary", + "container_port": 5688, + "parameters": { + "gaussian_variance": 0.0, + "random_percent": 0.0, + "measurement_file": "sensors.json" + } + } + + ], + "links": [ + { + "source": "feeder", + "source_port": "voltages_magnitude", + "target": "sensor_voltage_magnitude", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "voltages_real", + 
"target": "sensor_voltage_real", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "voltages_imag", + "target": "sensor_voltage_imaginary", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "powers_real", + "target": "sensor_power_real", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "powers_imag", + "target": "sensor_power_imaginary", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "topology", + "target": "state_estimator", + "target_port": "topology" + }, + { + "source": "sensor_voltage_magnitude", + "source_port": "publication", + "target": "state_estimator", + "target_port": "voltages_magnitude" + }, + { + "source": "sensor_power_real", + "source_port": "publication", + "target": "state_estimator", + "target_port": "powers_real" + }, + { + "source": "sensor_power_imaginary", + "source_port": "publication", + "target": "state_estimator", + "target_port": "powers_imaginary" + }, + { + "source": "feeder", + "source_port": "voltages_real", + "target": "recorder_voltage_real", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "voltages_imag", + "target": "recorder_voltage_imag", + "target_port": "subscription" + }, + { + "source": "state_estimator", + "source_port": "voltage_angle", + "target": "recorder_voltage_angle", + "target_port": "subscription" + }, + { + "source": "state_estimator", + "source_port": "voltage_mag", + "target": "recorder_voltage_mag", + "target_port": "subscription" + } + ] +}