Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
97 changes: 97 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
*~
.DS_Store
.ipynb_checkpoints
*.
*.egg-info
__pycache__
*.pyc
*.so.dSYM
.idea/


# Covers JetBrains IDEs: IntelliJ, GoLand, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
**/.idea/**/workspace.xml
**/.idea/**/tasks.xml
**/.idea/**/usage.statistics.xml
**/.idea/**/dictionaries
**/.idea/**/shelf

# AWS User-specific
**/.idea/**/aws.xml

# Generated files
**/.idea/**/contentModel.xml

# Sensitive or high-churn files
**/.idea/**/dataSources/
**/.idea/**/dataSources.ids
**/.idea/**/dataSources.local.xml
**/.idea/**/sqlDataSources.xml
**/.idea/**/dynamic.xml
**/.idea/**/uiDesigner.xml
**/.idea/**/dbnavigator.xml

# Gradle
**/.idea/**/gradle.xml
**/.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
**/.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
**/.idea/replstate.xml

# SonarLint plugin
**/.idea/sonarlint/
# see https://community.sonarsource.com/t/is-the-file-idea-idea-idea-sonarlint-xml-intended-to-be-under-source-control/121119
**/.idea/sonarlint.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based HTTP Client
**/.idea/httpRequests
http-client.private.env.json

# Android studio 3.1+ serialized cache file
**/.idea/caches/build_file_checksums.ser

# Apifox Helper cache
**/.idea/.cache/.Apifox_Helper
**/.idea/ApifoxUploaderProjectSetting.xml

# Github Copilot persisted session migrations, see: https://github.com/microsoft/copilot-intellij-feedback/issues/712#issuecomment-3322062215
**/.idea/**/copilot.data.migration.*.xml
15 changes: 12 additions & 3 deletions eregion/configs/config.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from abc import ABC, abstractmethod
import yaml
import logging
from utils.misc_utils import configure_logger

# A yaml constructor for slice objects
def slice_constructor(loader, node):
Expand Down Expand Up @@ -31,7 +31,7 @@ def __init__(self, config_input):
Path to a YAML config file or config data as a string or dictionary.
"""
self.config = None
self.logger = logging.getLogger(__name__)
self.logger = configure_logger(self.__class__.__name__)

if isinstance(config_input, str) and config_input.endswith(('.yaml', '.yml')):
self.set_from_file(config_input)
Expand Down Expand Up @@ -110,7 +110,8 @@ def validate_config(self):

### Pipeline Configuration Class ###
class PipelineConfig(ConfigLoader):
required_keys = ['pipeline']
required_keys = ['pipelines']
required_pipeline_keys = ['name', 'lazy', 'nodes']

def __init__(self, config_input):
"""
Expand All @@ -124,3 +125,11 @@ def validate_config(self):
for key in self.required_keys:
if key not in self.config:
raise ValueError(f"Missing required config key: {key}")

for pipeline in self.config['pipelines']:
for key in self.required_pipeline_keys:
if key not in pipeline:
raise ValueError(f"Missing required pipeline key: {key} in pipeline {pipeline.get('name', 'unknown')}")

if pipeline['lazy']:
assert 'source' in pipeline, f"Missing required key 'source' for lazy pipeline {pipeline.get('name', 'unknown')}"
16 changes: 8 additions & 8 deletions eregion/configs/detectors/deimos_singledet.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,10 @@ objects:
ext_id: 1 # FITS extension ID
ext_slice: [!slice [0, 4125], !slice [0, 1094]] # Slice of the ext_id that has the data for this output
data_slice: [!slice [0, 4125], !slice [0, 1094]] # Slice of the full DetImage data array where this output's data will go
serial_prescan: !slice [0, 0]
serial_overscan: !slice [1094, 1094]
parallel_prescan: !slice [0, 0]
parallel_overscan: !slice [4125, 4125]
serial_prescan: !slice [0, 50]
serial_overscan: !slice [1074, 1094]
parallel_prescan: !slice [0, 50]
parallel_overscan: !slice [4105, 4125]
parallel_axis: 'y' # First axis in the data array (rows) represent parallel readout direction
readout_pixel: [0, 0] # top left pixel
gain: 1.0 # electrons/ADU
Expand All @@ -33,10 +33,10 @@ objects:
ext_id: 2 # FITS extension ID
ext_slice: [!slice [0, 4125], !slice [0, 1094]] # Slice of the ext_id that has the data for this output
data_slice: [!slice [0, 4125], !slice [1094, 2188]] # Slice of the full DetImage data array where this output's data will go
serial_prescan: !slice [0, 0]
serial_overscan: !slice [1094, 1094]
parallel_prescan: !slice [0, 0]
parallel_overscan: !slice [4125, 4125]
serial_prescan: !slice [1044, 1094] # NOTE(review): was [1094, 1044] — start > stop yields an empty slice; assuming mirrored amplifier, prescan sits at the high-column end. Confirm against detector layout.
serial_overscan: !slice [0, 20] # NOTE(review): was [20, 0] — reversed bounds; assuming overscan at the low-column end for this readout direction. Confirm against detector layout.
parallel_prescan: !slice [0, 50]
parallel_overscan: !slice [4105, 4125]
parallel_axis: 'y' # First axis in the data array (rows) represent parallel readout direction
readout_pixel: [0, 1094] # top right pixel
gain: 1.0 # electrons/ADU
Expand Down
67 changes: 67 additions & 0 deletions eregion/configs/pipeline_flows/example.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
# This is an example pipeline flow configuration

debug: false # Optional: set to true to enable debug mode (more verbose logging, etc.)

pipelines:
- name: PIPE_1 # Name of the pipeline flow, required
description: Pipeline flow 1
lazy: false # Set true if this sub-pipeline should be run lazily (i.e. as images arrive)

nodes: # List of tasks (nodes) in the pipeline flow
- name: TASK_1 # Name of the task node, required
task: package.module.class # Path to the Class of the task to run, must be a subclass of `Task` defined in tasks.task

init: # Initialization parameters (for Task.__init__)
inputs: # Specify any args needed from outputs of other tasks in this config
arg_1: pipe_name.node_name.data.key # Output of tasks are wrapped in TaskResult objects by the engine, and the data produced by the task is in the TaskResult.data dict; specify the path to the data you want to use as input for this task
# etc.
params: # Specify any additional kwargs (which are not task outputs) needed; refer to the task documentation for required and optional params and kwargs
param_1: value
param_2: value
# etc.

run: # Run-time (Task.run() or Task.lazy_run()) inputs and parameters, as above, use `inputs` to specify data coming from outputs of other tasks, and `params` for any additional parameters
inputs:
arg_1: pipe_name.node_name.data.key
# etc.
params:
param_1: value
param_2: value
# etc.

- name: TASK_2
task: package.module.class
init:
inputs:
arg_1: PIPE_1.TASK_1.data.key # Example of using output from TASK_1 as input for TASK_2
params:
param_1: value
# etc.
run:
inputs:
arg_1: PIPE_1.TASK_1.data.key # Example of using output from TASK_1 as input for TASK_2
params:
param_1: value
# etc.

depends_on: [TASK_1] # should be specified if this task depends on the output of another task; ensures correct execution order in the pipeline flow

- name: PIPE_2
description: Pipeline flow 2
lazy: true # This sub-pipeline will be run lazily (i.e. as images arrive)
nodes:
- name: TASK_3
task: package.module.class
init:
inputs:
arg_1: PIPE_1.TASK_1.data.key # Example of using output from a task in another pipeline flow as input
params:
param_1: value
# etc.
run:
inputs:
arg_1: PIPE_1.TASK_2.data.key # Example of using output from a task in another pipeline flow as input
params:
param_1: value
# etc.
depends_on: [PIPE_1.TASK_1, PIPE_1.TASK_2] # specify dependencies across pipeline flows as well
87 changes: 62 additions & 25 deletions eregion/configs/pipeline_flows/masterbias_example.yaml
Original file line number Diff line number Diff line change
@@ -1,32 +1,69 @@
# This is an example pipeline flow configuration for creating a master bias frame
# Example

debug: false # Optional: set to true to enable debug mode (more verbose logging, etc.)
pipeline:
- name: image_creator
task: tasks.imagegen.ImageCreator
debug: false
pipelines:
- name: calib_flow
description: Pipeline flow to create a master bias frame from bias images
lazy: false
init: # Initialization parameters for the image creator
detector_config: "/Users/yashvi/Desktop/Detector Characterization Tools/eregion/configs/detectors/deimos_singledet.yaml" # For required params, see the task documentation
# Any additional params are passed as kwargs that get set in the task's meta dict
# See task documentation for which kwargs are used
nodes:
- name: image_creator
task: tasks.imagegen.ImageCreator
init:
params:
detector_config: "/Users/yashvi/Desktop/Detector Characterization Tools/eregion/eregion/configs/detectors/deimos_singledet.yaml"
run:
params:
input_source: "/Users/yashvi/Desktop/Detector Characterization Tools/DTU_dettest/DTU_singledet_acceptance/PTC/SCI/20250812-101350/*_bias_*.fits"
identifier_func: tasks.custom.guess_image_type_from_filename_DEIMOS

run: # Run-time inputs and parameters for the image creator
params: # Check which params go here in the task documentation
input_source: "/Users/yashvi/Desktop/Detector Characterization Tools/DTU_dettest/DTU_singledet_acceptance/PTC/SCI/20250812-101350/*_bias_*.fits"
identifier_func: tasks.custom.guess_image_type_from_filename_DEIMOS
- name: master_bias
task: tasks.calibration.MasterBias
init:
params:
method: "median"
run:
inputs:
bias_images: calib_flow.image_creator.data.bias
depends_on: [calib_flow.image_creator]


- name: master_bias
task: tasks.calibration.MasterBias
- name: preproc_flow
description: Example pre-processing pipeline flow
lazy: false
init:
method: "median" # Optional param (kwarg) method to combine bias frames; see task documentation for options
run:
inputs: # use inputs to specify data input coming in from output of other tasks
bias_images: image_creator.data.images # E.g. Input images from the image creator task's output
# Each task's outputs are objects of class TaskResult
# TaskResult.data is a dict containing the actual data produced by the task
# Check which outputs are available in the task documentation
nodes:
- name: image_creator
task: tasks.imagegen.ImageCreator
init:
params:
detector_config: "/Users/yashvi/Desktop/Detector Characterization Tools/eregion/eregion/configs/detectors/deimos_singledet.yaml"
run:
params:
input_source: "/Users/yashvi/Desktop/Detector Characterization Tools/DTU_dettest/DTU_singledet_acceptance/PTC/SCI/20250812-101350/*flat_0.000*.fits"
identifier_func: tasks.custom.guess_image_type_from_filename_DEIMOS

- name: bias_subtraction
task: tasks.preprocessing.BiasSubtraction
init:
inputs:
master_biases: calib_flow.master_bias.data.master_biases
run:
inputs:
images: preproc_flow.image_creator.data.flat
depends_on: [preproc_flow.image_creator, calib_flow.master_bias]

- name: overscan_subtraction
task: tasks.preprocessing.ScanSubtraction
init:
params:
which_scan: 'serial_overscan'
method: 'median_by_axis'
run:
inputs:
images: preproc_flow.bias_subtraction.data.flat
depends_on: [preproc_flow.bias_subtraction]

depends_on: [image_creator]
- name: badpixel_masking
task: tasks.preprocessing.SigmaClipMasking
run:
inputs:
images: preproc_flow.overscan_subtraction.data.flat
depends_on: [preproc_flow.overscan_subtraction]
15 changes: 9 additions & 6 deletions eregion/core/image_operations.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
### Collection of utility functions for image processing tasks.
import numpy as np
from typing import Callable, Any
from typing import Callable, Any, Optional
from astropy.stats import sigma_clip

def median_combine(images: list[np.ndarray]) -> np.ndarray:
Expand Down Expand Up @@ -37,7 +37,7 @@ def mean_combine(images: list[np.ndarray]) -> np.ndarray:
stacked_images = np.stack(images, axis=0)
return np.mean(stacked_images, axis=0)

def subtract_from_image(image: np.ndarray, subtract_object: np.ndarray | float, method: Callable, *args):
def subtract_from_image(image: np.ndarray, subtract_object: np.ndarray | float, method: Optional[Callable]=None, **kwargs):
"""
Subtract a given object (array or scalar) from an image.

Expand All @@ -46,16 +46,19 @@ def subtract_from_image(image: np.ndarray, subtract_object: np.ndarray | float,
image : np.ndarray
2D numpy array representing the image.
subtract_object : np.ndarray or float
The object to subtract from the image. Can be an array of any size from which the value to subtract is derived.
method : Callable
The object to subtract from the image. Can be an array of any size from which the object to subtract is derived.
method : Optional[Callable]
A function that takes the subtract_object and returns a scalar/array to subtract from the image.
kwargs :
Additional keyword arguments to pass to the method function.
Returns
-------
np.ndarray
The resulting image after subtraction.
"""
value_to_subtract = method(subtract_object, *args)
return image - value_to_subtract, value_to_subtract
if method:
subtract_object = method(subtract_object, **kwargs)
return image - subtract_object, subtract_object

def simple_median(data: np.ndarray, *args, **kwargs) -> Any:
    """
    Return the scalar median of `data`, ignoring any extra arguments.

    Accepts (and discards) arbitrary positional and keyword arguments so it
    can be passed as the `method` callable to `subtract_from_image`, which
    forwards caller-supplied `**kwargs` to the method. The previous
    `*args`-only signature raised TypeError when any kwargs were forwarded.

    Parameters
    ----------
    data : np.ndarray
        Array of any shape; the median is taken over all elements.

    Returns
    -------
    Any
        The median as returned by `np.median` (a numpy scalar).
    """
    return np.median(data)
Expand Down
Loading