---
# Example configuration file for a data processing pipeline

# Global settings shared by every pipeline stage.
general:
  logging: "console"  # Options: "console", "file", "both"
  log_level: "INFO"  # Options: "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL"
  log_file: "logs/pipeline.log"  # Path to the log file if logging to file
|
# Ordered list of pipeline stages; each stage names the module to run and
# the stream(s) it reads from / writes to.
stages:
  # Stage 1: load the raw CSV datasets and merge them into a single stream.
  - name: "Load_datasets"
    output_stream: "input_dataset"
    module_path: "EEG_preprocessing_modules/data_loader.py"
    module_params:
      datasets:
        - path: "data/dataset1.csv"
          name: "dataset1"
        - path: "data/dataset2.csv"
          name: "dataset2"
      action: "merge"
|
- name: "Process_data"
|
|
input_stream: "input_dataset"
|
|
output_stream: "processed_dataset"
|
|
module_path: "EEG_preprocessing_modules/preprocessing.py"
|
|
module_params:
|
|
select_channels:
|
|
- "channel1"
|
|
- "channel2"
|
|
filter_frequency: 0.5
|
|
resample_rate: 1000
|
|
|
|
- name: "Train_augment_model"
|
|
type: "train"
|
|
input_stream: "processed_dataset"
|
|
module_path: "models_augment/GAN/main.py"
|
|
module_params: # Model specific parameters, they're passed to the module script as dictionary
|
|
noise_level: 0.01
|
|
save_path: "models_augment/GAN/model.pth"
|
|
|
|
|
|
- name: "Augment_data"
|
|
type: "inference" # Inference is default, but can be specified explicitly
|
|
input_stream: "processed_dataset"
|
|
output_stream: "augmented_dataset"
|
|
module_path: "models_augment/GAN/main.py"
|
|
module_params: # Model specific parameters, they're passed to the model script as dict
|
|
noise_level: 0.01
|
|
model_weights: "models_augment/GAN/model.pth"
|
|
|
|
- name: "Save_dataset"
|
|
input_stream: "augmented_dataset"
|
|
module_params:
|
|
output_path: "data/augmented_dataset.mne" |