This documentation provides comprehensive examples of common HATSA workflows, from basic alignment to advanced task-informed analyses.
Basic Workflow
# Load your data (list of subject matrices)
data <- load_fmri_data() # Returns list of time × voxel matrices
# Get parameter suggestions
params <- hatsa_suggest(data)
# Run basic HATSA
result <- hatsa(data, components = params$components)
# Extract aligned data
aligned_data <- get_aligned_data(result)
template <- get_template(result)
# Check quality
hatsa_summary(result)
plot_hatsa(result, type = "eigenvalues")
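As an extra sanity check, you can inspect the aligned output directly with base R. The snippet below is a minimal sketch that assumes get_aligned_data() returns one matrix per subject and that each aligned matrix has the same dimensions as the template; adapt it to the actual return structure.
# Confirm one aligned matrix per subject and inspect dimensions
stopifnot(length(aligned_data) == length(data))
sapply(aligned_data, dim)
# Illustrative per-subject agreement with the group template
# (only valid if each aligned matrix matches the template's dimensions)
template_fit <- sapply(aligned_data, function(A) cor(as.vector(A), as.vector(template)))
summary(template_fit)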
Task-Informed Workflow
# Load task design matrices
task_data <- load_task_designs() # List of time × condition matrices
# Automatic method selection
result <- hatsa_task(data, task_data, method = "auto")
# Or choose specific method
result <- hatsa_task(data, task_data,
                     method = "blend",
                     lambda_blend = 0.2)
# Analyze task-specific alignment
task_metrics <- get_task_alignment_metrics(result)
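If you are unsure how strongly to weight the task design, a small sweep over lambda_blend can help. This sketch only reuses hatsa_task() and get_task_alignment_metrics() from above; the grid values are arbitrary placeholders, not recommendations.
# Illustrative sweep over the blending weight
lambda_grid <- c(0.1, 0.2, 0.4)
sweep_fits <- lapply(lambda_grid, function(lam) {
  hatsa_task(data, task_data, method = "blend", lambda_blend = lam)
})
names(sweep_fits) <- paste0("lambda_", lambda_grid)
# Compare task-alignment metrics before settling on a weight
sweep_metrics <- lapply(sweep_fits, get_task_alignment_metrics)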
Advanced Anchor Selection
# Manual anchor selection based on ROI
roi_indices <- get_roi_voxels("visual_cortex")
result <- hatsa(data, anchors = roi_indices)
# Multi-resolution anchor selection
anchors <- select_anchors_mra(
  U_list = preliminary_decomposition,
  n_anchors = 100,
  n_resolutions = 5
)
result <- hatsa(data, anchors = anchors)
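Whichever selection strategy you use, it is worth verifying the anchor indices before fitting. The check below uses only base R and assumes anchors are column (voxel) indices into each subject's time × voxel matrix.
# Basic validity checks on anchor indices (assumes column indices)
n_voxels <- ncol(data[[1]])
stopifnot(!anyDuplicated(anchors), all(anchors >= 1), all(anchors <= n_voxels))
length(anchors) # number of anchors actually used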
Group Analysis
# Align multiple groups separately
result_controls <- hatsa(data[control_idx])
result_patients <- hatsa(data[patient_idx])
# Compare alignment quality
compare_alignments(result_controls, result_patients)
# Project new subjects to existing space
new_aligned <- predict(result_controls, newdata_list = new_subjects)
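A common variant is to fit the alignment on controls only and project the patient group into that space with predict(), so both groups are compared in a single coordinate frame. The sketch below is built only from the calls already shown and assumes data[patient_idx] was not part of the control fit.
# Project patients into the control-derived space instead of fitting
# a separate alignment (useful when the patient group is small)
patients_in_control_space <- predict(result_controls, newdata_list = data[patient_idx])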
Performance Optimization
# For large datasets, use fast preset
result <- hatsa(big_data, preset = "fast")
# For parallel processing (if available)
options(hatsa.parallel = TRUE)
options(hatsa.cores = 4)
result <- hatsa(data)
# For very high-dimensional data
# First reduce dimensions
data_reduced <- lapply(data, function(X) {
  # Keep the first 100 left singular vectors (time x 100) as the reduced data
  svd_X <- svd(X, nu = 100, nv = 100)
  svd_X$u
})
result <- hatsa(data_reduced)
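To see what a preset or dimension reduction actually buys you, a rough timing comparison with base R's system.time() is often enough. This is a sketch, assuming the same data can be fit under both settings; elapsed times will vary with data size and hardware.
# Rough timing comparison between the default settings and the "fast" preset
t_default <- system.time(hatsa(data))
t_fast <- system.time(hatsa(data, preset = "fast"))
t_default["elapsed"]
t_fast["elapsed"]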