Skip to content

Commit ecdc143

Browse files
committed
Fix analysis consolidation to use compiler-resolved configs and step plan directories
- Add plate_metadata_config resolution in compiler using ObjectState
- Store plate_metadata_config on context alongside analysis_consolidation_config
- Change consolidate_results_directories signature to accept individual configs
- Remove global_config dependency from consolidate_results_directories
- Remove well_pattern fallback - use filename_parser exclusively
- Update orchestrator to collect results directories from step plans
- Simplify orchestrator consolidation logic
- Update plate_viewer_window to use ObjectState for config resolution
- Remove _get_consolidate_analysis_results lazy import function
1 parent 33a9f10 commit ecdc143

4 files changed

Lines changed: 304 additions & 220 deletions

File tree

openhcs/core/orchestrator/orchestrator.py

Lines changed: 38 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -99,27 +99,9 @@ def _find_locks(obj, path="", seen=None):
9999

100100

101101
from openhcs.microscopes.microscope_base import MicroscopeHandler
102-
103-
104-
# Lazy import of consolidate_analysis_results to avoid blocking GUI startup
105-
# This function imports GPU libraries, so we defer it until first use
106-
def _get_consolidate_analysis_results():
107-
"""Lazy import of consolidate_analysis_results function."""
108-
if os.getenv("OPENHCS_SUBPROCESS_NO_GPU") == "1":
109-
# Subprocess runner mode - create placeholder
110-
def consolidate_analysis_results(*args, **kwargs):
111-
"""Placeholder for subprocess runner mode."""
112-
raise RuntimeError(
113-
"Analysis consolidation not available in subprocess runner mode"
114-
)
115-
116-
return consolidate_analysis_results
117-
else:
118-
from openhcs.processing.backends.analysis.consolidate_analysis_results import (
119-
consolidate_analysis_results,
120-
)
121-
122-
return consolidate_analysis_results
102+
from openhcs.processing.backends.analysis.consolidate_analysis_results import (
103+
consolidate_results_directories,
104+
)
123105

124106

125107
# Import generic component system - required for orchestrator functionality
@@ -1579,58 +1561,46 @@ def execute_compiled_plate(
15791561
analysis_consolidation_config = getattr(
15801562
first_context, "analysis_consolidation_config", None
15811563
)
1582-
if analysis_consolidation_config and analysis_consolidation_config.enabled:
1564+
1565+
# Debug logging for consolidation troubleshooting
1566+
if not analysis_consolidation_config.enabled:
1567+
logger.info("⏭️ CONSOLIDATION: Disabled")
1568+
else:
15831569
try:
1584-
# Get results directory from compiled contexts (path planner already determined it)
1585-
results_dir = None
1586-
for axis_id, context in compiled_contexts.items():
1587-
# Check if context has step plans with special outputs
1570+
# Collect all unique results directories from step plans
1571+
results_dirs = set()
1572+
for context in compiled_contexts.values():
15881573
for step_plan in context.step_plans.values():
1589-
special_outputs = step_plan.get("special_outputs", {})
1590-
if special_outputs:
1591-
# Extract results directory from first special output path
1592-
first_output = next(iter(special_outputs.values()))
1593-
output_path = Path(first_output["path"])
1594-
potential_results_dir = output_path.parent
1595-
1596-
if potential_results_dir.exists():
1597-
results_dir = potential_results_dir
1598-
break
1599-
1600-
if results_dir:
1601-
break
1602-
1603-
if results_dir and results_dir.exists():
1604-
# Check if there are actually CSV files (materialized results)
1605-
csv_files = list(results_dir.glob("*.csv"))
1606-
if csv_files:
1607-
# Get well IDs from compiled contexts
1608-
axis_ids = list(compiled_contexts.keys())
1609-
1610-
consolidate_fn = _get_consolidate_analysis_results()
1611-
# Get plate_metadata_config from the context's global_config
1612-
plate_metadata_config = (
1613-
first_context.global_config.plate_metadata_config
1614-
if first_context.global_config
1615-
else None
1616-
)
1617-
consolidate_fn(
1618-
results_directory=str(results_dir),
1619-
well_ids=axis_ids,
1620-
consolidation_config=analysis_consolidation_config,
1621-
plate_metadata_config=plate_metadata_config,
1622-
)
1623-
logger.info("✅ CONSOLIDATION: Completed successfully")
1624-
else:
1574+
if "analysis_results_dir" in step_plan:
1575+
results_dirs.add(
1576+
Path(step_plan["analysis_results_dir"])
1577+
)
1578+
if "materialized_analysis_results_dir" in step_plan:
1579+
results_dirs.add(
1580+
Path(step_plan["materialized_analysis_results_dir"])
1581+
)
1582+
1583+
if results_dirs:
1584+
successful_dirs, failed_dirs = consolidate_results_directories(
1585+
results_dirs=list(results_dirs),
1586+
plate_path=Path(first_context.plate_path),
1587+
analysis_consolidation_config=analysis_consolidation_config,
1588+
plate_metadata_config=first_context.plate_metadata_config,
1589+
filename_parser=self.microscope_handler.parser,
1590+
)
1591+
1592+
if successful_dirs:
16251593
logger.info(
1626-
f"⏭️ CONSOLIDATION: No CSV files found in {results_dir}, skipping"
1594+
f"✅ CONSOLIDATION: {len(successful_dirs)} directories consolidated"
1595+
)
1596+
if failed_dirs:
1597+
logger.warning(
1598+
f"⚠️ CONSOLIDATION: {len(failed_dirs)} directories failed"
16271599
)
1628-
else:
1629-
logger.info(
1630-
"⏭️ CONSOLIDATION: No results directory found in compiled contexts"
1631-
)
16321600
except Exception as e:
1633-
logger.error(f"❌ CONSOLIDATION: Failed: {e}")
1601+
logger.error(
1602+
f"❌ CONSOLIDATION: Failed with error: {e}", exc_info=True
1603+
)
16341604

16351605
# Update state based on execution results
16361606
if all(result.is_success() for result in execution_results.values()):

openhcs/core/pipeline/compiler.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1365,6 +1365,11 @@ def compile_pipelines(
13651365
else lazy_analysis_config
13661366
)
13671367

1368+
# Resolve plate_metadata_config via ObjectState (same pattern as analysis_consolidation_config)
1369+
plate_metadata_config = pipeline_config_state.get_saved_resolved_value(
1370+
"plate_metadata_config"
1371+
)
1372+
13681373
# Get auto_add_output_plate flag directly (it's a top-level field, not a dataclass)
13691374
auto_add_output_plate = pipeline_config_state.get_saved_resolved_value(
13701375
"auto_add_output_plate_to_plate_manager"
@@ -1495,6 +1500,7 @@ def compile_pipelines(
14951500
context.analysis_consolidation_config = (
14961501
analysis_consolidation_config
14971502
)
1503+
context.plate_metadata_config = plate_metadata_config
14981504
context.auto_add_output_plate_to_plate_manager = (
14991505
auto_add_output_plate
15001506
)
@@ -1550,6 +1556,7 @@ def compile_pipelines(
15501556
context.analysis_consolidation_config = (
15511557
analysis_consolidation_config
15521558
)
1559+
context.plate_metadata_config = plate_metadata_config
15531560
context.auto_add_output_plate_to_plate_manager = (
15541561
auto_add_output_plate
15551562
)

0 commit comments

Comments (0)