From ea80ca75c514d7c20ffd529c079e3292384df97e Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Sat, 7 Feb 2026 16:30:08 -0500 Subject: [PATCH 01/10] Added Cisco and Chronos2 Time Series Foundation Models to Orion --- .../pretrained/chronos2/chronos2.json | 36 ++ orion/pipelines/pretrained/cisco/cisco.json | 39 ++ .../cisco/cisco_modeling/__init__.py | 28 ++ .../cisco/cisco_modeling/cisco_tsm_mr.py | 393 +++++++++++++++++ .../patched_decoder_multi_resolution.py | 416 ++++++++++++++++++ orion/primitives/chronos2.py | 136 ++++++ orion/primitives/cisco.py | 111 +++++ .../orion.primitives.chronos2.Chronos2.json | 62 +++ .../jsons/orion.primitives.cisco.Cisco.json | 57 +++ 9 files changed, 1278 insertions(+) create mode 100644 orion/pipelines/pretrained/chronos2/chronos2.json create mode 100644 orion/pipelines/pretrained/cisco/cisco.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_modeling/__init__.py create mode 100644 orion/pipelines/pretrained/cisco/cisco_modeling/cisco_tsm_mr.py create mode 100644 orion/pipelines/pretrained/cisco/cisco_modeling/patched_decoder_multi_resolution.py create mode 100644 orion/primitives/chronos2.py create mode 100644 orion/primitives/cisco.py create mode 100644 orion/primitives/jsons/orion.primitives.chronos2.Chronos2.json create mode 100644 orion/primitives/jsons/orion.primitives.cisco.Cisco.json diff --git a/orion/pipelines/pretrained/chronos2/chronos2.json b/orion/pipelines/pretrained/chronos2/chronos2.json new file mode 100644 index 00000000..aee16aaf --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2.json @@ -0,0 +1,36 @@ +{ + "primitives": [ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate", + "sklearn.impute.SimpleImputer", + "mlstars.custom.timeseries_preprocessing.rolling_window_sequences", + "orion.primitives.chronos2.Chronos2", + "orion.primitives.timeseries_errors.regression_errors", + "orion.primitives.timeseries_anomalies.find_anomalies" + ], + "init_params": { + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600, + "method": "mean" + }, + "mlstars.custom.timeseries_preprocessing.rolling_window_sequences#1": { + "target_column": 0, + "window_size": 256 + }, + "orion.primitives.timeseries_anomalies.find_anomalies#1": { + "window_size_portion": 0.33, + "window_step_size_portion": 0.1, + "fixed_threshold": true + } + }, + "input_names": { + "orion.primitives.timeseries_anomalies.find_anomalies#1": { + "index": "target_index" + } + }, + "output_names": { + "orion.primitives.timeseries_anomalies.find_anomalies#1": { + "y": "anomalies" + } + } +} \ No newline at end of file diff --git a/orion/pipelines/pretrained/cisco/cisco.json b/orion/pipelines/pretrained/cisco/cisco.json new file mode 100644 index 00000000..2c359ba5 --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco.json @@ -0,0 +1,39 @@ +{ + "primitives": [ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate", + "sklearn.impute.SimpleImputer", + "mlstars.custom.timeseries_preprocessing.rolling_window_sequences", + "orion.primitives.cisco.Cisco", + "orion.primitives.timeseries_errors.regression_errors", + "orion.primitives.timeseries_anomalies.find_anomalies" + ], + "init_params": { + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600, + "method": "mean" + }, + "mlstars.custom.timeseries_preprocessing.rolling_window_sequences#1": { + "target_column": 0, + "window_size": 30720, + "step_size": 16 + }, 
+ "orion.primitives.timeseries_anomalies.find_anomalies#1": { + "window_size_portion": 0.33, + "window_step_size_portion": 0.1, + "fixed_threshold": true, + "window_size": 250, + "window_step_size": 40 + } + }, + "input_names": { + "orion.primitives.timeseries_anomalies.find_anomalies#1": { + "index": "target_index" + } + }, + "output_names": { + "orion.primitives.timeseries_anomalies.find_anomalies#1": { + "y": "anomalies" + } + } +} \ No newline at end of file diff --git a/orion/pipelines/pretrained/cisco/cisco_modeling/__init__.py b/orion/pipelines/pretrained/cisco/cisco_modeling/__init__.py new file mode 100644 index 00000000..95dc496d --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_modeling/__init__.py @@ -0,0 +1,28 @@ +# +# Copyright 2025 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Multi-resolution Cisco Time Series Model. +""" + +from .patched_decoder_multi_resolution import ( + CiscoTsmMRConfig, + PatchedTSMultiResolutionDecoder, +) +from .cisco_tsm_mr import CiscoTsmMR, TimesFmHparams, TimesFmCheckpoint + +__all__ = [ + "CiscoTsmMRConfig", + "PatchedTSMultiResolutionDecoder", + "CiscoTsmMR", +] diff --git a/orion/pipelines/pretrained/cisco/cisco_modeling/cisco_tsm_mr.py b/orion/pipelines/pretrained/cisco/cisco_modeling/cisco_tsm_mr.py new file mode 100644 index 00000000..ed077c0c --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_modeling/cisco_tsm_mr.py @@ -0,0 +1,393 @@ +# +# Copyright 2025 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import logging
+from os import path
+from typing import Any, List, Sequence, Union, Tuple
+
+import numpy as np
+
+import torch
+
+from huggingface_hub import snapshot_download
+
+from timesfm import TimesFmHparams, TimesFmCheckpoint
+from timesfm.timesfm_torch import TimesFmTorch
+from timesfm.timesfm_base import strip_leading_nans, linear_interpolation
+
+from .patched_decoder_multi_resolution import CiscoTsmMRConfig, PatchedTSMultiResolutionDecoder
+
+
+class CiscoTsmMR(TimesFmTorch):
+    """Cisco Time Series Model Multi-resolution Forecast API."""
+
+    def __init__(
+        self,
+        hparams: TimesFmHparams,
+        checkpoint: TimesFmCheckpoint,
+        *,
+        use_resolution_embeddings: bool = True,
+        use_special_token: bool = True,
+    ) -> None:
+        self.use_resolution_embeddings = use_resolution_embeddings
+        self.use_special_token = use_special_token
+        super().__init__(hparams, checkpoint)
+
+    def __post_init__(self):
+        # Build the multi-resolution config.
+        self._model_config = CiscoTsmMRConfig(
+            num_layers=self.num_layers,
+            num_heads=self.num_heads,
+            hidden_size=self.model_dims,
+            intermediate_size=self.model_dims,
+            patch_len=self.input_patch_len,
+            horizon_len=self.output_patch_len,
+            head_dim=self.model_dims // self.num_heads,
+            quantiles=self.quantiles,
+            use_positional_embedding=self.use_pos_emb,
+            use_resolution_embeddings=self.use_resolution_embeddings,
+            use_special_token=self.use_special_token,
+        )
+        self._model = None
+        self.num_cores = 1
+        self.global_batch_size = self.per_core_batch_size
+        self._device = torch.device("cuda:0" if (
+            torch.cuda.is_available() and self.backend == "gpu") else "cpu")
+        self._median_index = -1
+
+    def load_from_checkpoint(
+        self,
+        checkpoint: TimesFmCheckpoint,
+    ) -> None:
+        """Loads a multi-resolution model checkpoint and prepares the MR decoder for inference.
+
+        Args:
+            checkpoint: TimesFmCheckpoint object containing checkpoint info (local or HF repo).
+        """
+        checkpoint_path = checkpoint.path
+        repo_id = checkpoint.huggingface_repo_id
+        if checkpoint_path is None:
+            checkpoint_path = path.join(
+                snapshot_download(repo_id, local_dir=checkpoint.local_dir),
+                "torch_model.pt")
+        self._model = PatchedTSMultiResolutionDecoder(self._model_config)
+        logging.info("Loading checkpoint from %s", checkpoint_path)
+        loaded_checkpoint = torch.load(checkpoint_path, weights_only=True, map_location=self._device)
+        incompatible = self._model.load_state_dict(loaded_checkpoint, strict=True)
+
+        if getattr(incompatible, "missing_keys", None) or getattr(incompatible, "unexpected_keys", None):
+            logging.info(
+                "MR decoder state load differences. missing=%s unexpected=%s",
+                getattr(incompatible, "missing_keys", []),
+                getattr(incompatible, "unexpected_keys", []),
+            )
+
+        logging.info("Loaded model from %s; sending it to device %s", checkpoint_path, self._device)
+        self._model.to(self._device)
+        self._model.eval()
+
+    def _pad_or_truncate(self,
+                         ts: torch.Tensor,
+                         target_len: int) -> Tuple[torch.Tensor, torch.Tensor]:
+        """Left-pad or truncate a time series to a target length.
+
+        Args:
+            ts: 1D or 2D tensor of shape [L] or [L, 1].
+            target_len: desired target length after padding/truncation.
+
+        Returns:
+            padded_ts: tensor of shape [target_len].
+            pad_mask: tensor of shape [target_len], with 1.0 for padded positions and 0.0 for actual data.
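+
+        Example (illustrative): _pad_or_truncate(torch.tensor([3., 4., 5.]), 5)
+        returns ([0., 0., 3., 4., 5.], [1., 1., 0., 0., 0.]).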
+ + """ + + if ts.ndim == 2 and ts.shape[-1] == 1: + ts = ts.squeeze(-1) + + L = ts.shape[0] + + if L == target_len: + return ts, torch.zeros_like(ts, dtype=torch.float32) + + if L > target_len: + return ts[-target_len:], torch.zeros(target_len, dtype=torch.float32) + + pad_len = target_len - L + padded = torch.cat([torch.zeros(pad_len, dtype=ts.dtype), ts], dim=0) + + pad_mask = torch.cat([ + torch.ones(pad_len, dtype=torch.float32), + torch.zeros(L, dtype=torch.float32) + ], dim=0) + + return padded, pad_mask + + + def normalize_with_pad(self, + context, + pad_mask: torch.Tensor | None = None, + clamp_range=(-1000, 1000)) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, float]: + """Normalize context with padding mask. + Args: + context: tensor of shape [B, T] or [B, T, 1]. + pad_mask: tensor of shape [B, T], with 1.0 for padded positions and 0.0 for actual data. + clamp_range: tuple of (min, max) to clamp normalized values. Default (-1000, 1000). + Returns: + ctx_normalized: normalized context tensor with same shape as input. + offset: mean used for normalization, shape [B, 1]. + scale: stddev used for normalization, shape [B, 1]. + eps: small epsilon value used for numerical stability. + """ + + eps = 1e-8 + + if context.ndim == 3: + context = context.squeeze(-1) + + if pad_mask is None: + pad_mask = torch.zeros_like(context) + + valid = (1.0 - pad_mask) # 1 for real, 0 for pad + # Prevent divide-by-zero + count = valid.sum(dim=1, keepdim=True).clamp_min(1.0) + + # Masked mean, variance and std + context_mean = (context * valid).sum(dim=1, keepdim=True) / count + + # Center for variance + context_var = (((context - context_mean) * valid)**2).sum(dim=1, keepdim=True) / count + context_std = context_var.sqrt() + + ctx_normalized = (context - context_mean) / (context_std + eps) + stats = (context_mean, context_std) + + ctx_normalized = ctx_normalized * valid + + ctx_normalized = torch.clamp(ctx_normalized, *clamp_range) + + offset, scale = stats + + return ctx_normalized, offset, scale, eps + + + def slice_fine_context(self, series: List[float], fine_len: int = 512) -> List[float]: + """Return the rightmost fine_len points (or entire series if shorter). + Args: + series: list or array of fine-resolution (fine-level) time series data. + fine_len: desired length of fine-level context to extract. + Returns: + List of floats representing the fine-level context of length <= fine_len. + """ + return series[-fine_len:] + + + def build_coarse_context(self, series: np.ndarray, max_coarse_ctx: int = 512, block: int = 60) -> List[float]: + """Construct coarse context by: + 1. Taking up to rightmost (max_coarse_ctx * block) raw fine samples. + 2. Partitioning into consecutive non-overlapping blocks of 'block' size from left to right (chronological order preserved). + 3. Computing the mean of each block. + + Args: + series: array of fine-resolution (fine-level) time series data. + max_coarse_ctx: maximum number of coarse points to return. + block: number of fine samples to aggregate into one coarse sample. + Returns: + List of floats representing coarse means with length <= max_coarse_ctx. 
+ """ + needed_raw = max_coarse_ctx * block + raw_slice = series[-needed_raw:] + # Ensure we only form full blocks; drop partial leading block if length not multiple + remainder = len(raw_slice) % block + if remainder != 0: + raw_slice = raw_slice[remainder:] # align to block boundary at the right edge + coarse = [] + for i in range(0, len(raw_slice), block): + block_vals = raw_slice[i:i+block] + if len(block_vals) < block: + break + coarse.append(float(sum(block_vals) / block)) + return coarse[-max_coarse_ctx:] + + + def build_multi_resolution(self, series: np.ndarray, agg_factor: int = 60) -> Tuple[List[float], List[float]]: + """Builds multi-resolution contexts from a fine-resolution time series. + Args: + series: array of fine-resolution (fine-level) time series data. + agg_factor: aggregation factor to form coarse context from fine context. + Returns: + Tuple of: + - coarse_ctx: list of floats representing the coarse context. + - fine_ctx: list of floats representing the fine context. + """ + + coarse_ctx = self.build_coarse_context(series, max_coarse_ctx=512, block=agg_factor) + fine_ctx = self.slice_fine_context(series) + return coarse_ctx, fine_ctx + + + def _normalize_inputs(self, inputs): + """Normalizes input series into a batched series. + Args: + inputs: single series or list of series. + Returns: + List of series, each as a list of floats. + """ + + if isinstance(inputs, (np.ndarray, torch.Tensor)): + inputs = inputs.tolist() + + if not isinstance(inputs, (list, tuple)): + return [[inputs]] + + # Compute series depth. + def _depth(x): + if isinstance(x, (list, tuple, np.ndarray, torch.Tensor)): + return 1 + (max((_depth(y) for y in x), default=0)) + return 0 + + d = _depth(inputs) + if d > 2: + raise ValueError("Input series must be strictly list-of-lists or a list.") + + if d == 1: + return [list(inputs)] + + final_inps = [] + for s in inputs: + if isinstance(s, (np.ndarray, torch.Tensor)): + s = s.tolist() + elif not isinstance(s, (list, tuple, np.ndarray, torch.Tensor)): + raise ValueError("Each series must be list-like when providing a list of series.") + + final_inps.append(list(s)) + return final_inps + + + def forecast(self, + inputs: Sequence[Any], + horizon_len: Union[int, None] = None, + agg_factor: int = 60, + batch_size: int = 8) -> List[dict[str, Any]]: + """Forecasts from a single fine-resolution stream. + + Derives the coarse-resolution stream by aggregating the fine-resolution + context in blocks of `agg_factor` (e.g., 60 minutes -> hourly) and then + runs multi-resolution decoding. + + Args: + inputs: list-like of fine-resolution context series. + horizon_len: forecast horizon length; if None, uses the model's configured horizon length. + agg_factor: size of aggregation window to form the coarse context from the fine context. + batch_size: batch size for forecasting. + + Returns: + List of dictionaries containing mean and quantile forecasts for each series input. 
+ """ + if self._model is None: + raise ValueError("Checkpoint is not properly loaded.") + + if horizon_len is None: + horizon_len = self.output_patch_len + + if horizon_len <= 0: + raise ValueError("horizon_len must be positive") + + if agg_factor <= 0: + raise ValueError("agg_factor must be positive") + + fine_contexts = [] + coarse_contexts = [] + fine_pads = [] + coarse_pads = [] + offsets_fine = [] + scales_fine = [] + global_eps = 1e-8 + + horizon_len = horizon_len or self.output_patch_len + + CONTEXT_LEN_FINE = 512 + CONTEXT_LEN_COARSE = 512 + + inputs = self._normalize_inputs(inputs) + + for seq in inputs: + series = np.array(seq) + if not np.isfinite(series).all(): + series = np.where(np.isfinite(series), series, np.nan) + series = strip_leading_nans(series) + series = linear_interpolation(series) + + coarse_ctx, fine_ctx = self.build_multi_resolution(series, agg_factor=agg_factor) + + # Raw tensors + ctx_coarse = torch.tensor(coarse_ctx, dtype=torch.float32) + ctx_fine = torch.tensor(fine_ctx, dtype=torch.float32) + + # Pad / truncate + ctx_coarse_pad, mask_coarse = self._pad_or_truncate(ctx_coarse, CONTEXT_LEN_COARSE) + ctx_fine_pad, mask_fine = self._pad_or_truncate(ctx_fine, CONTEXT_LEN_FINE) + + # Add batch dim + ctx_coarse_pad_b = ctx_coarse_pad.unsqueeze(0) + mask_coarse_b = mask_coarse.unsqueeze(0) + ctx_fine_pad_b = ctx_fine_pad.unsqueeze(0) + mask_fine_b = mask_fine.unsqueeze(0) + + # Normalize + norm_coarse, _, _, _ = self.normalize_with_pad(ctx_coarse_pad_b, pad_mask=mask_coarse_b) + norm_fine, offset_fine, scale_fine, _ = self.normalize_with_pad(ctx_fine_pad_b, pad_mask=mask_fine_b) + + # Store normalized contexts + coarse_contexts.append(norm_coarse.squeeze(0).numpy()) # [L_coarse] + coarse_pads.append(mask_coarse_b.squeeze(0).numpy()) + fine_contexts.append(norm_fine.squeeze(0).numpy()) # [L_fine] + fine_pads.append(mask_fine_b.squeeze(0).numpy()) + + # Store scalar stats only from fine contexts + offsets_fine.append(float(offset_fine.squeeze())) + scales_fine.append(float(scale_fine.squeeze())) + + # Arrays of shape [N] + offsets_fine = np.array(offsets_fine, dtype=np.float32) + scales_fine = np.array(scales_fine, dtype=np.float32) + + N = len(fine_contexts) + final_predictions = [] + + for start in range(0, N, batch_size): + end = min(start + batch_size, N) + + batch_coarse = torch.as_tensor(np.stack(coarse_contexts[start:end]), dtype=torch.float32).unsqueeze(-1).to(self._device) + batch_coarse_pad = torch.as_tensor(np.stack(coarse_pads[start:end]), dtype=torch.float32).unsqueeze(-1).to(self._device) + batch_fine = torch.as_tensor(np.stack(fine_contexts[start:end]), dtype=torch.float32).unsqueeze(-1).to(self._device) + batch_fine_pad = torch.as_tensor(np.stack(fine_pads[start:end]), dtype=torch.float32).unsqueeze(-1).to(self._device) + + freq_tensor = torch.zeros((end - start, 1), dtype=torch.long, device=self._device) + + with torch.no_grad(): + preds = self._model.decode([batch_coarse, batch_fine], + [batch_coarse_pad.float(), batch_fine_pad.float()], + freq_tensor, + horizon_len=horizon_len, + agg_factor=agg_factor, + offsets=offsets_fine[start:end], + scales=scales_fine[start:end], + global_eps=global_eps, + output_patch_len=self.output_patch_len) + + final_predictions += preds + + return final_predictions diff --git a/orion/pipelines/pretrained/cisco/cisco_modeling/patched_decoder_multi_resolution.py b/orion/pipelines/pretrained/cisco/cisco_modeling/patched_decoder_multi_resolution.py new file mode 100644 index 00000000..1b1e8ce3 --- /dev/null +++ 
b/orion/pipelines/pretrained/cisco/cisco_modeling/patched_decoder_multi_resolution.py @@ -0,0 +1,416 @@ +# +# Copyright 2025 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import math +import dataclasses +from typing import List, Tuple, Union + +import numpy as np + +import torch +from torch import nn +import torch.nn.functional as F + +from timesfm import pytorch_patched_decoder as ppd + + +@dataclasses.dataclass +class CiscoTsmMRConfig(ppd.TimesFMConfig): + """Config extension to toggle multi-resolution behaviors. + + - use_resolution_embeddings: add scale embeddings (low/high) to the token stream. + - use_special_token: insert a learned special token between streams. + """ + + use_resolution_embeddings: bool = False + use_special_token: bool = False + + +class PatchedTSMultiResolutionDecoder(ppd.PatchedTimeSeriesDecoder): + """Extension of upstream decoder with multi-resolution support. + + This class keeps the upstream API intact, while enabling two optional + behaviors: + - scale embedding per token for low/high streams, + - an optional learned special token between streams. + """ + + def __init__(self, config: CiscoTsmMRConfig): + super().__init__(config) + self.config: CiscoTsmMRConfig + + # Multi-resolution Embedding Layer + if self.config.use_resolution_embeddings: + self.multi_resolution = nn.Embedding(num_embeddings=2, + embedding_dim=self.config.hidden_size) + + # Special Token between streams + if self.config.use_special_token: + self.special_token = nn.Parameter(torch.zeros(1, 1, self.config.hidden_size)) + nn.init.normal_(self.special_token, mean=0.0, std=0.02) + + + def _reverse_transform_segments( + self, + outputs: torch.Tensor, + stats_list: List[Tuple[torch.Tensor, torch.Tensor]], + indices_list: List[Tuple[int, int]], + ) -> torch.Tensor: + """Reverse-transform with per-timeseries stats. 
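+
+        Each stream segment [start_N, end_N) along the token axis is
+        de-normalized with its own statistics: out = out * sigma + mu;
+        positions not covered by any segment pass through unchanged
+        (sigma=1, mu=0).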
+ + Args: + outputs: [B, N, P, Q] + stats_list: list of (mu, sigma) each shaped [B] + indices_list: matching list of (start_N, end_N) segment ranges over N + """ + B, N, _, _ = outputs.shape + device = outputs.device + dtype = outputs.dtype + + if len(indices_list) == 0: + return outputs + + # Build [S] tensors of segment starts/ends (S = number of streams) + starts = torch.tensor([s for (s, _) in indices_list], device=device) + ends = torch.tensor([e for (_, e) in indices_list], device=device) + S = starts.shape[0] + + # Per-batch stats stacked as [B, S] + mus = torch.stack([mu.to(dtype) for (mu, _) in stats_list], dim=1) # [B, S] + sigmas = torch.stack([sigma.to(dtype) for (_, sigma) in stats_list], dim=1) # [B, S] + + # Build boolean mask per segment over N: [S, N] + posN = torch.arange(N, device=device) + seg_mask_SN = ((posN.unsqueeze(0) >= starts.unsqueeze(1)) & + (posN.unsqueeze(0) < ends.unsqueeze(1))) # [S, N] + + # Expand to broadcast shapes: + # seg_mask: [1, S, N, 1, 1], mus/sigmas: [B, S, 1, 1, 1] + seg_mask = seg_mask_SN.unsqueeze(0).unsqueeze(-1).unsqueeze(-1).to(dtype) # [1, S, N, 1, 1] + mus_b = mus.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1) # [B, S, 1, 1, 1] + sigmas_b = sigmas.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1) # [B, S, 1, 1, 1] + + # Aggregate per-position parameters + mu_map = (mus_b * seg_mask).sum(dim=1) # [B, N, 1, 1] + sigma_map = (sigmas_b * seg_mask).sum(dim=1) # [B, N, 1, 1] + + # For positions not covered by any segment, keep outputs unchanged: sigma=1, mu=0 + covered = (seg_mask.sum(dim=1) > 0).to(dtype) # [1, N, 1, 1] + sigma_map = sigma_map + (1.0 - covered).expand(B, -1, -1, -1) # add 1 where uncovered + + return outputs * sigma_map + mu_map + + + def _postprocess_output( + self, + model_output: torch.Tensor, + horizon_len: int, + head: nn.Module, + num_outputs: int, + stats_list: list[tuple[torch.Tensor, torch.Tensor]], + indices_list: list[tuple[int, int]], + ) -> torch.Tensor: + """Postprocess output of stacked transformer.""" + + # B x N x (H.Q) + output_ts = head(model_output) + + # Reshape using view + b, n, _ = output_ts.shape + output_ts = output_ts.view(b, n, horizon_len, num_outputs) + + return self._reverse_transform_segments(output_ts, stats_list, indices_list) + + + def forward( + self, + input_ts: Union[List[torch.Tensor], torch.Tensor], + input_padding: Union[List[torch.LongTensor], torch.LongTensor], + freq: torch.Tensor, + ) -> torch.Tensor: + """Multi-resolution forward pass. + Args: + input_ts: list of batched tensors for coarse/fine resolution streams. + input_padding: list of batched paddings for coarse/fine resolution streams. + freq: batched tensor of frequency indices. 
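+        Returns:
+            Tensor of shape [B, N_tokens, horizon_len, 1 + len(quantiles)] with
+            per-patch forecasts de-normalized per stream; the special-token
+            position is dropped when use_special_token is enabled.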
+ """ + num_outputs = len(self.config.quantiles) + 1 + + if isinstance(input_ts, torch.Tensor): + raise ValueError("PatchedTSMultiResolutionDecoder expects multi-resolution inputs as a list of tensors.") + + # Multi-resolution processing + ts_coarse, ts_fine = input_ts + pad_coarse, pad_fine = input_padding + + model_input_coarse, pad_coarse, stats_coarse, _ = super()._preprocess_input( + input_ts=ts_coarse, + input_padding=pad_coarse, + ) + model_input_fine, pad_fine, stats_fine, _ = super()._preprocess_input( + input_ts=ts_fine, + input_padding=pad_fine, + ) + + B = model_input_coarse.shape[0] + Ncoarse = model_input_coarse.shape[1] + Nfine = model_input_fine.shape[1] + D = model_input_coarse.shape[2] + device = model_input_coarse.device + + # Special Token between streams + if self.config.use_special_token: + spec_tok = self.special_token.to(device).expand(B, 1, D) + spec_pad = torch.zeros(B, 1, device=device, dtype=pad_coarse.dtype) + + model_input = torch.cat([model_input_coarse, spec_tok, model_input_fine], dim=1) # [B, N1+1+N2, D] + patched_padding = torch.cat([pad_coarse, spec_pad, pad_fine], dim=1) + + # Keep mask to drop the special token position after decoding + keep_mask = torch.ones(Ncoarse + 1 + Nfine, device=device, dtype=torch.bool) + keep_mask[Ncoarse] = False # special token index + spec_len = 1 + else: + model_input = torch.cat([model_input_coarse, model_input_fine], dim=1) # [B, N1+N2, D] + patched_padding = torch.cat([pad_coarse, pad_fine], dim=1) + keep_mask = None + spec_len = 0 + + # Multi-resolution Embedding + if self.config.use_resolution_embeddings: + mr_coarse = torch.zeros(Ncoarse, dtype=torch.long, device=device) + mr_spec = torch.zeros(spec_len, dtype=torch.long, device=device) # we use 0 for special token + mr_fine = torch.ones(Nfine, dtype=torch.long, device=device) + + mr_idx = torch.cat([mr_coarse, mr_spec, mr_fine], dim=0) # [N_total] + mr_idx = mr_idx.unsqueeze(0).expand(B, -1) # [B, N_total] + + mr_emb = self.multi_resolution(mr_idx) # [B, N_total, D] + model_input += mr_emb + + if freq.device != device: + freq = freq.to(device) + + f_emb = self.freq_emb(freq) # [B, 1, D] + model_input += f_emb + + model_output = self.stacked_transformer(model_input, patched_padding) + + # Project and apply per-segment reverse-transform + + indices_list = [ + (0, Ncoarse), + (Ncoarse + spec_len, Ncoarse + spec_len + Nfine), + ] + stats_list = [stats_coarse, stats_fine] + + output_min_all = self._postprocess_output( + model_output=model_output, + horizon_len=self.config.horizon_len, + head=self.horizon_ff_layer, + num_outputs=num_outputs, + stats_list=stats_list, + indices_list=indices_list, + ) + + if keep_mask is not None: + output_min_all = output_min_all[:, keep_mask, :, :] + + return output_min_all + + + def _trim_context(self, ts: torch.Tensor, pad: torch.Tensor, max_len: int) -> tuple[torch.Tensor, torch.Tensor]: + """Trim context to be aligned to patch boundaries and within max length. + Args: + ts: [B, T, C] input time series tensor. + pad: [B, T, 1] input padding tensor. + max_len: maximum allowed length. + Returns: + Trimmed (ts, pad) tensors. 
+ """ + target_len = max(self.config.patch_len, (max_len // self.config.patch_len) * self.config.patch_len) + + if ts.shape[1] > target_len: + ts = ts[:, -target_len:, :] + pad = pad[:, -target_len:, :] + + rem = ts.shape[1] % self.config.patch_len + + if rem: + ts = ts[:, rem:, :] + pad = pad[:, rem:, :] + + return ts, pad + + + def decode( + self, + input_ts: Union[list[torch.Tensor], Tuple[torch.Tensor, torch.Tensor]], + paddings: Union[list[torch.Tensor], Tuple[torch.Tensor, torch.Tensor]], + freq: torch.LongTensor, + horizon_len: int, + agg_factor: int = 60, + offsets: List[float] = None, + scales: List[float] = None, + global_eps: float = 1e-8, + output_patch_len: int = 128, + ) -> List[Tuple[List[float], dict[str, List[float]]]]: + """Autoregressive Multiresolution Decoding. + + Args: + input_ts: list of [B, T1, C], [B, T2, C] tensors for low/high resolution streams. + paddings: list of [B, T1, 1], [B, T2, 1] paddings for low/high resolution streams. + freq: [B] tensor of frequency indices. + horizon_len: total forecast horizon length (in high-res steps). + agg_factor: aggregation factor from high-res to low-res (e.g., 60 for min->hr). + offsets: list of length B of denormalization offsets for high-res stream. + scales: list of length B of denormalization scales for high-res stream. + global_eps: small value to avoid division by zero during denormalization. + output_patch_len: number of high-res steps to decode per iteration. + + Returns: + A list of length B of tuples: + - mean forecast list of length `horizon_len`, + - dict of quantile forecasts, each a list of length `horizon_len`. + """ + if agg_factor <= 0: + raise ValueError("agg_factor must be positive for autoregressive decoding.") + + q_levels = self.config.quantiles + expected_q_plus_mean = len(q_levels) + 1 + expected_q_only = len(q_levels) + + if not isinstance(input_ts, (list, tuple)) or len(input_ts) != 2: + raise ValueError("Multi-resolution autoregressive decoding expects [low_res, high_res] inputs.") + + coarse_ts, fine_ts = input_ts + coarse_pad, fine_pad = paddings + + # Ensure 3D shapes + if coarse_ts.ndim == 2: + coarse_ts = coarse_ts.unsqueeze(-1) + if fine_ts.ndim == 2: + fine_ts = fine_ts.unsqueeze(-1) + if coarse_pad.ndim == 2: + coarse_pad = coarse_pad.unsqueeze(-1) + if fine_pad.ndim == 2: + fine_pad = fine_pad.unsqueeze(-1) + + device = fine_ts.device + batch_size = fine_ts.shape[0] + patch_len = self.config.patch_len + output_patch_len = output_patch_len or self.config.horizon_len + + # Offsets/scales for denormalization + offsets = [0.0] * batch_size if offsets is None else offsets + scales = [1.0] * batch_size if scales is None else scales + batch_offsets = torch.as_tensor(offsets, dtype=torch.float32, device=device).view(batch_size, 1) + batch_scales = torch.as_tensor(scales, dtype=torch.float32, device=device).view(batch_size, 1) + + # Keep working windows aligned and trimming to patch boundaries for performing decoding step. 
+ max_ctx_len_coarse = max(patch_len, (coarse_ts.shape[1] // patch_len) * patch_len) + coarse_ts, coarse_pad = self._trim_context(coarse_ts, coarse_pad, max_ctx_len_coarse) + max_ctx_len_fine = max(patch_len, (fine_ts.shape[1] // patch_len) * patch_len) + fine_ts, fine_pad = self._trim_context(fine_ts, fine_pad, max_ctx_len_fine) + + remaining = horizon_len + mean_chunks = [] + quant_chunks = [] + + # Number of decode steps to perform + num_decode_patches = math.ceil(horizon_len / output_patch_len) + + for _ in range(num_decode_patches): + preds = self([coarse_ts, fine_ts], [coarse_pad.float(), fine_pad.float()], freq) + if preds.ndim != 4: + raise ValueError(f"Unexpected prediction rank: {preds.shape}") + + num_coarse_patches = coarse_ts.shape[1] // patch_len + num_fine_patches = fine_ts.shape[1] // patch_len + fine_patch_idx = num_coarse_patches + num_fine_patches - 1 + + if fine_patch_idx >= preds.shape[1]: + raise ValueError(f"Fine patch index {fine_patch_idx} out of range for preds shape {preds.shape}") + + fine_patch = preds[:, fine_patch_idx, :, :] + + if fine_patch.shape[1] < output_patch_len: + raise ValueError(f"Model horizon - {fine_patch.shape[1]} < requested output_patch_len {output_patch_len}") + + fine_patch = fine_patch[:, :output_patch_len, :] + + C = fine_patch.shape[2] + if C == expected_q_plus_mean: + mean_channel = fine_patch[..., 0] # [B, L] + quant_block = fine_patch[..., 1:] # [B, L, Q] + elif C == expected_q_only: + mean_channel = None + quant_block = fine_patch + else: + raise ValueError(f"Channel count {C} != {expected_q_plus_mean} or {expected_q_only}") + + if mean_channel is None: + mean_channel = quant_block.median(dim=-1).values # [B, L] + + step_taken = min(remaining, output_patch_len) + mean_denorm = mean_channel * (batch_scales + global_eps) + batch_offsets + quant_denorm = quant_block * (batch_scales.unsqueeze(-1) + global_eps) + batch_offsets.unsqueeze(-1) + mean_denorm = torch.nan_to_num(mean_denorm, nan=0.0, posinf=0.0, neginf=-0.0) + quant_denorm = torch.nan_to_num(quant_denorm, nan=0.0, posinf=0.0, neginf=-0.0) + + mean_chunks.append(mean_denorm[:, :step_taken]) + quant_chunks.append(quant_denorm[:, :step_taken, :]) + remaining -= step_taken + + # Append normalized minute predictions for the next step. + fine_append = mean_channel[:, :output_patch_len].unsqueeze(-1) + fine_ts = torch.cat([fine_ts, fine_append], dim=1) + fine_pad = torch.cat( + [fine_pad, torch.zeros((batch_size, output_patch_len, 1), device=device, dtype=fine_pad.dtype)], + dim=1) + + # Aggregate minute predictions into coarse stream (drop remainder < agg_factor). + agg_block_len = (output_patch_len // agg_factor) * agg_factor + if agg_block_len > 0: + agg_source = mean_channel[:, :agg_block_len] + agg_vals = agg_source.view(batch_size, -1, agg_factor).mean(dim=2).unsqueeze(-1) + coarse_ts = torch.cat([coarse_ts, agg_vals.to(coarse_ts.dtype)], dim=1) + coarse_pad = torch.cat( + [coarse_pad, torch.zeros((batch_size, agg_vals.shape[1], 1), device=device, dtype=coarse_pad.dtype)], + dim=1) + + # Keep contexts aligned and bounded. 
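+            # (Each iteration appends output_patch_len normalized fine-step forecasts,
+            # plus their agg_factor block means on the coarse stream, then re-trims
+            # both contexts to their original budgets before the next decode step.)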
+            fine_ts, fine_pad = self._trim_context(fine_ts, fine_pad, max_ctx_len_fine)
+            coarse_ts, coarse_pad = self._trim_context(coarse_ts, coarse_pad, max_ctx_len_coarse)
+
+            if remaining <= 0:
+                break
+
+        mean_full = torch.cat(mean_chunks, dim=1)[:, :horizon_len]
+        quant_full = torch.cat(quant_chunks, dim=1)[:, :horizon_len, :]
+
+        mean_np = mean_full.cpu().numpy()
+        quant_np = quant_full.cpu().numpy()
+
+        final_predictions = []
+        for i in range(batch_size):
+            q_arr = np.transpose(quant_np[i], (1, 0))  # [Q, H]
+            final_predictions.append(
+                {
+                    "mean": mean_np[i],
+                    "quantiles": {str(q_levels[q_i]): q_arr[q_i] for q_i in range(q_arr.shape[0])}
+                }
+            )
+
+        return final_predictions
diff --git a/orion/primitives/chronos2.py b/orion/primitives/chronos2.py
new file mode 100644
index 00000000..aa27ab5c
--- /dev/null
+++ b/orion/primitives/chronos2.py
@@ -0,0 +1,136 @@
+"""
+This primitive is an implementation of Amazon's Chronos2 model for timeseries forecasting.
+
+The model implementation can be found at
+https://huggingface.co/amazon/chronos-2
+
+Note: This primitive assumes that Chronos2 does not depend on the specific
+timestamps of the data; we fill in a linear sequence of timestamps so that
+the model can run.
+"""
+
+import torch
+import numpy as np
+import pandas as pd
+
+try:
+    from chronos import Chronos2Pipeline
+except ImportError as ie:
+    ie.msg += (
+        '\n\nIt seems like `chronos` is not installed.\n'
+        'Please install it using:\n'
+        '\n    pip install chronos-forecasting'
+    )
+    raise
+
+
+class Chronos2:
+    """Chronos2 model for timeseries forecasting.
+
+    Args:
+        window_size (int):
+            Window size of each sample. Default to 256.
+        pred_len (int):
+            Prediction horizon length. Default to 1.
+        repo_id (str):
+            HuggingFace repository of the model checkpoint. Default to "amazon/chronos-2".
+        batch_size (int):
+            Size of one batch. Default to 32.
+        target (int):
+            Index of the target column in the multivariate case. Default to 0.
+        start_time (datetime):
+            Start time of the timeseries. Default to Jan 1, 2000 00:00:00.
+        time_interval (int):
+            Time interval between two samples in seconds. Default to 600.
+    """
+
+    def __init__(self,
+                 window_size=256,
+                 pred_len=1,
+                 repo_id="amazon/chronos-2",
+                 batch_size=32,
+                 target=0,
+                 start_time=pd.to_datetime("2000-01-01 00:00:00"),
+                 time_interval=600):
+
+        self.window_size = window_size
+        self.pred_len = pred_len
+        self.batch_size = batch_size
+        self.target = f"{target}"
+        self.start_time = start_time
+        self.time_interval = pd.Timedelta(seconds=time_interval)
+
+        device = "cuda" if torch.cuda.is_available() else "cpu"
+        self.model = Chronos2Pipeline.from_pretrained(repo_id, device_map=device)
+
+    def predict(self, X, force=False):
+        """Forecast the timeseries.
+
+        Args:
+            X (ndarray):
+                input timeseries.
+        Return:
+            ndarray:
+                forecasted timeseries.
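+
+        The input has shape (n_windows, window_size, n_features); the output
+        stacks the per-window forecasts of the target column into an array of
+        shape (n_windows, pred_len).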
+ """ + n_windows, window_size, n_features = X.shape + + outs = [] + + for i in range(0, n_windows, self.batch_size): + x_batch = self.convert_to_df(X[i:i+self.batch_size, :self.window_size], start_batch_at = i) + y_batch = self.model.predict_df( + df=x_batch, + prediction_length=self.pred_len, + quantile_levels=[0.5], + id_column="item_id", + timestamp_column="timestamp", + target=self.target, + ) + + y_batch = y_batch.sort_values(["item_id", "timestamp"]) + preds = np.stack( + y_batch.groupby("item_id", sort=False)["predictions"] + .apply(lambda s: s.to_numpy()) + .to_list() + ) + outs.append(preds) + + return np.concatenate(outs, axis=0) + + + def convert_to_df(self, x_batch, start_batch_at=0): + n_windows_in_batch, window_size, n_features = x_batch.shape + + rows = [] + for window in range(n_windows_in_batch): + for data_entry in range(window_size): + rows.append({ + "timestamp": self.start_time + self.time_interval * data_entry, + "item_id": f"window_{start_batch_at + window}", + **{f"{i}": x_batch[window, data_entry, i] for i in range(n_features)} + }) + + rows = pd.DataFrame(rows) + return rows + + +if __name__ == "__main__": + chronos2 = Chronos2() + X = np.random.rand(100, 256, 10) + y = chronos2.predict(X) + print(y.shape) + print(y) \ No newline at end of file diff --git a/orion/primitives/cisco.py b/orion/primitives/cisco.py new file mode 100644 index 00000000..b11ca9ee --- /dev/null +++ b/orion/primitives/cisco.py @@ -0,0 +1,111 @@ +""" +This primitive an implementation of Cisco's Time Series Foundation Model for timeseries forecasting. + +The model implementation can be found at +https://arxiv.org/pdf/2511.19841 + +We use code from https://github.com/splunk/cisco-time-series-model +in this primitive, which can be found in the relative import path. +""" + +import torch +import numpy as np + +try: + from ..pipelines.pretrained.cisco.cisco_modeling import CiscoTsmMR, TimesFmHparams, TimesFmCheckpoint +except ImportError as ie: + ie.msg += ( + '\n\nIt seems like `cisco` cannot be imported.\n' + 'It is likely that relative import is failing. Please flag this issue. \n' + ) + raise + + +class Cisco: + """Cisco model for timeseries forecasting. + + Args: + window_size (int): + Window size of each sample. Default to 256. + step (int): + Stride length between samples. Default to 1. + pred_len (int): + Prediction horizon length. Default to 1. + repo_id (str): + Directory of the model checkpoint. Default to "cisco-ai/cisco-time-series-model-1.0-preview" + batch_size(int): + Size of one batch. Default to 32. + freq (int): + Frequency. TimesFM expects a categorical indicator valued in {0, 1, 2}. + Default to 0. + target (int): + Index of target column in multivariate case. Default to 0. + start_time (datetime): + Start time of the timeseries. Default to Jan 1, 2020 00:00:00. + time_interval (int): + Time interval between two samples in seconds. Default to 600. 
+ """ + + def __init__( + self, + window_size=30720, # note that cisco expects a large window size because it uses long term context + pred_len=1, + repo_id="cisco-ai/cisco-time-series-model-1.0-preview", + batch_size=32, + target=0, + return_quantile=None, + ): + self.window_size = int(window_size) + self.pred_len = int(pred_len) + self.batch_size = int(batch_size) + self.target = int(target) + self.return_quantile = return_quantile + + # Match the model-card example + backend = "gpu" if torch.cuda.is_available() else "cpu" + hparams = TimesFmHparams( + num_layers=50, + use_positional_embedding=False, + backend=backend, + ) + ckpt = TimesFmCheckpoint(huggingface_repo_id=repo_id) + + self.model = CiscoTsmMR( + hparams=hparams, + checkpoint=ckpt, + use_resolution_embeddings=True, + use_special_token=True, + ) + + def predict(self, X): + """Forecast. + + Args: + X (ndarray): shape (n_windows, window_size, n_features) + Returns: + ndarray: shape (n_windows, pred_len) + """ + + n_windows = X.shape[0] + + outs = [] + for i in range(0, n_windows, self.batch_size): + x_batch = X[i:i + self.batch_size, :self.window_size, self.target].astype(np.float32) + + series_list = [x_batch[j] for j in range(x_batch.shape[0])] # x_batch.shape[0] could be lower than self.batch_size + forecast_list = self.model.forecast(series_list, horizon_len=self.pred_len) + preds = np.stack([f["mean"] for f in forecast_list], axis=0) + + outs.append(preds) + + return np.concatenate(outs, axis=0) + + + + +if __name__ == "__main__": + cisco_predictor = CiscoPredictor(window_size=256, pred_len=16, batch_size=32, target=0, return_quantile=None) + X = np.random.rand(100, 256, 10).astype(np.float32) + y = cisco_predictor.predict(X) + print(y.shape) # should be (100, 16) + print(y[:2]) \ No newline at end of file diff --git a/orion/primitives/jsons/orion.primitives.chronos2.Chronos2.json b/orion/primitives/jsons/orion.primitives.chronos2.Chronos2.json new file mode 100644 index 00000000..9630e596 --- /dev/null +++ b/orion/primitives/jsons/orion.primitives.chronos2.Chronos2.json @@ -0,0 +1,62 @@ +{ + "name": "orion.primitives.chronos2.Chronos2", + "contributors": [ + "Allen Baranov " + ], + "documentation": "https://huggingface.co/amazon/chronos-2", + "description": "Amazon Chronos2 model for timeseries forecasting", + "classifiers": { + "type": "estimator", + "subtype": "regressor" + }, + "modalities": [], + "primitive": "orion.primitives.chronos2.Chronos2", + "produce": { + "method": "predict", + "args": [ + { + "name": "X", + "type": "ndarray" + }, + { + "name": "force", + "type": "bool", + "default": false + } + ], + "output": [ + { + "name": "y_hat", + "type": "ndarray" + } + ] + }, + "hyperparameters": { + "fixed": { + "window_size": { + "type": "int", + "default": 256 + }, + "pred_len": { + "type": "int", + "default": 1 + }, + "repo_id": { + "type": "str", + "default": "amazon/chronos-2" + }, + "batch_size": { + "type": "int", + "default": 32 + }, + "target": { + "type": "int", + "default": 0 + }, + "time_interval": { + "type": "int", + "default": 600 + } + } + } +} diff --git a/orion/primitives/jsons/orion.primitives.cisco.Cisco.json b/orion/primitives/jsons/orion.primitives.cisco.Cisco.json new file mode 100644 index 00000000..826a6bd8 --- /dev/null +++ b/orion/primitives/jsons/orion.primitives.cisco.Cisco.json @@ -0,0 +1,57 @@ +{ + "name": "orion.primitives.cisco.Cisco", + "contributors": [ + "Allen Baranov " + ], + "documentation": "https://arxiv.org/pdf/2511.19841", + "description": "Cisco Time Series Foundation Model 
for timeseries forecasting", + "classifiers": { + "type": "estimator", + "subtype": "regressor" + }, + "modalities": [], + "primitive": "orion.primitives.cisco.Cisco", + "produce": { + "method": "predict", + "args": [ + { + "name": "X", + "type": "ndarray" + } + ], + "output": [ + { + "name": "y_hat", + "type": "ndarray" + } + ] + }, + "hyperparameters": { + "fixed": { + "window_size": { + "type": "int", + "default": 30720 + }, + "pred_len": { + "type": "int", + "default": 1 + }, + "repo_id": { + "type": "str", + "default": "cisco-ai/cisco-time-series-model-1.0-preview" + }, + "batch_size": { + "type": "int", + "default": 32 + }, + "target": { + "type": "int", + "default": 0 + }, + "return_quantile": { + "type": "float", + "default": null + } + } + } +} From d8a70ee173ccdbd58a4416e8072c7b1b152bc834 Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Sat, 7 Feb 2026 16:36:43 -0500 Subject: [PATCH 02/10] Added dataset-specific JSON files for Cisco and Chronos2 --- .../pretrained/chronos2/chronos2_artificialwithanomaly.json | 6 ++++++ orion/pipelines/pretrained/chronos2/chronos2_msl.json | 6 ++++++ .../pretrained/chronos2/chronos2_realadexchange.json | 6 ++++++ .../pretrained/chronos2/chronos2_realawscloudwatch.json | 6 ++++++ .../pipelines/pretrained/chronos2/chronos2_realtraffic.json | 6 ++++++ .../pipelines/pretrained/chronos2/chronos2_realtweets.json | 6 ++++++ orion/pipelines/pretrained/chronos2/chronos2_smap.json | 6 ++++++ orion/pipelines/pretrained/chronos2/chronos2_ucr.json | 6 ++++++ orion/pipelines/pretrained/chronos2/chronos2_yahooa1.json | 6 ++++++ orion/pipelines/pretrained/chronos2/chronos2_yahooa2.json | 6 ++++++ orion/pipelines/pretrained/chronos2/chronos2_yahooa3.json | 6 ++++++ orion/pipelines/pretrained/chronos2/chronos2_yahooa4.json | 6 ++++++ .../pretrained/cisco/cisco_artificialwithanomaly.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_msl.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_realadexchange.json | 6 ++++++ .../pipelines/pretrained/cisco/cisco_realawscloudwatch.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_realtraffic.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_realtweets.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_smap.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_ucr.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_yahooa1.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_yahooa2.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_yahooa3.json | 6 ++++++ orion/pipelines/pretrained/cisco/cisco_yahooa4.json | 6 ++++++ 24 files changed, 144 insertions(+) create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_artificialwithanomaly.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_msl.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_realadexchange.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_realawscloudwatch.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_realtraffic.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_realtweets.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_smap.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_ucr.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_yahooa1.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_yahooa2.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_yahooa3.json create mode 100644 orion/pipelines/pretrained/chronos2/chronos2_yahooa4.json create 
mode 100644 orion/pipelines/pretrained/cisco/cisco_artificialwithanomaly.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_msl.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_realadexchange.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_realawscloudwatch.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_realtraffic.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_realtweets.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_smap.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_ucr.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_yahooa1.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_yahooa2.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_yahooa3.json create mode 100644 orion/pipelines/pretrained/cisco/cisco_yahooa4.json diff --git a/orion/pipelines/pretrained/chronos2/chronos2_artificialwithanomaly.json b/orion/pipelines/pretrained/chronos2/chronos2_artificialwithanomaly.json new file mode 100644 index 00000000..eebcc81d --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_artificialwithanomaly.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_msl.json b/orion/pipelines/pretrained/chronos2/chronos2_msl.json new file mode 100644 index 00000000..e4fe0c11 --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_msl.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 21600 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_realadexchange.json b/orion/pipelines/pretrained/chronos2/chronos2_realadexchange.json new file mode 100644 index 00000000..6b8aac0a --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_realadexchange.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 3600 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_realawscloudwatch.json b/orion/pipelines/pretrained/chronos2/chronos2_realawscloudwatch.json new file mode 100644 index 00000000..eebcc81d --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_realawscloudwatch.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_realtraffic.json b/orion/pipelines/pretrained/chronos2/chronos2_realtraffic.json new file mode 100644 index 00000000..eebcc81d --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_realtraffic.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_realtweets.json b/orion/pipelines/pretrained/chronos2/chronos2_realtweets.json new file mode 100644 index 00000000..eebcc81d --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_realtweets.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_smap.json b/orion/pipelines/pretrained/chronos2/chronos2_smap.json new file mode 100644 index 
00000000..e4fe0c11 --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_smap.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 21600 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_ucr.json b/orion/pipelines/pretrained/chronos2/chronos2_ucr.json new file mode 100644 index 00000000..f3ca6b04 --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_ucr.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 300 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_yahooa1.json b/orion/pipelines/pretrained/chronos2/chronos2_yahooa1.json new file mode 100644 index 00000000..0fdb0776 --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_yahooa1.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 1 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_yahooa2.json b/orion/pipelines/pretrained/chronos2/chronos2_yahooa2.json new file mode 100644 index 00000000..0fdb0776 --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_yahooa2.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 1 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_yahooa3.json b/orion/pipelines/pretrained/chronos2/chronos2_yahooa3.json new file mode 100644 index 00000000..0fdb0776 --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_yahooa3.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 1 + } +} diff --git a/orion/pipelines/pretrained/chronos2/chronos2_yahooa4.json b/orion/pipelines/pretrained/chronos2/chronos2_yahooa4.json new file mode 100644 index 00000000..0fdb0776 --- /dev/null +++ b/orion/pipelines/pretrained/chronos2/chronos2_yahooa4.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 1 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_artificialwithanomaly.json b/orion/pipelines/pretrained/cisco/cisco_artificialwithanomaly.json new file mode 100644 index 00000000..eebcc81d --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_artificialwithanomaly.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_msl.json b/orion/pipelines/pretrained/cisco/cisco_msl.json new file mode 100644 index 00000000..e4fe0c11 --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_msl.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 21600 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_realadexchange.json b/orion/pipelines/pretrained/cisco/cisco_realadexchange.json new file mode 100644 index 00000000..6b8aac0a --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_realadexchange.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 3600 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_realawscloudwatch.json 
b/orion/pipelines/pretrained/cisco/cisco_realawscloudwatch.json new file mode 100644 index 00000000..eebcc81d --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_realawscloudwatch.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_realtraffic.json b/orion/pipelines/pretrained/cisco/cisco_realtraffic.json new file mode 100644 index 00000000..eebcc81d --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_realtraffic.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_realtweets.json b/orion/pipelines/pretrained/cisco/cisco_realtweets.json new file mode 100644 index 00000000..eebcc81d --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_realtweets.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 600 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_smap.json b/orion/pipelines/pretrained/cisco/cisco_smap.json new file mode 100644 index 00000000..e4fe0c11 --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_smap.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 21600 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_ucr.json b/orion/pipelines/pretrained/cisco/cisco_ucr.json new file mode 100644 index 00000000..f3ca6b04 --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_ucr.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 300 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_yahooa1.json b/orion/pipelines/pretrained/cisco/cisco_yahooa1.json new file mode 100644 index 00000000..0fdb0776 --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_yahooa1.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 1 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_yahooa2.json b/orion/pipelines/pretrained/cisco/cisco_yahooa2.json new file mode 100644 index 00000000..0fdb0776 --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_yahooa2.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 1 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_yahooa3.json b/orion/pipelines/pretrained/cisco/cisco_yahooa3.json new file mode 100644 index 00000000..0fdb0776 --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_yahooa3.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 1 + } +} diff --git a/orion/pipelines/pretrained/cisco/cisco_yahooa4.json b/orion/pipelines/pretrained/cisco/cisco_yahooa4.json new file mode 100644 index 00000000..0fdb0776 --- /dev/null +++ b/orion/pipelines/pretrained/cisco/cisco_yahooa4.json @@ -0,0 +1,6 @@ +{ + "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { + "time_column": "timestamp", + "interval": 1 + } +} From aa239f4c38e1e74605e1be91f1c2f0cad5440828 Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Tue, 10 Feb 2026 02:28:54 -0500 Subject: [PATCH 
03/10] Add chronos2 tutorial notebook with hyperparameters --- tutorials/pipelines/chronos2.ipynb | 721 +++++++++++++++++++++++++++++ 1 file changed, 721 insertions(+) create mode 100644 tutorials/pipelines/chronos2.ipynb diff --git a/tutorials/pipelines/chronos2.ipynb b/tutorials/pipelines/chronos2.ipynb new file mode 100644 index 00000000..0d21f507 --- /dev/null +++ b/tutorials/pipelines/chronos2.ipynb @@ -0,0 +1,721 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from orion.data import load_signal, load_anomalies" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 1. Data" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
timestampvalue
01222819200-0.366359
11222840800-0.394108
212228624000.403625
31222884000-0.362759
41222905600-0.370746
\n", + "
" + ], + "text/plain": [ + " timestamp value\n", + "0 1222819200 -0.366359\n", + "1 1222840800 -0.394108\n", + "2 1222862400 0.403625\n", + "3 1222884000 -0.362759\n", + "4 1222905600 -0.370746" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "signal_name = 'S-1'\n", + "\n", + "data = load_signal(signal_name)\n", + "\n", + "anomalies = load_anomalies(signal_name)\n", + "\n", + "data.head()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 2. Pipeline" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/baranov/miniconda/envs/orion310/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n", + "2026-02-10 01:37:01.358594: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used.\n", + "2026-02-10 01:37:01.392927: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", + "2026-02-10 01:37:01.392954: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", + "2026-02-10 01:37:01.392990: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", + "2026-02-10 01:37:01.400419: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", + "To enable the following instructions: AVX2 AVX512F FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", + "2026-02-10 01:37:02.090769: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n" + ] + } + ], + "source": [ + "from mlblocks import MLPipeline\n", + "\n", + "pipeline_name = 'chronos2'\n", + "\n", + "pipeline = MLPipeline(pipeline_name)\n", + " " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Hyperparameters\n", + "\n", + "The Chronos2 pipeline can be customized with the following hyperparameters:\n", + "\n", + "| Primitive | Parameter | Default | Description |\n", + "|-----------|-----------|---------|-------------|\n", + "| time_segments_aggregate | `interval` | 600 | Aggregation interval in seconds |\n", + "| time_segments_aggregate | `method` | \"mean\" | Aggregation method (mean, median, sum) |\n", + "| rolling_window_sequences | `window_size` | 256 | Context window size |\n", + "| **Chronos2** | `window_size` | 256 | Must match rolling_window_sequences |\n", + "| **Chronos2** | `pred_len` | 1 | Prediction horizon length |\n", + "| **Chronos2** | `repo_id` | \"amazon/chronos-2\" | HuggingFace model repository |\n", + "| **Chronos2** | `batch_size` | 32 | Batch size for inference |\n", + "| **Chronos2** | `target` | 0 | Target column index (multivariate) |\n", + "| **Chronos2** | `time_interval` | 600 | Time interval between samples (seconds) |\n", + "| find_anomalies | `window_size_portion` | 0.33 | 
Portion of data for window |\n",
+    "| find_anomalies | `fixed_threshold` | True | Use fixed vs dynamic threshold |"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "hyperparameters = {\n",
+    "    \"mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1\": {\n",
+    "        \"time_column\": \"timestamp\",\n",
+    "        \"interval\": 600, \n",
+    "        \"method\": \"mean\" \n",
+    "    },\n",
+    "    \n",
+    "    \"mlstars.custom.timeseries_preprocessing.rolling_window_sequences#1\": {\n",
+    "        \"target_column\": 0,\n",
+    "        \"window_size\": 256 \n",
+    "    },\n",
+    "    \n",
+    "    \"orion.primitives.chronos2.Chronos2#1\": {\n",
+    "        \"window_size\": 256, \n",
+    "        \"pred_len\": 1, \n",
+    "        \"repo_id\": \"amazon/chronos-2\", \n",
+    "        \"batch_size\": 32, \n",
+    "        \"target\": 0, \n",
+    "        \"time_interval\": 600 \n",
+    "    },\n",
+    "    \n",
+    "    \"orion.primitives.timeseries_anomalies.find_anomalies#1\": {\n",
+    "        \"window_size_portion\": 0.33,\n",
+    "        \"window_step_size_portion\": 0.1,\n",
+    "        \"fixed_threshold\": True\n",
+    "    }\n",
+    "}\n",
+    "\n",
+    "pipeline.set_hyperparameters(hyperparameters)"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## step by step execution\n",
+    "\n",
+    "MLPipelines are composed of a sequence of primitives. These primitives apply transformation and calculation operations to the data and update the variables within the pipeline. To view the primitives used by the pipeline, we access its `primitives` attribute.\n",
+    "\n",
+    "The `chronos2` pipeline contains 6 primitives. We will observe how the `context` (the variables held within the pipeline) is updated after the execution of each primitive."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "['mlstars.custom.timeseries_preprocessing.time_segments_aggregate',\n",
+       " 'sklearn.impute.SimpleImputer',\n",
+       " 'mlstars.custom.timeseries_preprocessing.rolling_window_sequences',\n",
+       " 'orion.primitives.chronos2.Chronos2',\n",
+       " 'orion.primitives.timeseries_errors.regression_errors',\n",
+       " 'orion.primitives.timeseries_anomalies.find_anomalies']"
+      ]
+     },
+     "execution_count": 5,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "pipeline.primitives"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### time segments aggregate\n",
+    "this primitive creates an equi-spaced time series by aggregating values over a fixed, specified interval.\n",
+    "\n",
+    "* **input**: `X` which is an n-dimensional sequence of values.\n",
+    "* **output**:\n",
+    "    - `X` sequence of aggregated values, one column for each aggregation method.\n",
+    "    - `index` sequence of index values (first index of each aggregated segment).\n",
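+    "\n",
+    "as a rough, illustrative sketch only (a hypothetical helper, not the actual `mlstars` implementation), the aggregation behaves like bucketing the raw timestamps into fixed-width intervals; empty segments surface as NaN, which is why `SimpleImputer` runs next:\n",
+    "\n",
+    "```python\n",
+    "import numpy as np\n",
+    "import pandas as pd\n",
+    "\n",
+    "def aggregate_sketch(df, interval=600, method='mean'):\n",
+    "    # bucket each row by the start timestamp of its interval\n",
+    "    buckets = (df['timestamp'] // interval) * interval\n",
+    "    agg = df.groupby(buckets)['value'].agg(method)\n",
+    "    # emit every interval between min and max; empty segments become NaN\n",
+    "    full = np.arange(buckets.min(), buckets.max() + interval, interval)\n",
+    "    agg = agg.reindex(full)\n",
+    "    return agg.to_numpy().reshape(-1, 1), agg.index.to_numpy()\n",
+    "```"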
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['X', 'index'])" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "context = pipeline.fit(data, output_=0)\n", + "context.keys()" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "entry at 1222819200 has value [-0.36635895]\n", + "entry at 1222819800 has value [nan]\n", + "entry at 1222820400 has value [nan]\n", + "entry at 1222821000 has value [nan]\n", + "entry at 1222821600 has value [nan]\n" + ] + } + ], + "source": [ + "for i, x in list(zip(context['index'], context['X']))[:5]:\n", + " print(\"entry at {} has value {}\".format(i, x))" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### SimpleImputer\n", + "this primitive is an imputation transformer for filling missing values.\n", + "* **input**: `X` which is an n-dimensional sequence of values.\n", + "* **output**: `X` which is a transformed version of X." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['index', 'X'])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "step = 1\n", + "\n", + "context = pipeline.fit(**context, output_=step, start_=step)\n", + "context.keys()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### rolling window sequence\n", + "this primitive generates many sub-sequences of the original sequence. it uses a rolling window approach to create the sub-sequences out of time series data.\n", + "\n", + "* **input**: \n", + " - `X` n-dimensional sequence to iterate over.\n", + " - `index` array containing the index values of X.\n", + "* **output**:\n", + " - `X` input sequences.\n", + " - `y` target sequences.\n", + " - `index` first index value of each input sequence.\n", + " - `target_index` first index value of each target sequence." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['index', 'X', 'y', 'target_index'])" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "step = 2\n", + "\n", + "context = pipeline.fit(**context, output_=step, start_=step)\n", + "context.keys()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "X shape = (365073, 256, 1)\n", + "y shape = (365073, 1)\n", + "index shape = (365073,)\n", + "target index shape = (365073,)\n" + ] + } + ], + "source": [ + "# after slicing X into multiple sub-sequences\n", + "# we obtain a 3 dimensional matrix X where\n", + "# the shape indicates (# slices, window size, 1)\n", + "# and similarly y is (# slices, target size)\n", + "\n", + "print(\"X shape = {}\\ny shape = {}\\nindex shape = {}\\ntarget index shape = {}\".format(\n", + " context['X'].shape, context['y'].shape, context['index'].shape, context['target_index'].shape))" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Chronos-2\n", + "This is the forecasting step using Amazon Chronos-2 Time Series Foundation Model. 
You can read more about it in the [related paper](https://arxiv.org/abs/2510.15821). The [Huggingface Repo](https://huggingface.co/amazon/chronos-2) has additional helpful information about the use of the model. This is a multivariate model that does single target predictions.\n", + "\n", + "* **input**: \n", + " - `X` n-dimensional array containing the input sequences for the model.\n", + "* **output**: \n", + " - `y_hat` predicted values for target column" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['index', 'target_index', 'X', 'y', 'y_hat'])" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "step = 3\n", + "\n", + "context = pipeline.fit(**context, output_=step, start_=step)\n", + "context.keys()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(365073, 1)" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "context['y_hat'].shape" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### regression errors\n", + "\n", + "this primitive computes an array of errors comparing predictions and expected output.\n", + "\n", + "* **input**: \n", + " - `y` ground truth.\n", + " - `y_hat` forecasted values.\n", + "* **output**: `errors` array of errors." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['index', 'target_index', 'y_hat', 'X', 'y', 'errors'])" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "step = 4\n", + "\n", + "context = pipeline.fit(**context, output_=step, start_=step)\n", + "context.keys()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### find anomalies\n", + "\n", + "this primitive finds anomalies from sequence of errors\n", + "\n", + "* **input**: \n", + " - `errors` array of errors\n", + " - `target_index` indices\n", + "* **output**: `anomalies`." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['index', 'target_index', 'y_hat', 'errors', 'X', 'y', 'anomalies'])" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "step = 5\n", + "\n", + "context = pipeline.fit(**context, output_=step, start_=step)\n", + "context.keys()" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[1.2229836e+09, 1.2231516e+09, 5.3378149e-01]])" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "context['anomalies']" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 3. Evaluate performance\n", + "\n", + "In this next step we will load some already known anomalous intervals and evaluate how\n", + "good our anomaly detection was by comparing those with our detected intervals.\n", + "\n", + "For this, we will first load the known anomalies for the signal that we are using:" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
startend
013981680001407823200
\n", + "
" + ], + "text/plain": [ + " start end\n", + "0 1398168000 1407823200" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from orion.data import load_anomalies\n", + "\n", + "ground_truth = load_anomalies('S-1')\n", + "ground_truth" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(1222983600.0, 1223151600.0)]" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "anomalies = []\n", + "for ano in context['anomalies']:\n", + " anomalies.append((ano[0], ano[1]))\n", + "anomalies" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(None, 1, 1, 0)" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from orion.evaluation import contextual_confusion_matrix, contextual_f1_score\n", + "\n", + "start, end = context['index'][0], context['index'][-1]\n", + "\n", + "contextual_confusion_matrix(ground_truth, anomalies, start = start, end = end, weighted=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Invalid value encountered for precision 0.0/ recall 0.0.\n", + "Traceback (most recent call last):\n", + " File \"/home/baranov/projects/Orion/orion/evaluation/common.py\", line 70, in _f1_score\n", + " return 2 * (precision * recall) / (precision + recall)\n", + "ZeroDivisionError: float division by zero\n" + ] + }, + { + "data": { + "text/plain": [ + "nan" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "contextual_f1_score(ground_truth, anomalies, start = start, end = end, weighted=False)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "orion310", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.18" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 277127221c0b56c829694f4d86e602dcd3529815 Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Tue, 10 Feb 2026 02:33:43 -0500 Subject: [PATCH 04/10] Remove cisco files from chronos-2 branch --- orion/pipelines/pretrained/cisco/cisco.json | 39 -- .../cisco/cisco_artificialwithanomaly.json | 6 - .../cisco/cisco_modeling/__init__.py | 28 -- .../cisco/cisco_modeling/cisco_tsm_mr.py | 393 ----------------- .../patched_decoder_multi_resolution.py | 416 ------------------ .../pipelines/pretrained/cisco/cisco_msl.json | 6 - .../cisco/cisco_realadexchange.json | 6 - .../cisco/cisco_realawscloudwatch.json | 6 - .../pretrained/cisco/cisco_realtraffic.json | 6 - .../pretrained/cisco/cisco_realtweets.json | 6 - .../pretrained/cisco/cisco_smap.json | 6 - .../pipelines/pretrained/cisco/cisco_ucr.json | 6 - .../pretrained/cisco/cisco_yahooa1.json | 6 - .../pretrained/cisco/cisco_yahooa2.json | 6 - .../pretrained/cisco/cisco_yahooa3.json | 6 - .../pretrained/cisco/cisco_yahooa4.json | 6 - orion/primitives/cisco.py | 111 ----- .../jsons/orion.primitives.cisco.Cisco.json | 57 --- 18 files changed, 1116 deletions(-) delete mode 100644 orion/pipelines/pretrained/cisco/cisco.json 
delete mode 100644 orion/pipelines/pretrained/cisco/cisco_artificialwithanomaly.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_modeling/__init__.py delete mode 100644 orion/pipelines/pretrained/cisco/cisco_modeling/cisco_tsm_mr.py delete mode 100644 orion/pipelines/pretrained/cisco/cisco_modeling/patched_decoder_multi_resolution.py delete mode 100644 orion/pipelines/pretrained/cisco/cisco_msl.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_realadexchange.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_realawscloudwatch.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_realtraffic.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_realtweets.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_smap.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_ucr.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_yahooa1.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_yahooa2.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_yahooa3.json delete mode 100644 orion/pipelines/pretrained/cisco/cisco_yahooa4.json delete mode 100644 orion/primitives/cisco.py delete mode 100644 orion/primitives/jsons/orion.primitives.cisco.Cisco.json diff --git a/orion/pipelines/pretrained/cisco/cisco.json b/orion/pipelines/pretrained/cisco/cisco.json deleted file mode 100644 index 2c359ba5..00000000 --- a/orion/pipelines/pretrained/cisco/cisco.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "primitives": [ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate", - "sklearn.impute.SimpleImputer", - "mlstars.custom.timeseries_preprocessing.rolling_window_sequences", - "orion.primitives.cisco.Cisco", - "orion.primitives.timeseries_errors.regression_errors", - "orion.primitives.timeseries_anomalies.find_anomalies" - ], - "init_params": { - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 600, - "method": "mean" - }, - "mlstars.custom.timeseries_preprocessing.rolling_window_sequences#1": { - "target_column": 0, - "window_size": 30720, - "step_size": 16 - }, - "orion.primitives.timeseries_anomalies.find_anomalies#1": { - "window_size_portion": 0.33, - "window_step_size_portion": 0.1, - "fixed_threshold": true, - "window_size": 250, - "window_step_size": 40 - } - }, - "input_names": { - "orion.primitives.timeseries_anomalies.find_anomalies#1": { - "index": "target_index" - } - }, - "output_names": { - "orion.primitives.timeseries_anomalies.find_anomalies#1": { - "y": "anomalies" - } - } -} \ No newline at end of file diff --git a/orion/pipelines/pretrained/cisco/cisco_artificialwithanomaly.json b/orion/pipelines/pretrained/cisco/cisco_artificialwithanomaly.json deleted file mode 100644 index eebcc81d..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_artificialwithanomaly.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 600 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_modeling/__init__.py b/orion/pipelines/pretrained/cisco/cisco_modeling/__init__.py deleted file mode 100644 index 95dc496d..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_modeling/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright 2025 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Multi-resolution Cisco Time Series Model. -""" - -from .patched_decoder_multi_resolution import ( - CiscoTsmMRConfig, - PatchedTSMultiResolutionDecoder, -) -from .cisco_tsm_mr import CiscoTsmMR, TimesFmHparams, TimesFmCheckpoint - -__all__ = [ - "CiscoTsmMRConfig", - "PatchedTSMultiResolutionDecoder", - "CiscoTsmMR", -] diff --git a/orion/pipelines/pretrained/cisco/cisco_modeling/cisco_tsm_mr.py b/orion/pipelines/pretrained/cisco/cisco_modeling/cisco_tsm_mr.py deleted file mode 100644 index ed077c0c..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_modeling/cisco_tsm_mr.py +++ /dev/null @@ -1,393 +0,0 @@ -# -# Copyright 2025 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import logging -from os import path -from typing import Any, List, Sequence, Union, Tuple - -import numpy as np - -import torch - -from huggingface_hub import snapshot_download - -from timesfm import TimesFmHparams, TimesFmCheckpoint -from timesfm.timesfm_torch import TimesFmTorch -from timesfm.timesfm_base import strip_leading_nans, linear_interpolation - -from .patched_decoder_multi_resolution import CiscoTsmMRConfig, PatchedTSMultiResolutionDecoder - - -class CiscoTsmMR(TimesFmTorch): - """Cisco Time Series Model Multi-resolution Forecast API.""" - - def __init__( - self, - hparams: TimesFmHparams, - checkpoint: TimesFmCheckpoint, - *, - use_resolution_embeddings: bool = True, - use_special_token: bool = True, - ) -> None: - self.use_resolution_embeddings = use_resolution_embeddings - self.use_special_token = use_special_token - super().__init__(hparams, checkpoint) - - def __post_init__(self): - # Building MR config - self._model_config = CiscoTsmMRConfig( - num_layers=self.num_layers, - num_heads=self.num_heads, - hidden_size=self.model_dims, - intermediate_size=self.model_dims, - patch_len=self.input_patch_len, - horizon_len=self.output_patch_len, - head_dim=self.model_dims // self.num_heads, - quantiles=self.quantiles, - use_positional_embedding=self.use_pos_emb, - use_resolution_embeddings=self.use_resolution_embeddings, - use_special_token=self.use_special_token, - ) - self._model = None - self.num_cores = 1 - self.global_batch_size = self.per_core_batch_size - self._device = torch.device("cuda:0" if ( - torch.cuda.is_available() and self.backend == "gpu") else "cpu") - self._median_index = -1 - - - def load_from_checkpoint( - self, - checkpoint: TimesFmCheckpoint, - ) -> None: - """Loads a Multiresolution Model checkpoint from path and prepares the MR decoder for inference. - Args: - checkpoint: TimesFmCheckpoint object containing checkpoint info (local or HF repo). 
- """ - - checkpoint_path = checkpoint.path - repo_id = checkpoint.huggingface_repo_id - if checkpoint_path is None: - checkpoint_path = path.join( - snapshot_download(repo_id, local_dir=checkpoint.local_dir), - "torch_model.pt") - self._model = PatchedTSMultiResolutionDecoder(self._model_config) - loaded_checkpoint = torch.load(checkpoint_path, weights_only=True, map_location=self._device) - logging.info("Loading checkpoint from %s", checkpoint_path) - incompatible = self._model.load_state_dict(loaded_checkpoint, strict=True) - - if getattr(incompatible, "missing_keys", None) or getattr(incompatible, "unexpected_keys", None): - logging.info( - "MR decoder state load differences. missing=%s unexpected=%s", - getattr(incompatible, "missing_keys", []), - getattr(incompatible, "unexpected_keys", []), - ) - - logging.info(f"Loaded model from checkpoint: {checkpoint_path}") - logging.info("Sending checkpoint to device %s", f"{self._device}") - - self._model.to(self._device) - self._model.eval() - - - def _pad_or_truncate(self, - ts: torch.Tensor, - target_len: int) -> Tuple[torch.Tensor, torch.Tensor]: - """Pad or truncate a time series to a target length, especially [LEFT-PADDING]. - Args: - ts: 1D or 2D tensor of shape [L] or [L, 1]. - target_len: desired target length after padding/truncation. - Returns: - padded_ts: tensor of shape [target_len]. - pad_mask: tensor of shape [target_len], with 1.0 for padded positions and 0.0 for actual data. - - """ - - if ts.ndim == 2 and ts.shape[-1] == 1: - ts = ts.squeeze(-1) - - L = ts.shape[0] - - if L == target_len: - return ts, torch.zeros_like(ts, dtype=torch.float32) - - if L > target_len: - return ts[-target_len:], torch.zeros(target_len, dtype=torch.float32) - - pad_len = target_len - L - padded = torch.cat([torch.zeros(pad_len, dtype=ts.dtype), ts], dim=0) - - pad_mask = torch.cat([ - torch.ones(pad_len, dtype=torch.float32), - torch.zeros(L, dtype=torch.float32) - ], dim=0) - - return padded, pad_mask - - - def normalize_with_pad(self, - context, - pad_mask: torch.Tensor | None = None, - clamp_range=(-1000, 1000)) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, float]: - """Normalize context with padding mask. - Args: - context: tensor of shape [B, T] or [B, T, 1]. - pad_mask: tensor of shape [B, T], with 1.0 for padded positions and 0.0 for actual data. - clamp_range: tuple of (min, max) to clamp normalized values. Default (-1000, 1000). - Returns: - ctx_normalized: normalized context tensor with same shape as input. - offset: mean used for normalization, shape [B, 1]. - scale: stddev used for normalization, shape [B, 1]. - eps: small epsilon value used for numerical stability. 
- """ - - eps = 1e-8 - - if context.ndim == 3: - context = context.squeeze(-1) - - if pad_mask is None: - pad_mask = torch.zeros_like(context) - - valid = (1.0 - pad_mask) # 1 for real, 0 for pad - # Prevent divide-by-zero - count = valid.sum(dim=1, keepdim=True).clamp_min(1.0) - - # Masked mean, variance and std - context_mean = (context * valid).sum(dim=1, keepdim=True) / count - - # Center for variance - context_var = (((context - context_mean) * valid)**2).sum(dim=1, keepdim=True) / count - context_std = context_var.sqrt() - - ctx_normalized = (context - context_mean) / (context_std + eps) - stats = (context_mean, context_std) - - ctx_normalized = ctx_normalized * valid - - ctx_normalized = torch.clamp(ctx_normalized, *clamp_range) - - offset, scale = stats - - return ctx_normalized, offset, scale, eps - - - def slice_fine_context(self, series: List[float], fine_len: int = 512) -> List[float]: - """Return the rightmost fine_len points (or entire series if shorter). - Args: - series: list or array of fine-resolution (fine-level) time series data. - fine_len: desired length of fine-level context to extract. - Returns: - List of floats representing the fine-level context of length <= fine_len. - """ - return series[-fine_len:] - - - def build_coarse_context(self, series: np.ndarray, max_coarse_ctx: int = 512, block: int = 60) -> List[float]: - """Construct coarse context by: - 1. Taking up to rightmost (max_coarse_ctx * block) raw fine samples. - 2. Partitioning into consecutive non-overlapping blocks of 'block' size from left to right (chronological order preserved). - 3. Computing the mean of each block. - - Args: - series: array of fine-resolution (fine-level) time series data. - max_coarse_ctx: maximum number of coarse points to return. - block: number of fine samples to aggregate into one coarse sample. - Returns: - List of floats representing coarse means with length <= max_coarse_ctx. - """ - needed_raw = max_coarse_ctx * block - raw_slice = series[-needed_raw:] - # Ensure we only form full blocks; drop partial leading block if length not multiple - remainder = len(raw_slice) % block - if remainder != 0: - raw_slice = raw_slice[remainder:] # align to block boundary at the right edge - coarse = [] - for i in range(0, len(raw_slice), block): - block_vals = raw_slice[i:i+block] - if len(block_vals) < block: - break - coarse.append(float(sum(block_vals) / block)) - return coarse[-max_coarse_ctx:] - - - def build_multi_resolution(self, series: np.ndarray, agg_factor: int = 60) -> Tuple[List[float], List[float]]: - """Builds multi-resolution contexts from a fine-resolution time series. - Args: - series: array of fine-resolution (fine-level) time series data. - agg_factor: aggregation factor to form coarse context from fine context. - Returns: - Tuple of: - - coarse_ctx: list of floats representing the coarse context. - - fine_ctx: list of floats representing the fine context. - """ - - coarse_ctx = self.build_coarse_context(series, max_coarse_ctx=512, block=agg_factor) - fine_ctx = self.slice_fine_context(series) - return coarse_ctx, fine_ctx - - - def _normalize_inputs(self, inputs): - """Normalizes input series into a batched series. - Args: - inputs: single series or list of series. - Returns: - List of series, each as a list of floats. - """ - - if isinstance(inputs, (np.ndarray, torch.Tensor)): - inputs = inputs.tolist() - - if not isinstance(inputs, (list, tuple)): - return [[inputs]] - - # Compute series depth. 
- def _depth(x): - if isinstance(x, (list, tuple, np.ndarray, torch.Tensor)): - return 1 + (max((_depth(y) for y in x), default=0)) - return 0 - - d = _depth(inputs) - if d > 2: - raise ValueError("Input series must be strictly list-of-lists or a list.") - - if d == 1: - return [list(inputs)] - - final_inps = [] - for s in inputs: - if isinstance(s, (np.ndarray, torch.Tensor)): - s = s.tolist() - elif not isinstance(s, (list, tuple, np.ndarray, torch.Tensor)): - raise ValueError("Each series must be list-like when providing a list of series.") - - final_inps.append(list(s)) - return final_inps - - - def forecast(self, - inputs: Sequence[Any], - horizon_len: Union[int, None] = None, - agg_factor: int = 60, - batch_size: int = 8) -> List[dict[str, Any]]: - """Forecasts from a single fine-resolution stream. - - Derives the coarse-resolution stream by aggregating the fine-resolution - context in blocks of `agg_factor` (e.g., 60 minutes -> hourly) and then - runs multi-resolution decoding. - - Args: - inputs: list-like of fine-resolution context series. - horizon_len: forecast horizon length; if None, uses the model's configured horizon length. - agg_factor: size of aggregation window to form the coarse context from the fine context. - batch_size: batch size for forecasting. - - Returns: - List of dictionaries containing mean and quantile forecasts for each series input. - """ - if self._model is None: - raise ValueError("Checkpoint is not properly loaded.") - - if horizon_len is None: - horizon_len = self.output_patch_len - - if horizon_len <= 0: - raise ValueError("horizon_len must be positive") - - if agg_factor <= 0: - raise ValueError("agg_factor must be positive") - - fine_contexts = [] - coarse_contexts = [] - fine_pads = [] - coarse_pads = [] - offsets_fine = [] - scales_fine = [] - global_eps = 1e-8 - - horizon_len = horizon_len or self.output_patch_len - - CONTEXT_LEN_FINE = 512 - CONTEXT_LEN_COARSE = 512 - - inputs = self._normalize_inputs(inputs) - - for seq in inputs: - series = np.array(seq) - if not np.isfinite(series).all(): - series = np.where(np.isfinite(series), series, np.nan) - series = strip_leading_nans(series) - series = linear_interpolation(series) - - coarse_ctx, fine_ctx = self.build_multi_resolution(series, agg_factor=agg_factor) - - # Raw tensors - ctx_coarse = torch.tensor(coarse_ctx, dtype=torch.float32) - ctx_fine = torch.tensor(fine_ctx, dtype=torch.float32) - - # Pad / truncate - ctx_coarse_pad, mask_coarse = self._pad_or_truncate(ctx_coarse, CONTEXT_LEN_COARSE) - ctx_fine_pad, mask_fine = self._pad_or_truncate(ctx_fine, CONTEXT_LEN_FINE) - - # Add batch dim - ctx_coarse_pad_b = ctx_coarse_pad.unsqueeze(0) - mask_coarse_b = mask_coarse.unsqueeze(0) - ctx_fine_pad_b = ctx_fine_pad.unsqueeze(0) - mask_fine_b = mask_fine.unsqueeze(0) - - # Normalize - norm_coarse, _, _, _ = self.normalize_with_pad(ctx_coarse_pad_b, pad_mask=mask_coarse_b) - norm_fine, offset_fine, scale_fine, _ = self.normalize_with_pad(ctx_fine_pad_b, pad_mask=mask_fine_b) - - # Store normalized contexts - coarse_contexts.append(norm_coarse.squeeze(0).numpy()) # [L_coarse] - coarse_pads.append(mask_coarse_b.squeeze(0).numpy()) - fine_contexts.append(norm_fine.squeeze(0).numpy()) # [L_fine] - fine_pads.append(mask_fine_b.squeeze(0).numpy()) - - # Store scalar stats only from fine contexts - offsets_fine.append(float(offset_fine.squeeze())) - scales_fine.append(float(scale_fine.squeeze())) - - # Arrays of shape [N] - offsets_fine = np.array(offsets_fine, dtype=np.float32) - scales_fine = 
np.array(scales_fine, dtype=np.float32) - - N = len(fine_contexts) - final_predictions = [] - - for start in range(0, N, batch_size): - end = min(start + batch_size, N) - - batch_coarse = torch.as_tensor(np.stack(coarse_contexts[start:end]), dtype=torch.float32).unsqueeze(-1).to(self._device) - batch_coarse_pad = torch.as_tensor(np.stack(coarse_pads[start:end]), dtype=torch.float32).unsqueeze(-1).to(self._device) - batch_fine = torch.as_tensor(np.stack(fine_contexts[start:end]), dtype=torch.float32).unsqueeze(-1).to(self._device) - batch_fine_pad = torch.as_tensor(np.stack(fine_pads[start:end]), dtype=torch.float32).unsqueeze(-1).to(self._device) - - freq_tensor = torch.zeros((end - start, 1), dtype=torch.long, device=self._device) - - with torch.no_grad(): - preds = self._model.decode([batch_coarse, batch_fine], - [batch_coarse_pad.float(), batch_fine_pad.float()], - freq_tensor, - horizon_len=horizon_len, - agg_factor=agg_factor, - offsets=offsets_fine[start:end], - scales=scales_fine[start:end], - global_eps=global_eps, - output_patch_len=self.output_patch_len) - - final_predictions += preds - - return final_predictions diff --git a/orion/pipelines/pretrained/cisco/cisco_modeling/patched_decoder_multi_resolution.py b/orion/pipelines/pretrained/cisco/cisco_modeling/patched_decoder_multi_resolution.py deleted file mode 100644 index 1b1e8ce3..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_modeling/patched_decoder_multi_resolution.py +++ /dev/null @@ -1,416 +0,0 @@ -# -# Copyright 2025 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import math -import dataclasses -from typing import List, Tuple, Union - -import numpy as np - -import torch -from torch import nn -import torch.nn.functional as F - -from timesfm import pytorch_patched_decoder as ppd - - -@dataclasses.dataclass -class CiscoTsmMRConfig(ppd.TimesFMConfig): - """Config extension to toggle multi-resolution behaviors. - - - use_resolution_embeddings: add scale embeddings (low/high) to the token stream. - - use_special_token: insert a learned special token between streams. - """ - - use_resolution_embeddings: bool = False - use_special_token: bool = False - - -class PatchedTSMultiResolutionDecoder(ppd.PatchedTimeSeriesDecoder): - """Extension of upstream decoder with multi-resolution support. - - This class keeps the upstream API intact, while enabling two optional - behaviors: - - scale embedding per token for low/high streams, - - an optional learned special token between streams. 
- """ - - def __init__(self, config: CiscoTsmMRConfig): - super().__init__(config) - self.config: CiscoTsmMRConfig - - # Multi-resolution Embedding Layer - if self.config.use_resolution_embeddings: - self.multi_resolution = nn.Embedding(num_embeddings=2, - embedding_dim=self.config.hidden_size) - - # Special Token between streams - if self.config.use_special_token: - self.special_token = nn.Parameter(torch.zeros(1, 1, self.config.hidden_size)) - nn.init.normal_(self.special_token, mean=0.0, std=0.02) - - - def _reverse_transform_segments( - self, - outputs: torch.Tensor, - stats_list: List[Tuple[torch.Tensor, torch.Tensor]], - indices_list: List[Tuple[int, int]], - ) -> torch.Tensor: - """Reverse-transform with per-timeseries stats. - - Args: - outputs: [B, N, P, Q] - stats_list: list of (mu, sigma) each shaped [B] - indices_list: matching list of (start_N, end_N) segment ranges over N - """ - B, N, _, _ = outputs.shape - device = outputs.device - dtype = outputs.dtype - - if len(indices_list) == 0: - return outputs - - # Build [S] tensors of segment starts/ends (S = number of streams) - starts = torch.tensor([s for (s, _) in indices_list], device=device) - ends = torch.tensor([e for (_, e) in indices_list], device=device) - S = starts.shape[0] - - # Per-batch stats stacked as [B, S] - mus = torch.stack([mu.to(dtype) for (mu, _) in stats_list], dim=1) # [B, S] - sigmas = torch.stack([sigma.to(dtype) for (_, sigma) in stats_list], dim=1) # [B, S] - - # Build boolean mask per segment over N: [S, N] - posN = torch.arange(N, device=device) - seg_mask_SN = ((posN.unsqueeze(0) >= starts.unsqueeze(1)) & - (posN.unsqueeze(0) < ends.unsqueeze(1))) # [S, N] - - # Expand to broadcast shapes: - # seg_mask: [1, S, N, 1, 1], mus/sigmas: [B, S, 1, 1, 1] - seg_mask = seg_mask_SN.unsqueeze(0).unsqueeze(-1).unsqueeze(-1).to(dtype) # [1, S, N, 1, 1] - mus_b = mus.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1) # [B, S, 1, 1, 1] - sigmas_b = sigmas.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1) # [B, S, 1, 1, 1] - - # Aggregate per-position parameters - mu_map = (mus_b * seg_mask).sum(dim=1) # [B, N, 1, 1] - sigma_map = (sigmas_b * seg_mask).sum(dim=1) # [B, N, 1, 1] - - # For positions not covered by any segment, keep outputs unchanged: sigma=1, mu=0 - covered = (seg_mask.sum(dim=1) > 0).to(dtype) # [1, N, 1, 1] - sigma_map = sigma_map + (1.0 - covered).expand(B, -1, -1, -1) # add 1 where uncovered - - return outputs * sigma_map + mu_map - - - def _postprocess_output( - self, - model_output: torch.Tensor, - horizon_len: int, - head: nn.Module, - num_outputs: int, - stats_list: list[tuple[torch.Tensor, torch.Tensor]], - indices_list: list[tuple[int, int]], - ) -> torch.Tensor: - """Postprocess output of stacked transformer.""" - - # B x N x (H.Q) - output_ts = head(model_output) - - # Reshape using view - b, n, _ = output_ts.shape - output_ts = output_ts.view(b, n, horizon_len, num_outputs) - - return self._reverse_transform_segments(output_ts, stats_list, indices_list) - - - def forward( - self, - input_ts: Union[List[torch.Tensor], torch.Tensor], - input_padding: Union[List[torch.LongTensor], torch.LongTensor], - freq: torch.Tensor, - ) -> torch.Tensor: - """Multi-resolution forward pass. - Args: - input_ts: list of batched tensors for coarse/fine resolution streams. - input_padding: list of batched paddings for coarse/fine resolution streams. - freq: batched tensor of frequency indices. 
- """ - num_outputs = len(self.config.quantiles) + 1 - - if isinstance(input_ts, torch.Tensor): - raise ValueError("PatchedTSMultiResolutionDecoder expects multi-resolution inputs as a list of tensors.") - - # Multi-resolution processing - ts_coarse, ts_fine = input_ts - pad_coarse, pad_fine = input_padding - - model_input_coarse, pad_coarse, stats_coarse, _ = super()._preprocess_input( - input_ts=ts_coarse, - input_padding=pad_coarse, - ) - model_input_fine, pad_fine, stats_fine, _ = super()._preprocess_input( - input_ts=ts_fine, - input_padding=pad_fine, - ) - - B = model_input_coarse.shape[0] - Ncoarse = model_input_coarse.shape[1] - Nfine = model_input_fine.shape[1] - D = model_input_coarse.shape[2] - device = model_input_coarse.device - - # Special Token between streams - if self.config.use_special_token: - spec_tok = self.special_token.to(device).expand(B, 1, D) - spec_pad = torch.zeros(B, 1, device=device, dtype=pad_coarse.dtype) - - model_input = torch.cat([model_input_coarse, spec_tok, model_input_fine], dim=1) # [B, N1+1+N2, D] - patched_padding = torch.cat([pad_coarse, spec_pad, pad_fine], dim=1) - - # Keep mask to drop the special token position after decoding - keep_mask = torch.ones(Ncoarse + 1 + Nfine, device=device, dtype=torch.bool) - keep_mask[Ncoarse] = False # special token index - spec_len = 1 - else: - model_input = torch.cat([model_input_coarse, model_input_fine], dim=1) # [B, N1+N2, D] - patched_padding = torch.cat([pad_coarse, pad_fine], dim=1) - keep_mask = None - spec_len = 0 - - # Multi-resolution Embedding - if self.config.use_resolution_embeddings: - mr_coarse = torch.zeros(Ncoarse, dtype=torch.long, device=device) - mr_spec = torch.zeros(spec_len, dtype=torch.long, device=device) # we use 0 for special token - mr_fine = torch.ones(Nfine, dtype=torch.long, device=device) - - mr_idx = torch.cat([mr_coarse, mr_spec, mr_fine], dim=0) # [N_total] - mr_idx = mr_idx.unsqueeze(0).expand(B, -1) # [B, N_total] - - mr_emb = self.multi_resolution(mr_idx) # [B, N_total, D] - model_input += mr_emb - - if freq.device != device: - freq = freq.to(device) - - f_emb = self.freq_emb(freq) # [B, 1, D] - model_input += f_emb - - model_output = self.stacked_transformer(model_input, patched_padding) - - # Project and apply per-segment reverse-transform - - indices_list = [ - (0, Ncoarse), - (Ncoarse + spec_len, Ncoarse + spec_len + Nfine), - ] - stats_list = [stats_coarse, stats_fine] - - output_min_all = self._postprocess_output( - model_output=model_output, - horizon_len=self.config.horizon_len, - head=self.horizon_ff_layer, - num_outputs=num_outputs, - stats_list=stats_list, - indices_list=indices_list, - ) - - if keep_mask is not None: - output_min_all = output_min_all[:, keep_mask, :, :] - - return output_min_all - - - def _trim_context(self, ts: torch.Tensor, pad: torch.Tensor, max_len: int) -> tuple[torch.Tensor, torch.Tensor]: - """Trim context to be aligned to patch boundaries and within max length. - Args: - ts: [B, T, C] input time series tensor. - pad: [B, T, 1] input padding tensor. - max_len: maximum allowed length. - Returns: - Trimmed (ts, pad) tensors. 
- """ - target_len = max(self.config.patch_len, (max_len // self.config.patch_len) * self.config.patch_len) - - if ts.shape[1] > target_len: - ts = ts[:, -target_len:, :] - pad = pad[:, -target_len:, :] - - rem = ts.shape[1] % self.config.patch_len - - if rem: - ts = ts[:, rem:, :] - pad = pad[:, rem:, :] - - return ts, pad - - - def decode( - self, - input_ts: Union[list[torch.Tensor], Tuple[torch.Tensor, torch.Tensor]], - paddings: Union[list[torch.Tensor], Tuple[torch.Tensor, torch.Tensor]], - freq: torch.LongTensor, - horizon_len: int, - agg_factor: int = 60, - offsets: List[float] = None, - scales: List[float] = None, - global_eps: float = 1e-8, - output_patch_len: int = 128, - ) -> List[Tuple[List[float], dict[str, List[float]]]]: - """Autoregressive Multiresolution Decoding. - - Args: - input_ts: list of [B, T1, C], [B, T2, C] tensors for low/high resolution streams. - paddings: list of [B, T1, 1], [B, T2, 1] paddings for low/high resolution streams. - freq: [B] tensor of frequency indices. - horizon_len: total forecast horizon length (in high-res steps). - agg_factor: aggregation factor from high-res to low-res (e.g., 60 for min->hr). - offsets: list of length B of denormalization offsets for high-res stream. - scales: list of length B of denormalization scales for high-res stream. - global_eps: small value to avoid division by zero during denormalization. - output_patch_len: number of high-res steps to decode per iteration. - - Returns: - A list of length B of tuples: - - mean forecast list of length `horizon_len`, - - dict of quantile forecasts, each a list of length `horizon_len`. - """ - if agg_factor <= 0: - raise ValueError("agg_factor must be positive for autoregressive decoding.") - - q_levels = self.config.quantiles - expected_q_plus_mean = len(q_levels) + 1 - expected_q_only = len(q_levels) - - if not isinstance(input_ts, (list, tuple)) or len(input_ts) != 2: - raise ValueError("Multi-resolution autoregressive decoding expects [low_res, high_res] inputs.") - - coarse_ts, fine_ts = input_ts - coarse_pad, fine_pad = paddings - - # Ensure 3D shapes - if coarse_ts.ndim == 2: - coarse_ts = coarse_ts.unsqueeze(-1) - if fine_ts.ndim == 2: - fine_ts = fine_ts.unsqueeze(-1) - if coarse_pad.ndim == 2: - coarse_pad = coarse_pad.unsqueeze(-1) - if fine_pad.ndim == 2: - fine_pad = fine_pad.unsqueeze(-1) - - device = fine_ts.device - batch_size = fine_ts.shape[0] - patch_len = self.config.patch_len - output_patch_len = output_patch_len or self.config.horizon_len - - # Offsets/scales for denormalization - offsets = [0.0] * batch_size if offsets is None else offsets - scales = [1.0] * batch_size if scales is None else scales - batch_offsets = torch.as_tensor(offsets, dtype=torch.float32, device=device).view(batch_size, 1) - batch_scales = torch.as_tensor(scales, dtype=torch.float32, device=device).view(batch_size, 1) - - # Keep working windows aligned and trimming to patch boundaries for performing decoding step. 
- max_ctx_len_coarse = max(patch_len, (coarse_ts.shape[1] // patch_len) * patch_len) - coarse_ts, coarse_pad = self._trim_context(coarse_ts, coarse_pad, max_ctx_len_coarse) - max_ctx_len_fine = max(patch_len, (fine_ts.shape[1] // patch_len) * patch_len) - fine_ts, fine_pad = self._trim_context(fine_ts, fine_pad, max_ctx_len_fine) - - remaining = horizon_len - mean_chunks = [] - quant_chunks = [] - - # Number of decode steps to perform - num_decode_patches = math.ceil(horizon_len / output_patch_len) - - for _ in range(num_decode_patches): - preds = self([coarse_ts, fine_ts], [coarse_pad.float(), fine_pad.float()], freq) - if preds.ndim != 4: - raise ValueError(f"Unexpected prediction rank: {preds.shape}") - - num_coarse_patches = coarse_ts.shape[1] // patch_len - num_fine_patches = fine_ts.shape[1] // patch_len - fine_patch_idx = num_coarse_patches + num_fine_patches - 1 - - if fine_patch_idx >= preds.shape[1]: - raise ValueError(f"Fine patch index {fine_patch_idx} out of range for preds shape {preds.shape}") - - fine_patch = preds[:, fine_patch_idx, :, :] - - if fine_patch.shape[1] < output_patch_len: - raise ValueError(f"Model horizon - {fine_patch.shape[1]} < requested output_patch_len {output_patch_len}") - - fine_patch = fine_patch[:, :output_patch_len, :] - - C = fine_patch.shape[2] - if C == expected_q_plus_mean: - mean_channel = fine_patch[..., 0] # [B, L] - quant_block = fine_patch[..., 1:] # [B, L, Q] - elif C == expected_q_only: - mean_channel = None - quant_block = fine_patch - else: - raise ValueError(f"Channel count {C} != {expected_q_plus_mean} or {expected_q_only}") - - if mean_channel is None: - mean_channel = quant_block.median(dim=-1).values # [B, L] - - step_taken = min(remaining, output_patch_len) - mean_denorm = mean_channel * (batch_scales + global_eps) + batch_offsets - quant_denorm = quant_block * (batch_scales.unsqueeze(-1) + global_eps) + batch_offsets.unsqueeze(-1) - mean_denorm = torch.nan_to_num(mean_denorm, nan=0.0, posinf=0.0, neginf=-0.0) - quant_denorm = torch.nan_to_num(quant_denorm, nan=0.0, posinf=0.0, neginf=-0.0) - - mean_chunks.append(mean_denorm[:, :step_taken]) - quant_chunks.append(quant_denorm[:, :step_taken, :]) - remaining -= step_taken - - # Append normalized minute predictions for the next step. - fine_append = mean_channel[:, :output_patch_len].unsqueeze(-1) - fine_ts = torch.cat([fine_ts, fine_append], dim=1) - fine_pad = torch.cat( - [fine_pad, torch.zeros((batch_size, output_patch_len, 1), device=device, dtype=fine_pad.dtype)], - dim=1) - - # Aggregate minute predictions into coarse stream (drop remainder < agg_factor). - agg_block_len = (output_patch_len // agg_factor) * agg_factor - if agg_block_len > 0: - agg_source = mean_channel[:, :agg_block_len] - agg_vals = agg_source.view(batch_size, -1, agg_factor).mean(dim=2).unsqueeze(-1) - coarse_ts = torch.cat([coarse_ts, agg_vals.to(coarse_ts.dtype)], dim=1) - coarse_pad = torch.cat( - [coarse_pad, torch.zeros((batch_size, agg_vals.shape[1], 1), device=device, dtype=coarse_pad.dtype)], - dim=1) - - # Keep contexts aligned and bounded. 
- fine_ts, fine_pad = self._trim_context(fine_ts, fine_pad, max_ctx_len_fine) - coarse_ts, coarse_pad = self._trim_context(coarse_ts, coarse_pad, max_ctx_len_coarse) - - if remaining <= 0: - break - - mean_full = torch.cat(mean_chunks, dim=1)[:, :horizon_len] - quant_full = torch.cat(quant_chunks, dim=1)[:, :horizon_len, :] - - mean_np = mean_full.cpu().numpy() - quant_np = quant_full.cpu().numpy() - - final_predictions = [] - for i in range(batch_size): - q_arr = np.transpose(quant_np[i], (1, 0)) # [Q, H] - final_predictions.append( - { - "mean": mean_np[i], - "quantiles": {str(q_levels[q_i]): q_arr[q_i] for q_i in range(q_arr.shape[0])} - } - ) - - return final_predictions diff --git a/orion/pipelines/pretrained/cisco/cisco_msl.json b/orion/pipelines/pretrained/cisco/cisco_msl.json deleted file mode 100644 index e4fe0c11..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_msl.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 21600 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_realadexchange.json b/orion/pipelines/pretrained/cisco/cisco_realadexchange.json deleted file mode 100644 index 6b8aac0a..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_realadexchange.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 3600 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_realawscloudwatch.json b/orion/pipelines/pretrained/cisco/cisco_realawscloudwatch.json deleted file mode 100644 index eebcc81d..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_realawscloudwatch.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 600 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_realtraffic.json b/orion/pipelines/pretrained/cisco/cisco_realtraffic.json deleted file mode 100644 index eebcc81d..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_realtraffic.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 600 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_realtweets.json b/orion/pipelines/pretrained/cisco/cisco_realtweets.json deleted file mode 100644 index eebcc81d..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_realtweets.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 600 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_smap.json b/orion/pipelines/pretrained/cisco/cisco_smap.json deleted file mode 100644 index e4fe0c11..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_smap.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 21600 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_ucr.json b/orion/pipelines/pretrained/cisco/cisco_ucr.json deleted file mode 100644 index f3ca6b04..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_ucr.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 300 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_yahooa1.json 
b/orion/pipelines/pretrained/cisco/cisco_yahooa1.json deleted file mode 100644 index 0fdb0776..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_yahooa1.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 1 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_yahooa2.json b/orion/pipelines/pretrained/cisco/cisco_yahooa2.json deleted file mode 100644 index 0fdb0776..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_yahooa2.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 1 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_yahooa3.json b/orion/pipelines/pretrained/cisco/cisco_yahooa3.json deleted file mode 100644 index 0fdb0776..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_yahooa3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 1 - } -} diff --git a/orion/pipelines/pretrained/cisco/cisco_yahooa4.json b/orion/pipelines/pretrained/cisco/cisco_yahooa4.json deleted file mode 100644 index 0fdb0776..00000000 --- a/orion/pipelines/pretrained/cisco/cisco_yahooa4.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { - "time_column": "timestamp", - "interval": 1 - } -} diff --git a/orion/primitives/cisco.py b/orion/primitives/cisco.py deleted file mode 100644 index b11ca9ee..00000000 --- a/orion/primitives/cisco.py +++ /dev/null @@ -1,111 +0,0 @@ -""" -This primitive an implementation of Cisco's Time Series Foundation Model for timeseries forecasting. - -The model implementation can be found at -https://arxiv.org/pdf/2511.19841 - -We use code from https://github.com/splunk/cisco-time-series-model -in this primitive, which can be found in the relative import path. -""" - -import torch -import numpy as np - -try: - from ..pipelines.pretrained.cisco.cisco_modeling import CiscoTsmMR, TimesFmHparams, TimesFmCheckpoint -except ImportError as ie: - ie.msg += ( - '\n\nIt seems like `cisco` cannot be imported.\n' - 'It is likely that relative import is failing. Please flag this issue. \n' - ) - raise - - -class Cisco: - """Cisco model for timeseries forecasting. - - Args: - window_size (int): - Window size of each sample. Default to 256. - step (int): - Stride length between samples. Default to 1. - pred_len (int): - Prediction horizon length. Default to 1. - repo_id (str): - Directory of the model checkpoint. Default to "cisco-ai/cisco-time-series-model-1.0-preview" - batch_size(int): - Size of one batch. Default to 32. - freq (int): - Frequency. TimesFM expects a categorical indicator valued in {0, 1, 2}. - Default to 0. - target (int): - Index of target column in multivariate case. Default to 0. - start_time (datetime): - Start time of the timeseries. Default to Jan 1, 2020 00:00:00. - time_interval (int): - Time interval between two samples in seconds. Default to 600. 
- """ - - def __init__( - self, - window_size=30720, # note that cisco expects a large window size because it uses long term context - pred_len=1, - repo_id="cisco-ai/cisco-time-series-model-1.0-preview", - batch_size=32, - target=0, - return_quantile=None, - ): - self.window_size = int(window_size) - self.pred_len = int(pred_len) - self.batch_size = int(batch_size) - self.target = int(target) - self.return_quantile = return_quantile - - # Match the model-card example - backend = "gpu" if torch.cuda.is_available() else "cpu" - hparams = TimesFmHparams( - num_layers=50, - use_positional_embedding=False, - backend=backend, - ) - ckpt = TimesFmCheckpoint(huggingface_repo_id=repo_id) - - self.model = CiscoTsmMR( - hparams=hparams, - checkpoint=ckpt, - use_resolution_embeddings=True, - use_special_token=True, - ) - - def predict(self, X): - """Forecast. - - Args: - X (ndarray): shape (n_windows, window_size, n_features) - Returns: - ndarray: shape (n_windows, pred_len) - """ - - n_windows = X.shape[0] - - outs = [] - for i in range(0, n_windows, self.batch_size): - x_batch = X[i:i + self.batch_size, :self.window_size, self.target].astype(np.float32) - - series_list = [x_batch[j] for j in range(x_batch.shape[0])] # x_batch.shape[0] could be lower than self.batch_size - forecast_list = self.model.forecast(series_list, horizon_len=self.pred_len) - preds = np.stack([f["mean"] for f in forecast_list], axis=0) - - outs.append(preds) - - return np.concatenate(outs, axis=0) - - - - -if __name__ == "__main__": - cisco_predictor = CiscoPredictor(window_size=256, pred_len=16, batch_size=32, target=0, return_quantile=None) - X = np.random.rand(100, 256, 10).astype(np.float32) - y = cisco_predictor.predict(X) - print(y.shape) # should be (100, 16) - print(y[:2]) \ No newline at end of file diff --git a/orion/primitives/jsons/orion.primitives.cisco.Cisco.json b/orion/primitives/jsons/orion.primitives.cisco.Cisco.json deleted file mode 100644 index 826a6bd8..00000000 --- a/orion/primitives/jsons/orion.primitives.cisco.Cisco.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "name": "orion.primitives.cisco.Cisco", - "contributors": [ - "Allen Baranov " - ], - "documentation": "https://arxiv.org/pdf/2511.19841", - "description": "Cisco Time Series Foundation Model for timeseries forecasting", - "classifiers": { - "type": "estimator", - "subtype": "regressor" - }, - "modalities": [], - "primitive": "orion.primitives.cisco.Cisco", - "produce": { - "method": "predict", - "args": [ - { - "name": "X", - "type": "ndarray" - } - ], - "output": [ - { - "name": "y_hat", - "type": "ndarray" - } - ] - }, - "hyperparameters": { - "fixed": { - "window_size": { - "type": "int", - "default": 30720 - }, - "pred_len": { - "type": "int", - "default": 1 - }, - "repo_id": { - "type": "str", - "default": "cisco-ai/cisco-time-series-model-1.0-preview" - }, - "batch_size": { - "type": "int", - "default": 32 - }, - "target": { - "type": "int", - "default": 0 - }, - "return_quantile": { - "type": "float", - "default": null - } - } - } -} From 014b12c013953ecd16a669d57f36e213ad08177c Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Fri, 13 Feb 2026 01:59:44 -0500 Subject: [PATCH 05/10] Addressing minor issues in PR --- .../pretrained/chronos2/chronos2.json | 4 +-- orion/primitives/chronos2.py | 28 ++----------------- setup.py | 3 ++ 3 files changed, 7 insertions(+), 28 deletions(-) diff --git a/orion/pipelines/pretrained/chronos2/chronos2.json b/orion/pipelines/pretrained/chronos2/chronos2.json index aee16aaf..e6ad52d0 100644 --- 
a/orion/pipelines/pretrained/chronos2/chronos2.json +++ b/orion/pipelines/pretrained/chronos2/chronos2.json @@ -10,12 +10,12 @@ "init_params": { "mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1": { "time_column": "timestamp", - "interval": 600, + "interval": 21600, "method": "mean" }, "mlstars.custom.timeseries_preprocessing.rolling_window_sequences#1": { "target_column": 0, - "window_size": 256 + "window_size": 250 }, "orion.primitives.timeseries_anomalies.find_anomalies#1": { "window_size_portion": 0.33, diff --git a/orion/primitives/chronos2.py b/orion/primitives/chronos2.py index aa27ab5c..0ff960e4 100644 --- a/orion/primitives/chronos2.py +++ b/orion/primitives/chronos2.py @@ -13,23 +13,7 @@ import torch import numpy as np import pandas as pd -# if sys.version_info < (3, 11): -# msg = ( -# '`timesfm` requires Python >= 3.11 and your ' -# f'python version is {sys.version}.\n' -# 'Make sure you are using Python 3.11 or later.\n' -# ) -# raise RuntimeError(msg) - -try: - from chronos import Chronos2Pipeline -except ImportError as ie: - ie.msg += ( - '\n\nIt seems like `chronos` is not installed.\n' - 'Please install `chronos` using:\n' - '\n pip install chronos' - ) - raise +from chronos import Chronos2Pipeline class Chronos2: @@ -125,12 +109,4 @@ def convert_to_df(self, x_batch, start_batch_at=0): }) rows = pd.DataFrame(rows) - return rows - - -if __name__ == "__main__": - chronos2 = Chronos2() - X = np.random.rand(100, 256, 10) - y = chronos2.predict(X) - print(y.shape) - print(y) \ No newline at end of file + return rows \ No newline at end of file diff --git a/setup.py b/setup.py index 66348335..7c382b77 100644 --- a/setup.py +++ b/setup.py @@ -48,6 +48,9 @@ "timesfm[torch]>=1.2.0,<1.5;python_version>='3.11'", "jax;python_version>='3.11'", + #chronos2 + 'chronos', + 'wrapt>=1.14,<1.15', ] From 81de3ca881a59004203b8de48c8dcda4d919b509 Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Mon, 16 Feb 2026 15:24:05 -0500 Subject: [PATCH 06/10] Cleaned tutorial notebook with multivariate dataset --- tutorials/pipelines/chronos2.ipynb | 253 +++++++++++++++++++++++------ 1 file changed, 202 insertions(+), 51 deletions(-) diff --git a/tutorials/pipelines/chronos2.ipynb b/tutorials/pipelines/chronos2.ipynb index 0d21f507..5350cdfb 100644 --- a/tutorials/pipelines/chronos2.ipynb +++ b/tutorials/pipelines/chronos2.ipynb @@ -44,7 +44,26 @@ " \n", " \n", " timestamp\n", - " value\n", + " 0\n", + " 1\n", + " 2\n", + " 3\n", + " 4\n", + " 5\n", + " 6\n", + " 7\n", + " 8\n", + " ...\n", + " 15\n", + " 16\n", + " 17\n", + " 18\n", + " 19\n", + " 20\n", + " 21\n", + " 22\n", + " 23\n", + " 24\n", " \n", " \n", " \n", @@ -52,38 +71,143 @@ " 0\n", " 1222819200\n", " -0.366359\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " ...\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " \n", " \n", " 1\n", " 1222840800\n", " -0.394108\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " ...\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " \n", " \n", " 2\n", " 1222862400\n", " 0.403625\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " ...\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " \n", " \n", " 3\n", " 
1222884000\n", " -0.362759\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " ...\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " \n", " \n", " 4\n", " 1222905600\n", " -0.370746\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " ...\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " \n", " \n", "\n", + "
5 rows × 26 columns
\n", "" ], "text/plain": [ - " timestamp value\n", - "0 1222819200 -0.366359\n", - "1 1222840800 -0.394108\n", - "2 1222862400 0.403625\n", - "3 1222884000 -0.362759\n", - "4 1222905600 -0.370746" + " timestamp 0 1 2 3 4 5 6 7 8 ... 15 \\\n", + "0 1222819200 -0.366359 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", + "1 1222840800 -0.394108 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", + "2 1222862400 0.403625 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", + "3 1222884000 -0.362759 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", + "4 1222905600 -0.370746 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", + "\n", + " 16 17 18 19 20 21 22 23 24 \n", + "0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", + "1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", + "2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", + "3 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", + "4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", + "\n", + "[5 rows x 26 columns]" ] }, "execution_count": 2, @@ -92,11 +216,10 @@ } ], "source": [ - "signal_name = 'S-1'\n", + "signal_name = 'multivariate/S-1'\n", "\n", "data = load_signal(signal_name)\n", - "\n", - "anomalies = load_anomalies(signal_name)\n", + "data = data[:10000]\n", "\n", "data.head()" ] @@ -120,13 +243,13 @@ "text": [ "/home/baranov/miniconda/envs/orion310/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", " from .autonotebook import tqdm as notebook_tqdm\n", - "2026-02-10 01:37:01.358594: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used.\n", - "2026-02-10 01:37:01.392927: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", - "2026-02-10 01:37:01.392954: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", - "2026-02-10 01:37:01.392990: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", - "2026-02-10 01:37:01.400419: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", + "2026-02-16 15:13:33.647160: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used.\n", + "2026-02-16 15:13:33.679684: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", + "2026-02-16 15:13:33.679717: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", + "2026-02-16 15:13:33.679750: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", + "2026-02-16 15:13:33.686499: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", "To enable the following instructions: AVX2 AVX512F FMA, in other 
operations, rebuild TensorFlow with the appropriate compiler flags.\n", - "2026-02-10 01:37:02.090769: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n" + "2026-02-16 15:13:34.373503: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n" ] } ], @@ -149,15 +272,12 @@ "\n", "| Primitive | Parameter | Default | Description |\n", "|-----------|-----------|---------|-------------|\n", - "| time_segments_aggregate | `interval` | 600 | Aggregation interval in seconds |\n", + "| time_segments_aggregate | `interval` | 21600 | Aggregation interval in seconds |\n", "| time_segments_aggregate | `method` | \"mean\" | Aggregation method (mean, median, sum) |\n", - "| rolling_window_sequences | `window_size` | 256 | Context window size |\n", - "| **Chronos2** | `window_size` | 256 | Must match rolling_window_sequences |\n", + "| rolling_window_sequences | `window_size` | 250 | Context window size |\n", "| **Chronos2** | `pred_len` | 1 | Prediction horizon length |\n", - "| **Chronos2** | `repo_id` | \"amazon/chronos-2\" | HuggingFace model repository |\n", "| **Chronos2** | `batch_size` | 32 | Batch size for inference |\n", "| **Chronos2** | `target` | 0 | Target column index (multivariate) |\n", - "| **Chronos2** | `time_interval` | 600 | Time interval between samples (seconds) |\n", "| find_anomalies | `window_size_portion` | 0.33 | Portion of data for window |\n", "| find_anomalies | `fixed_threshold` | True | Use fixed vs dynamic threshold |" ] @@ -171,22 +291,19 @@ "hyperparameters = {\n", " \"mlstars.custom.timeseries_preprocessing.time_segments_aggregate#1\": {\n", " \"time_column\": \"timestamp\",\n", - " \"interval\": 600, \n", + " \"interval\": 21600, \n", " \"method\": \"mean\" \n", " },\n", " \n", " \"mlstars.custom.timeseries_preprocessing.rolling_window_sequences#1\": {\n", " \"target_column\": 0,\n", - " \"window_size\": 256 \n", + " \"window_size\": 250\n", " },\n", " \n", " \"orion.primitives.chronos2.Chronos2#1\": {\n", - " \"window_size\": 256, \n", " \"pred_len\": 1, \n", - " \"repo_id\": \"amazon/chronos-2\", \n", " \"batch_size\": 32, \n", " \"target\": 0, \n", - " \"time_interval\": 600 \n", " },\n", " \n", " \"orion.primitives.timeseries_anomalies.find_anomalies#1\": {\n", @@ -280,11 +397,30 @@ "name": "stdout", "output_type": "stream", "text": [ - "entry at 1222819200 has value [-0.36635895]\n", - "entry at 1222819800 has value [nan]\n", - "entry at 1222820400 has value [nan]\n", - "entry at 1222821000 has value [nan]\n", - "entry at 1222821600 has value [nan]\n" + "entry at 1222819200 has value [-0.36635895 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. ]\n", + "entry at 1222840800 has value [-0.39410778 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. ]\n", + "entry at 1222862400 has value [0.4036246 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + "entry at 1222884000 has value [-0.36275906 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. ]\n", + "entry at 1222905600 has value [-0.37074649 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 
]\n" ] } ], @@ -377,10 +513,10 @@ "name": "stdout", "output_type": "stream", "text": [ - "X shape = (365073, 256, 1)\n", - "y shape = (365073, 1)\n", - "index shape = (365073,)\n", - "target index shape = (365073,)\n" + "X shape = (9750, 250, 25)\n", + "y shape = (9750, 1)\n", + "index shape = (9750,)\n", + "target index shape = (9750,)\n" ] } ], @@ -439,7 +575,7 @@ { "data": { "text/plain": [ - "(365073, 1)" + "(9750, 1)" ] }, "execution_count": 12, @@ -535,7 +671,8 @@ { "data": { "text/plain": [ - "array([[1.2229836e+09, 1.2231516e+09, 5.3378149e-01]])" + "array([[1.22821920e+09, 1.22945040e+09, 9.14337515e-01],\n", + " [1.40380560e+09, 1.40596560e+09, 2.28091527e-03]])" ] }, "execution_count": 15, @@ -625,7 +762,7 @@ { "data": { "text/plain": [ - "[(1222983600.0, 1223151600.0)]" + "[(1228219200.0, 1229450400.0), (1403805600.0, 1405965600.0)]" ] }, "execution_count": 17, @@ -648,7 +785,7 @@ { "data": { "text/plain": [ - "(None, 1, 1, 0)" + "(None, 1, 0, 1)" ] }, "execution_count": 18, @@ -669,21 +806,10 @@ "execution_count": 19, "metadata": {}, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Invalid value encountered for precision 0.0/ recall 0.0.\n", - "Traceback (most recent call last):\n", - " File \"/home/baranov/projects/Orion/orion/evaluation/common.py\", line 70, in _f1_score\n", - " return 2 * (precision * recall) / (precision + recall)\n", - "ZeroDivisionError: float division by zero\n" - ] - }, { "data": { "text/plain": [ - "nan" + "0.6666666666666666" ] }, "execution_count": 19, @@ -694,6 +820,31 @@ "source": [ "contextual_f1_score(ground_truth, anomalies, start = start, end = end, weighted=False)" ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiMAAAGvCAYAAACJsNWPAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAbf5JREFUeJzt3Xl8E2X+B/BPerdAKaW0XIXKJZccgtSKCEgRXcXFVZd1VZCf4KpU0equsCqHovVEVkRRFEVdAe9jQVSq3CBaTrnkvtuCQE96JfP7o006M5mZzOSapPm8X6++oOkkeZJMZr7zPN/n+1gEQRBAREREZJIwsxtAREREoY3BCBEREZmKwQgRERGZisEIERERmYrBCBEREZmKwQgRERGZisEIERERmYrBCBEREZkqwuwG6GGz2XDy5Ek0adIEFovF7OYQERGRDoIgoKSkBK1bt0ZYmHr/R1AEIydPnkRqaqrZzSAiIiI3HDt2DG3btlX9e1AEI02aNAFQ+2Li4+PNa0hREfDJJ/W/33or0LSpee0hIiLvsB/fY2OB6GigshK4cIHHeQ8VFxcjNTXVcR5XExTBiH1oJj4+3txgRBCkO2p8fO0PEREFN/vxvVkzoFEjoKys9nYe573CVYoFE1iJiIjIVAxGiIiIyFQMRoiIiMhUDEaIiIjIVAxGiIiIyFQMRoiIiMhUDEaIiIjIVAxGiIiIyFQMRoiIiMhUDEaIiIjIVAxGiIiIyFQMRoiIiMhUIR2MbD9+Ho98vA0FxRVmN4WIiChkBcWqvb5y42vrAACnii7gowmXm9waIiKi0BTSPSN2B06Xmt0EIiKikMVgBIAgmN0CIiKi0MVghIiIiEzFYISIiIhMxWAEAEdpiIiIzMNghIiIiEzFYISIiIhMxWAEnE1DRERkJgYjREREZCoGI0RERGQqBiNERERkKgYjREREZCoGIwBYaYSIiMg8DEaIiIjIVAxGiIiIyFQMRsA6I0RERGZiMEJERESmYjBCREREpmIwQkRERKZiMEJERESmYjACVhkhIiIyE4MRIiIiMhWDESIiIjIVgxEAAguNEBERmYbBCBEREZmKwQgRERGZisEIERERmcqtYGTu3LlIS0tDTEwM0tPTsWnTJs3tz58/j4kTJ6JVq1aIjo5Gly5dsGzZMrcaTERERA1LhNE7LFmyBNnZ2Zg3bx7S09Mxe/ZsjBgxAnv37kVycrLT9lVVVRg+fDiSk5Px6aefok2bNjhy5AgSEhK80X6vYPoqERGReQwHI7NmzcKECRMwbtw4AMC8efOwdOlSLFiwAJMnT3bafsGCBTh79izWr1+PyMhIAEBaWppnrSYiIqIGw9AwTVVVFfLy8pCZmVn/AGFhyMzMxIYNGxTv8/XXXyMjIwMTJ05ESkoKevbsiWeffRZWq1X1eSorK1FcXCz5ISIioobJUDBy5swZWK1WpKSkSG5PSUlBfn6+4n0OHjyITz/9FFarFcuWLcOTTz6Jl19+GTNnzlR9npycHDRt2tTxk5qaaqSZhrHMCBERkXl8PpvGZrMhOTkZb731Fvr164fRo0fj8ccfx7x581TvM2XKFBQVFTl+jh075utmEhERkUkM5YwkJSUhPDwcBQUFktsLCgrQsmVLxfu0atUKkZGRCA8Pd9zWrVs35Ofno6qqClFRUU73iY6ORnR0tJGmeYQVWImIiMxjqGckKioK/fr1Q25uruM2m82G3NxcZGRkKN5n4MCB2L9/P2w2m+O233//Ha1atVIMRIiIiCi0GB6myc7Oxvz587Fw4ULs3r0b9913H8rKyhyza8aMGYMpU6Y4tr/vvvtw9uxZTJo0Cb///juWLl2KZ599FhMnTvTeq/CQxWIxuwlEREQhy/DU3tGjR+P06dOYOnUq8vPz0adPHyxfvtyR1Hr06FGEhdXHOKmpqfjuu+/w8MMPo1evXmjTpg0mTZqExx57zHuvwkMcpiEiIjKP4WAEALKyspCVlaX4t5UrVzrdlpGRgY0bN7rzVERERNTAcW0aIiIiMhWDEbAcPBERkZkYjBAREZGpGIwA7BohIiIyEYMRIiIiMhWDEQBgmREiIiLTMBgBOExDRERkIgYjREREZCoGI2DHCBERkZkYjBAREZGpGIwQERGRqRiMgAvlERERmYnBCBEREZmKwQgRERGZisEIERERmYrBCBEREZmKwQhYZ4SIiMhMDEaIiIjIVAxGiIiIyFQMRgCwzAgREZF5GIwQERGRqRiMEBERkakYjBAREZGpGIwQERGRqRiMABBYaYSIiMg0DEaIiIjIVAxGiIiIyFQMRsA6I0RERGZiMEJERESmYjBCREREpmIwAq7aS0REZCYGI0RERGQqBiNERERkKgYjREREZCoGI0RERGQqBiMAM1iJiIhMxGCEiIiITMVghIiIiEzFYARctZeIiMhMDEaIiIjIVAxGiIiIyFQMRoiIiMhUbgUjc+fORVpaGmJiYpCeno5Nmzapbvvee+/BYrFIfmJiYtxuMBERETUshoORJUuWIDs7G9OmTcPmzZvRu3dvjBgxAoWFhar3iY+Px6lTpxw/R44c8ajR3iYwf5WIiMg0hoORWbNmYcKECRg3bhy6d++OefPmIS4uDgsWLFC9j8ViQcuWLR0/KSkpHjWaiIiIGg5DwUhVVRXy8vKQmZlZ/wBhYcjMzMSGDRtU71daWor27dsjNTUVf/7zn7Fz507N56msrERxcbHkh4iIiBomQ8HImTNnYLVanXo2UlJSkJ+fr3ifiy++GAsWLMBXX32FDz/8EDabDVdccQWOHz+u+jw5OTlo2rSp4yc1NdVIMw3jKA0REZF5fD6bJiMjA2PGjEGfPn0wePBgfP7552jRogXefPNN1ftMmTIFRUVFjp9jx475uplERERkkggjGyclJSE8PBwFBQWS2wsKCtCyZUtdjxEZGYm+ffti//79qttER0cjOjraSNOIiIgoSBnqGYmKikK/fv2Qm5vruM1msyE3NxcZGRm6HsNqtWLHjh1o1aqVsZYSERFRg2SoZwQAsrOzMXbsWPTv3x8DBgzA7NmzUVZWhnHjxgEAxowZgzZt2iAnJwcA8NRTT+Hyyy9Hp06dcP78ebz44os4cuQIxo8f791XQkREREHJcDAyevRonD59GlOnTkV+fj769OmD5cuXO5Jajx49irCw+g6Xc+fOYcKECcjPz0ezZs3Qr18/rF+/Ht27d/feq/CQwEIjREREpjEcjABAVlYWsrKyFP+2cuVKye+vvPIKXnnlFXeehoiIiEIA16YhIiIiUzEYAeuMEBERmYnBCBEREZmKwQi4UB4REZGZGIwQERGRqRiMALBYzG4BERFR6GIwAg7TEBERmYnBCBEREZmKwQgRERGZisEIERERmYrBCBEREZmKwQgRERGZisEIERERmYrBCBEREZmKwQgRERGZisEIERERmYrBCBEREZmKwQgRERGZisEIERERmYrBCBEREZmKwQgRERGZisEIERERmYrBCBEREZmKwQgRERGZisEIERERmYrBCBEREZmKwQgRERGZisEIER
ERmYrBCBEREZmKwQgRERGZisEIERE1WLtOFuPPr63Fmn2nzW4KaWAwQkREDdb/vfcLth0vwp3vbDK7KaSBwQgRETVYp0srzW4C6cBghIiIGiyrTTC7CaQDgxEiIiIyFYMRIiIiMhWDESIiCgk2DtkELAYjREQUEkoqasxuAqlgMEJERCGhrIrBSKBiMEJERCGhxsphmkDFYISIiEJCjc1mdhNIBYMRIiIKCaw5ErjcCkbmzp2LtLQ0xMTEID09HZs26Suzu3jxYlgsFowaNcqdpyUiInJbDYORgGU4GFmyZAmys7Mxbdo0bN68Gb1798aIESNQWFioeb/Dhw/j0UcfxaBBg9xuLBERkbvYMxK4DAcjs2bNwoQJEzBu3Dh0794d8+bNQ1xcHBYsWKB6H6vVittvvx0zZsxAhw4dPGowERGRO6qtzBkJVIaCkaqqKuTl5SEzM7P+AcLCkJmZiQ0bNqje76mnnkJycjLuvvtuXc9TWVmJ4uJiyQ8REZEREz/aLPmdPSOBy1AwcubMGVitVqSkpEhuT0lJQX5+vuJ91q5di3feeQfz58/X/Tw5OTlo2rSp4yc1NdVIM4mIiLB0+ynJ79Wc2huwfDqbpqSkBHfeeSfmz5+PpKQk3febMmUKioqKHD/Hjh3zYSuJiCgUsGckcEUY2TgpKQnh4eEoKCiQ3F5QUICWLVs6bX/gwAEcPnwYI0eOdNxmq5vnHRERgb1796Jjx45O94uOjkZ0dLSRphEREWmqZp2RgGWoZyQqKgr9+vVDbm6u4zabzYbc3FxkZGQ4bd+1a1fs2LEDW7dudfzceOONGDp0KLZu3crhFyIi8hsrh2kClqGeEQDIzs7G2LFj0b9/fwwYMACzZ89GWVkZxo0bBwAYM2YM2rRpg5ycHMTExKBnz56S+yckJACA0+1ERES+xDojgctwMDJ69GicPn0aU6dORX5+Pvr06YPly5c7klqPHj2KsDAWdiUiosDCcvCBy3AwAgBZWVnIyspS/NvKlSs17/vee++585REREQeYQJr4GIXBhERhQRO7Q1cDEaIiKjBqai2Ot1m5TBNwGIwQkREDc7+wlKn29gzErgYjBARUYNjsTjfxpyRwMVghIiIGpwwhWhkX2GJCS0hPRiMEBn024kilFfVmN0MItKgFIx8uPGoCS0hPRiMEBmwbMcp3DBnLR5ctMXsphCRhjCFYRoKXAxGiAx4d90hAMCK3YUmt4SItFiUkkYoYDEYITLAAh7giILBsXPlZjeBDGAwQmQEYxGioDDu3V/MbgIZwGCEyIBQjEW+2XYS7284bHYziDw2qHOS2U0gFW6tTUMUqpQy9Bu6B+qSda/q3AJpSY1Mbg2R++JjI81uAqlgz0gQEwQW8PG3UF6Q+mx5ldlNIPKIlRVYA1YIH1qD24nzF5CR8yNeX7nf7KaElFBOYGXwS8GuhhVYAxaDkSB193u/IL+4Ai8s32t2U0JKqI3SiAMQK9cYoyDHhfICF4ORILUnn2WNzRBqtQvEnSFc14OC3U97T6OwuMLsZpACBiNEBoRWKAKIww8O01BDcP9/N5vdBFLAYITIgBDrGIFNFICwY4Qagl+PnDO7CaSAwQiRAaE2tVccjFjZM0JEPsJgJEjdfGlbs5sQkkJt8S1x/GFj1wgR+QiDkSAVEWpnxYARWu+7JBgJ4Z6Rak4lIvIpBiNBKozBiClC7W1nzgiw/sAZdH78W0z+bLvZTSFqsBiMBCn2jJgjxFJGpDkjIRqN/H3+zwCAxb8cM7klpNe5MlYLDjYMRoJUuCgYuVBlxbIdp1BSUW1iixqmr7aewLWzV+PwmTIAwNp9Z0xukX9V1tQPT4TyMA0Fl6xFnL4bbBiMBCnxFfrUr37D/f/djHs/zDOvQQ3UpMVbsSe/BP+q66Ivq7Ka3CL/euCjLY7/KwUjH248gq+2nvBnk4hcWrf/D7ObQAZx1d4gJR6m+STvOAB+AX2p+EI1KmtCKxABgA0H6/cp+SjNyfMX8MSXvwEAbuzdOuSq0xKR97BnJEidLGJJY3+yCQKOnb0guS3UKpLKp/aWVtbU/y203goi8jIGI0Fq6fZTZjchpNSu9ik944baCViewGrR+FtD0iGpkdlNIGrwGIy46d3ixpjy7f6QuzoOVbUnW4vCbaGjRrbiqXhYpiG/F1ERPEwS+Rq/ZW6acS4Ri7bmY/0B/+dpXAixJMpAUF1jQ+NoaYpVQz4BK6mRvV7x7PKGXCo+1JYAIDIDgxEPmTGddufJIr8/p6/kHTmHzFmrsPr306rbVNXYUFHt/wBMXHXzZFGF02yS8xdCq5aBPPgSTy+3WhtuMBLOmj5EPsdgxENmXBzfMm+D/5/UR+5852fsLyzFmAWbANQGd+MX/iqZLjrw+R9xyfTv/B6QLP8tX/K7PBjRCqAaohqrvGek/iRdbWu45dLZMRJ85PFj33YJprSD9GMw4qFQ66pXMn/1QTy4aIvie/H2moO47j9rcFalImK5bMjpzVUHsWJ3ASYt3gqgdsbK6ZJKVFsFHPmj3Ott1/LuukOS3+UjEdUNuDdAiTxnRPx+NNTvwT8/2YbtxxtOT6QvHDxditd+3CeZXWU2eW/WvDv6mdQS0ovBiIdYlRJ4ZtlufL3tJHJ3Fzj9bebS3dh9qhhzftyn67H2F5ZKfhef5Pz9Xourjyo9f0M9AauR54wIotlF8r8Fi7LKGry95iCOna0PdFfsKsD3O2t7xew1fEhd5qxVeOn735GzbLfZTXGQByMcagt8DEbc8PrZOMf/GYvU07oy0jvEknf0nOR38TlO6b3efvw8/vnJNhQWe7/uivwAJj/fhloRNHleiPj9CNackWeX7cbMpbtx/atrANQmh49//1fc80Eel1fQyb4f/HL4rLkNEQmXja1xLa/Ax2BEh6LyahSLDkwvnGni+L/S1frHvxzDzwd9M8tGXngqkGg1TS1okx8j5AcR6aqxzg9y42vr8EnecTzyyTbd7dTr/iEdVdsCAM8u2+PV3pGjf5Rj4HM/4p21h1xvbAKnnhHR+xGsOSP22XDFFbWBdJUoaVk+hEjatL//gl/LIMhXNecq54GPwYiC/YWl+M+KfSipqC0B3vup79Fr+veKJ55z5dKrp81Hz+Ffn23H6Lc2qj7+mn2nkTZ5qVuLrn0ZwOuA2GwCKqqtiu+T2hCLvIS4vDdC/Fhax7I1+85ggZdP4o1kU3mVDqY7Tngvn+CVFb/jxPkLePp/u7z2mN60/Ld8yXsg6RkJ4CBZi/wcteGAuPy982uqqLYi78jZoH29vqT2HRcEAaPf3IhRr6/328WU/DjCnpHAx2BEQeasVXhlxe94ZuluFBZXOm6/UG1FXkWUZNuZS6UnDvHYs5o736mdOXLHOz8bbtsGE+qa6FVSWYNe0793dHmLqR2D5Cd456ER/TkjT3n5JC5vs9Jr2OjFHrBAr2ext6AEf3ljveN38TCVfKaNLx04XYryqhpsP34e1/1njUcrKcv3N/Fik0qv6f7/bsbNb2zAvFUH3H7Ohurg6TLF28uqrNh0+Cy2HTuPUz4YT
lUi/y4xZyTwMRjR8OuRc5LuPZsA3FzQUrKN/Pwo7tpVm0HiiUA7X50vr3+NP+0pRJXVhj35JU7bqQUS8hN822ax0r+Lev/9fS0qD5SUXkON1XvDE+FB8G3ccvS84/+f5dX30sln2vju+c9h2MurkPnyKoxdsAm7TxW7FdTbaQWA185e7XTbj3sKAQDvrT/s9nOGGvE77K+hGnnsEREWBF+uEMdPSINNECQ7tdHvkTdPVHa+uHr+ausJ3PLGeqck0N9OFGH5b9pr4Iint2oFX3rfu2HdUhz/t9kESQDg79L78qdTOt96c3qvBQEWabpQJkpY9tdsGnvtl5NFFSi64HmCqdYVc5lGzoi7ww1nSisx45ud2KsQsDdU0u+wv55VljMSXF+tkMRgRIMgSBMq9ZS8Fm/vi++dL3pGJi3eil+PnMPMpdKpeTfMWYt7P9yMbcfOq95XfDDfdapYdbsT5y+o/k3yeLIS41uO1c+u8ffUXsFpYTz9uTDu0PPZ1lhtXjkJe4P4YtNfORTinkpxvtFCN3sq3O2+d7f8/eTPduDddYcxQqHXpaEyI7dZ/rHKc9Mo8LgVjMydOxdpaWmIiYlBeno6Nm3apLrt559/jv79+yMhIQGNGjVCnz598MEHH7jdYH+yCYIkwN5ZUKq+sZ1oe1+cPH35pTpXrtyzIa/9IaZ2LJf3Ypw4pzMYEZcYtwn4+WD9dEF/5iUAzgdRpY8zqXG0020fbDyCP7+21vAwnZ7P9oY5a9F7xvco8NPYu5ZretQPWVb7oBdQiTjYF79b077eidMllc53cMHdnkatnpEtR89h3X7lPJZdDWgpByVZH212uk0cuG3VuLDxpkDPvyJnhoORJUuWIDs7G9OmTcPmzZvRu3dvjBgxAoWFhYrbJyYm4vHHH8eGDRuwfft2jBs3DuPGjcN3333nceO9acvRc5i/+iD2FdR3n9oEQdK9cefinYr3PXymDGfLqpB35JxsfNR5W0+vIPdo9D54Si120gqq1P4kf516cwrEJ+QamzQY9PcMBvmzjXp9ndM2HVo4Ly//5Je/YdvxIryaW1/o7cT5CxAEAWfLqnDnOz/j620nne6nlDNSbbVJTnz2fJzc3crfN3+KFjXYnc/GZhPw+Bc7sGjTUd33EfeMyE84y3ZoDykqcffkqPVyb3p9PW5/+2fF2jfh4Q37JPm/7c6fgfj48dNe/+y3HJYJPhGuN5GaNWsWJkyYgHHjxgEA5s2bh6VLl2LBggWYPHmy0/ZDhgyR/D5p0iQsXLgQa9euxYgRI9xrtQ/c9HrtLIFnRFUEbTZ9Qy3//HQbdpwoQkW1DY8M7+K4XekAPWnxFrfbuOHAH9gsSiD0NrUTitZ7oPa3GpuAiPD63+Ur3qoRn2CsVkHyu7+rfOqpuKrVJnsRuA82HsGTX/6GcQPTUG21Yc2+M1iz7wxu7N1asr385FptteGqF35CYqMoLH1wkLQtHva6lVXWOE1d1kM8RVLcAnc+m1X7TuO/P9cGIrcNaKfrPpWi4nnyi99DZ5Rnc6jxZK0jPb2eBcWVSI6Pkdwmr6MjVlVjwyOfbMOgTkn462Wpbrct0IiDaX/1brrqZbTZBNYeCTCGekaqqqqQl5eHzMzM+gcIC0NmZiY2bHC9eJsgCMjNzcXevXtx1VVXqW5XWVmJ4uJiyY8ZBEHQddA5X16NiuraK/+VosXTqhS6rpWuHPT6YZdzuXU5T5I8VV+rxkOq3afGJi1ydFlaotM2Z0ql3epjF2ySnGBqbDZJT5M4GFCrfurNJFc9D6VVedTeXnuZ7HfXHcaHG+t7AeRDG+LX98LyPfjtRBFOFVVg58lip9d1UmcOjpJ31h5Cj2nfSRYjlFu77wyeWeo8VVocdIg/e3cqsJZUGF/L5M3VBx3/16pJo4cnQ0taNTUc7VHYRusE+Pnm4/hm20n867PtbrcrEInfh6+3nUTeEd9XanU1eUYrv43MYSgYOXPmDKxWK1JSUiS3p6SkID8/X+VeQFFRERo3boyoqChcf/31mDNnDoYPH666fU5ODpo2ber4SU015yrBJug7IYmvyMQH6zvf/lky48BTeqZ+yo/HF6qs+HzzcV35C14dprEKLhdS2yLr5Vn1+2lM+XyH5D6JjerrutgDnOlf78TFTyzHrpPOBxRv9p7oCWzkz/fUN/Un8At1szHUroYHPvej5H0RXzW+vvIA3l5TX8RNPmvnjZXu17mwF1WzL0ao5I53fsZ80fOLrazrahe/dHem9np6YSrvSTL62XuSV6D2csW7jNL3Rq0WBwD81kDzSeTf/Zvf8P2q464+W38uullSUY1nl+3GDi64qMkvs2maNGmCrVu34pdffsEzzzyD7OxsrFy5UnX7KVOmoKioyPFz7NgxfzTTiVVnz0iN5IRSf5Q6WVSB9GdzvdYePd2K8pPC00t3IfvjbbhTRy2G7SfOK96udQUrn3Eibsfu/PpgQSkYcRVc1dgEdG8d7/i92mrDxU8sd9R4eGXF70730TrYG6Xn1CZ+XWWVNVggWunXfkWodlwsLKnEH2X1vUPyfW2n6OSk1Mtmlrve/QWAtL3uBIHiE8YLy42X1pevhWR0uq28YN1L3+3VfV+1YTJPpqKLaxQVB9i6OBsP/oHb3tqomcxuJ3/d+/Qk/nuZq2DEXwnXAPDid3vx1uqDGPnaWr89ZzAyFIwkJSUhPDwcBQXS4YKCggK0bNlS5V61QzmdOnVCnz598Mgjj+CWW25BTk6O6vbR0dGIj4+X/JjBvny9EfIDqjeW1bbaBNRYbbq6wuWBw//qEiV3KvQiyNmHmuSe0ViNU+14W2MTMH7hr47flQ7errrprTZBUtH294ISyUlZ6blXKKwc7K6qGtcHLPHrkm9vPzlqTR+1372kotpphVhx8KmnLf4mPunsOF5kOJgQnzBeX3kAn2/2bIVcpWRiLXeL9k8AeO2n/brvq6eIn9H4LEoUnfea/r2uas7+8re3NmLDwT9wzwe/utxW/rpX7zutvKEPuer08iRfyKhQqinjCUPBSFRUFPr164fc3PqrfZvNhtzcXGRkZOh+HJvNhspK49Pw/M0m1Ce26uXtiFsQBHT89zJ0evxbvK1j7ZX+M1dIfo/wcVlPtWAk/dlcnCqqn02gnMy7VfOxB73wEx77rH7YxvnK1/kxvXnS1rMqr7gnbLNsxWGrIOCXw2ed1i8Ss79/ikGUeBgkgHpG7MSdcG+uPognv/rN0P3lQdoxndO/1bRtFud6Iy/RM6RptFs+QjbT5qutJ3Di/AX85sX1jzyVX+R6Srn4GHihympKtVqlnpEberVy/H+yaDjY1+SfKykzfKbKzs7G/PnzsXDhQuzevRv33XcfysrKHLNrxowZgylTpji2z8nJwQ8//ICDBw9i9+7dePnll/HBBx/gjjvu8N6r8BF36oQc8OIwAeDZdNYzpZU+KUkvpvc98kbNFfkjKL014gPh8XPleHfdIZRXudc7peetn6txNV1WWYNb52mPj9t7VpQOnuKrt+oAXJhN/pl+9LPzFN2Simq8sfIAjiqM0cvj
ZE9rUEz8aDP+t915yrQ/id+S11dK940/SrUvwOQly6222ryiG+asDZheEj3HI/GQ3Uvf7/Vj1dV6F0RDXvdc1QEAMOe2vv5vCFjzRC/Dwcjo0aPx0ksvYerUqejTpw+2bt2K5cuXO5Jajx49ilOn6meMlJWV4f7770ePHj0wcOBAfPbZZ/jwww8xfvx4770KH/HXCpNaPGnB1S+tVP3bhSorPss77jSjBQCO/FGG4bNWqd7PnfZ5Y0qf/OSnFOCIg5EbX1uHGd/swrMaw0xy246dd8wy0RNAaQWfvxw+p/o3u3N1waLSAeuk6CpU3jPSJiFWvrnfKb07C9YewpXP/+g4eT71zS48v3wP/qSweKJ8+uXq31135w+4yHlWlljWR+5PnfcGrTwaV3vTxS2bSH4XDwHuDpDZH3q+E+Lh5PVeXtjTZhOQd+Scy2EWccXnR6+5GIDz/uZp3aLKGiu+3HLC5VA+q7/qY7zQAICsrCxkZWUp/k2emDpz5kzMnDnTnacxnRkRvd3uU8XIL65A//bN3Lp/cUU1ijVyMp5dthsfbDyCzsmNJbfnHTmHm99QHpp6NXcfZv3wO9696zIM7ZoMQH+Snjd6Ro6elXbjKz2k+Phi7xVaY2BV1z/PrS1s1rZZrOHPv31zYzkLQO2smLm3X6oj4U7amCs7JRl+Lm9T+uztKyfnfLsbr9/eDxsP1Z6MlHKntGpuqLn8okRsOuT7qaHuEr8j8hwvV5+x8yKRomRYj1vmPvH6VHrO39Wi8bsIL9fyWLDuEGYu3Y0rOyXhw/Hpuu6j9rZXW20IDwtX/qMOr+buw9yfDqBts1isfexq1e04SqOPW8FIqPD3Wihi1/3H+UpSr7NlVbj06R80t/m27gCzT5YdrxaIAMCsH2pnrzz+xQ6snzIMgP6AzRvVU7+RVS1dpXAlfXmH5l557v2FpaqBlsWi/LrduQCyz5pwtUaKfJaUagKlB8WcBEHAqNfXIyE2Utf2lRr5OcUXaoMPrROwO+vCBHqhKkntFYPHD/nm4vs/u2w3LktLlEx194d1+8/g3g/rS7zrml0oCsK8/Xl9sPEIAGCtSrl9IzwtA/D9zto8r+Mucp1OnvfN0g1fbjmB/YWleOSaLg2i94UL5WnQWrXTCH/PhFh/wPUX1cg45p2Xt5f8Lv4O6z3e+qt6qlIA4VapckH9KlDtas+d2NW+b7i6gqyxCpKuaaUTXd6Rc+g943un3I28I+fw8S+up8evP/AHth07rxjkyf1eUKKZgGw/WWj1frjzfunpTbGv7GsGQfRVdxqmcfGCnXOi6m858kc5Ln36B4xdsMnQ8LGeJGwtO2TJs4JQO4yrRRw4e7tXwJ36IJEqSfyeJoXrPYbuP+3Z1ObTJZV4ZukuHJA9zkNLtuK1n/Zj48HA7Sk0gsGIH3R54ltcMu07vwUlek6+Rq5K5duKD5L+TGDVQ+m1uxOMWG3qNWbk74d90Tp3qr/apyq7+jyqrDb844M8x+9KT/Xgoi0oqazBv7+QzhS4+Y31+Ndn27Hx4B+asyFuf9t1LRq7V35wrvGiROvK2J2S9nrWdrn3wzyX2/hCtdWGnw/V50iMv/Iiyd+VdsP8ogrH/umUE6Vwh1W/n8Z2nbNr8o6cw8VPLMdDHixBofTxDX5xpeZ9xD0j8qRcO3e+K0r5bZ6QD30aJd63J7yvPuXZ3ZWh7bI/3or5aw7hxjn1dUp+PVwfgJz2wvtyprTSr7VXlDAY8ZOSyhq8t9711Fwj1K6mvR2MyGc5SHpGdD6G1SZg1g+/4+Y31vt0jn+1rAw94HyQX3/gDN5ec1DzgCgIgmpCpfzq/I66k7g7hzY9tUiA2gP89uPn6++n0HZXj3HgdCkuz/FOET6968Bo9WS4E6BaEDjd0fL957lv9+AeUcDYXLais/w9W7f/DC7PycXdC2uLyK3cI11ETu3coPeK3j7k+uVW92cY6bn6j4mUnkb0VOOVH6LOlVXh7TUHUViiHizLyxZ4yp2qwWLiDpcfdhUonsx3nSw2dBFaUFzhFIRuratUbe+p319YiltEs/Q8nWhx+EwZ+s9cgesVksz9icGIHz27bI/TbZ7sSGrHCT1DIkYSy+TBiOBGz4jVJuDV3H3IO3IOn29WXxPFXfYD4p5Txbg8J9cxtgw4vx9/n/8zZi7dLVlHSM4mAD/tVf67fPjOnnfjzsn15n5tAegJRmySbZQ+YvHf1yok7T7+hbE6IFr2uCjkZG+KdsE3d4bPzJ/hZpf10RbkfLsbM+uSdt+R1QGSf7eXyqYd23uiVtbtZws3HJH8XU9hNV/Tk4sgDzjF3zd57Z36baQn6IeWbMXMpbtx14Jf3Giluq6yGUqSNnjYMyJ/3UoXgfJZZFr7/IpdBUh/NhcPLdkqub1Elvy9J186s0rpeX87UYQpn++QzCpSs6wuf/B3EyrlijEYMdk20dWu3U979C2zrXaVqKdnxJNiaOJH13tuEHeJllfVeHVBO6B+XPj1lQdQUFyJJ7+sP/GqvR9KtS9c3UeLOy8ppm5pY1fBSLVNkAUj0iebt+qA5Mr7Dh3l/33JJgBFF6o1FyTz8MLUdEt3nMKbqw7i7bWHJDNO7KplL9BokqFat7mefXOawQJ0alxds/x88A+n4FzPSV6+jT1PyVsL2CU3qe2Veiizs3ob6t7HovJq5O5W7tnQIh+C1HMRqLWJvQLw19u0e7LkvVVKw52TFm/Bok1HFRe7lHNnVpsvhHQwclGS8amYnmiukAkv34GPnS3HuPd0Xh34qWdETvyl1VsuX3wlNG/VQbdO3M3i1Gd5REeo78pqvU/yg7r4tbhzBe7OfWp0D9PYJAcN8Ws6X16F57517nUz2191FnzT67cTRVi0ybmwWiAQzzixk0/tNXrMVzupv/DdHsm6RUrkvSzucnUoGf3WRqfb7Pt03pFzqsciXye0J8fXBiPREepTd+3DXX+bvxF3L/wV8zYaW45AfgzVM3ymFPB88usx/G/7SV0LR37081Hc/1/pvqZ0fLPXP1r+W77L3ndPc1q8JaSDEX9/Bn8oVEOVT1c1sjS82gnYquNL4ckOKF5T5q53N+m6j/jAeqa00q38imYa0xqjNHp67Ce94opqvPhd/UlbHIwcP1eOy55Zofg3vdwJsOxBmqv7VlttGHxxC8fvP+yqLx9/wY/rbBixt0B7KMdo79gNc9a6nEZpV1xRjReW71EspS4IAh75eJuh53bH8p31s3qO/FGGd9cdNnR/ec+K3Zaj53H9q2txtqwKX2454bMcrK+2nnCs8GyE/aSsVSbAG1P9tTh2LY3DnL231l5Q7ovfnIdlc8tjcNUbvyLviPOMFXdWjZZvc7qkEv/8dDuyPtqi65goT04HtL//NqE22BKbk7sPaZOX4s1VtSt/MxgJAIEwN/t92RWMkR3jio7ONTUAfV8KPQvn6aFVa0JMfkVwqsj4OiRaK/JGafaM1P6bs2w35v50oP520clQPjTmzswnt4KRugOiqxNztVVAu8T6nrwam4C31xwEoL3PbFEZs5f7YMNhXdt5ky/PR2P
e2YTXVx7ADXOcV0o9WVSBzzxclE8P8XcsRyFfzBVX5fEzcmrzC55ZWlth2GYTsGbfaZwv984SEK7WjlKjZ5aKvBfBnUPxLo1jmP3rpPWw//feL5KEWft3sNQKvHYMOFBhwd2nk3H0fAXufMf5okv+vdMzPCXvLSsT5YNsqUtUNcpVEUD531+umwmXU9ebymAkAATGRyA9UestEnTXFWmqeR/y+eje4OkBTn6Amv618SsuLVrBSJXVhk2HzmLRJmmtjV+P1J+o5YGp3iDLbsWuAreWCLf3jLg6MStl/s+sOwmpXVJZbQIeWKQ9rbO4ohonz1/Ak1/t1Nzu6VE9tRuowz2y6Y++TEbdrpCLBdSu97M33zuBuBHu1PvQCr5rH7N2n/h620mcLqnEnB/34853NuHG19a51UYjjv5RjrTJSxX/ZnT9GsB13oJS78/L3+9V3d7RMaLxuPnFFbh9fn1u1aFzFThQHYHpJ2Lw0tEwDNtTX526XKHmlFMwoiMJSt7bpScQiHQxnf1bD+vqBMraOSFdgdWTz2BkXBm+KfdOzsn3Owtwfd2KknpzOQRBUP0Cy0+63tDnKWlF14XrD2ODgXUn5FdC57x09WanVtjI7q9vOucviIc65LR6l9Kax+HFW3tLFsEbr1FnQIv9eWb9oH5gBWqDOfkUSgBYuv0UJn7knK9Qex+by96a8korPvnVdS/BkC4tXG7jyve7ClBeVYO4qNrDjvwtdnXQNULt4/v7/I3Y7OYVqCfUZmZ5Q2WNVTLEeNQPi+pd9eJPqn9TG14SkwcsYWEW1Q+tuKIalz7lXFFaa/FIey+Hqz1KXoH6lvwUnLPpKxHvNEzjRuKunmCk9nm0H3vK59tx+Ew5Phyfbringz0jAcCTmgX/F689Hm7EWdGJWW+UahUEU3eiaV/vlIyJuyI/cOQd0Td8oJdWz4g7tKqQTr6uK0o11v0xwn5wcrWoXo1VwNLtzjM21AIRoHY82tU+YhMEXUuce2uMv6K6/kQlT6xLbRbnlefQYkYgoocnU/zF76kaX+doiOk5KcuHbbV6RtbvP6N4cXDojOseYKMXnK4CkdqF+s7iQpXVOYFVRxA2/eudkl4ePcdwPReoizYdw4aDf7h1XGUwEgA86RkJA/DxHZd4pR3i/AS9O4bVFjjda3p4WnrZFVc9I2pyvt2N/20/6bQvaK2S2jg6UrHb1h16ZxWcKrogGVbSY9ALP7m8SrbaBF09Eu5US1WiVb23SmEf2ZtfguGzVmHZDudAzCi1mhe+pjWcYOfr2SW3zFtfN0RVgpK69ZB8xapnuEIUsCxcf1g1CfObbScVZyoB0KwmXJ8z4t1j5LvrD+PmNzZg3HubFIZpXH+Gy3fmY27dFF5xO7UYWd9HbXpyscZnLg4EvV1ywYgQD0bc31ErBQsGpDb1SjvEwYjeAEMQBHhQKsTvzpX79gCoNbVXy5urDhpedj7M4r2eGL1BmtJMLG+wCYKufc5mE9AlpbHL7fQ8Tv1zS/+mdEX90JKt2FdY6jSd0R13GCh3701zftzvchsjs+jcseXoefzjgzyMmL0aV7+8yqfPpSeBdYloraRpX6vnK2nlPGk9j1A3rCHftYdc7Nlw439/rp1wsPHgWbcSWIHaWkgAsHJvIf5PRxmHEgO9sGoBkVZyrPh1+GsNMSVBdDrzPk9iZu900tcSJ7cJOie92gQBo/q28WIrgpu7PSN2Rq6gLBaL28GPnN7uc1/1gel9/rjoCHxwt74l2zWfT6NnROmq7kKV975p3urNMmKfi+nNdgvWHfJxS+qHHvXWBtIyVOOkrme44sc96vla3qA2m6Z1QqxHjxspWmvHnaJnQP137q53f1Et8rbrZDFGzlmruiSFGrXhvuq6C97/rNjn9DdxhVdPq9J6IrSDEQ+O8Pad/Z2x/T1uR2xk/Til3qqUVhtwRcckxb9d0ka5x0bcBdevfTP9DfSB3qkJXn08T4c99QaBQO1+42nwY1djE/C+CdNq7WyCgLTm2onYl7RpijYJsUiJj/H4+X7YVeAYM5cfOJWGabwxFKl3UT81S+653O373r1QX2KzfIq/r3m6nonWiub6ckZ8XGfE/h/Z7nPzpa4v4BqHqbdNnF8lz3MZNXedZlK8Ef/48FfsOFGEMQv01XGyK6+y4s9znWdTVdbYYLUJeGWF9LsgCIJjOQNAX/KxrzAYcZO1bi8f1i3F43Z0FnV/6z0p2gOLq7smA6it7nqVixkP4sjd2wmkRukZVzbivMYwUFyU6+x4I+u2hFksijNb3LGvsBRTXUyrBXw3DbaqRkB8rHplWwAYNzDNa8839audmP71TixcfxiTP5cWcFI8iXmhS+g/ufs8SuBM76Bcz0cPd+rp+IOnJx2t2hZ6egjcmepsRP1sGukO1K99Ito3106ULrWp73TiZFKl/D6t1XuN0DqeaVm24xS2KdSnOX+hClc+/6PT7cNeXiUpY8CeEZN4ctXlza+S+CpB7znH3t390q298eDVnfD5/Vcgqi5q36GyxLiZO5qcnhkARmjNCPH2TIIwC9DHSz07erthq2p889l9seW4y0DH23nSi385ppgnIB+mEQTBZa0NvcxaHt2fs1iM8OWxQE8e1JlS3+RA2dXXGXH+W/EF9070s374HduO1x9bfTkLxUieiJhafaRlO07hlELC70HZStKermTsiZAORjzZlbx5jBEfsPQGI/a7JDaKQvY1F6N980Yuhw7M7IKT21/o3cJs4qEuOW/3Klgs/q/eqzSE4Q3z1xxyHYz4qTxgjU2QDCVqraps1IjZq732WEYEaCyiWkJeEASPk2k9TYLUWzVYk0YFVneT6V/NleZbeLK+l+8ov/ebj5zXdW/2jAShGi8eoMVXbXpPnErbuVqJV16KuCHp0Vp9ZpO3r07N+MJWu1GeXi9Xu5wvK6XKiXsJD3mpVwQAjmis0KxlwV21OWHeLMgWCD7NUy509+J3e3HFcz+qVlfVw9Pvx02vq69pI9a1ZRPVv+mpwOopPVNu2zbzLGHWKLWvqt71qxiMmMWDHdWb5zfxDqA7GFFogKsDZiD1jHjToM5JmgcGb1+dmjErw5fDDK52OV8PNQzvXp93JVkaIQDO//Extfk0WgsxBqP8YuUaHfZpp5747WSRx/Uqvt52EmdKtWf9aF2AOHJGfLgPuSphD7hfcsBdRpexkGMCq0k8Odilx3g+Pc7OPk63aNNR3VcFSkGLqwOmOOjpb/JsGm9KTYzz64nLyMwbJeKTr15qeUDe4CoA9nXPiPiALQ5GAmEhS3sTojWGAYORpyctLV9tPYmvZauRK9FabfjBRVuQkZOreX+tAN3RM+KyFe7TcxxQ++p8Z6B6tRHlHk6FNzPHKaSDEXd31O0dC5EY7r0vs32MdcrnzstDq1H6HrrKGXGnB0avkb1bo11iHO66Is2rj6tHTES4T6vRNo6WLuHkzsWDOFB87i/GK/cWeqE2hBpX+4Kvcz8jw8McJ33xMI2RANNX3eH2t6ah9YxUVtt8WhVZvuLvPVd1cNrm3XWHNR/D1fRfzWDEnj
Piw+OCnvO22nfrHx/kebk1tTw9qpuV6A2EejDi5o4aH679kWuNZSq5UGXFM0uNrWKr1A3qao0RcRect+PfhzM7Y/W/huK2Ae0M31cr+VSPUX1b+7Rn5KIkaR0OV++dUteseAiteeNo3HG5+vvUxsPCTEZVubhKFhcqa94oSvK33m09r0IcZrE4iknp7Rn58ZHBkt8//keGx+1QsrVummRkhPm9NN60Zt9pdJ/2HT7Y6J/6Jn8ozJ7xNIldu2fE98M0eq7n/N3RoFVpVQ/mjJhEaz/15ErL6JSv//58FPPXGKvAqLRWiJFhGm9/SewnDj2Lrsn96ZJWHj13r7YJfk1Us9dIefeuyxS3V3prI2UByq39UlWfz58JowCQ8+0ezb+L85PkZem9URY/zFIfrNVIekbUP9MOLepr84zJaI/WCbH4OmugW8+vdfFgn8WkJz/Abuaonm61w58KSypRVWPDk1/qr6/jiYoaK164pZfkNk/XCqpSOXHmF1Xg2NnaGUE+HabR+J5abQLueneTX1ZQ1qNbq3hd27EcvEm0DnZXdJQWOtrz9LW4vmsSXm7+h9fb4c4Oe+fl7Z1uczlMI+4Z8fIJz36+FpdL1kvehdu3XYLk97EZzq/V+fl9d9jJGtpJ8rv9Czu0ruCcnFJPg3waoNZn5e8DwqEz2rNWxO2RB7wRbnzecuFhFsdMMPEUZr1X7fZx7l5tE/AXHRU25QZrlDa3f5ZGFivz9grSDYHNJuCv/VMllZdd7XeuVKkUThNX3PXtMI3693TNvtNYudd7U9M9ped437ppjFfWn3JXaH9rNPZTmwCkiSr1xUSGY+5NXXFzY9dfoKYuKlrK6dlRnh7VEzf1bYOHMjvji/uvUKz8aiRnxNsX3/ZaFO70jFzcsgk2/XuY43d5V6GeE4Evh2mGd0/Bxin17evaUt9Vhpi8tyxKo9vfnROqL4l7RuQnWlczHvSwWCyOfdfe9b7zZJHmysmS9ol2ZneCYa372AMdIz0jDS2/xBtqHO+j9x5TLadE3Gvsy54RtZL4b60+EFAFJgF9uSB92zVDkxhj5y5vCulvjdaOKgjALf3auvW4TWIiXG7TXWe3mV1Gh0S8MroPHsrsgr7tlGfCuAoEHv54q+PgqhTVi4Mvo1PS7Mdqd4IRAJKS5M6VOKXbKg2h+bIaIgC0bBqDNf8ais/uy0Cn5PqrB3lyq5L/PXCl08lMrUfhrivS8Mjwiz1rrJeJD+7y6eP7vFC8LswCR/XgGquA7cfP4/pX1+q+vzinx539T+s+GXWl4OX712Vp6rPR3P0OmElrZoseSgmqYvZkWfn7mK9QFVQvtROseB/1Zc7I0u2nFG9/dtkehAfYPnBAR80eX86w0iOkgxEtAgSEu9kFrWd6VKIoEVBPr7yerntXV2QHT5dh+W/5EARB8Tlf+/uljv93bGGsu87ee+HOlSkgHcaQB0LyniOlXh1/TANNTYxDv/aJkttiXax7k9wkGj3bNHU6OMlzSOyu6ZEScN384v05Lsp18GVUmEU6THPja84LfSn57L4MZA3thLGiGVzuLGCodGGQPbwLZo7qiYy64Vr5MKBWrRlvLaLoLwdPl6Lrk8sx+bPtbj/GY9d21fy7vRdDHox4soihWuK1ONBXOha/eWc/t59Tr8Cszqrtl8Pq6w35Q3B9a7xMK89AEAC9x5SEOGnXlp7AQc/ibZLH1NHtp6dK5MSPNuOeD/IUh4bEJ0GjFSftWxsZWxcTV4+VL0xmE4AXRclvSr06Zn33XR107LuCvGckUuV+RoYDPGFkBpN4mGbyddKTjjeaK0Bw7G9Gphb2a5+IR0dcjOiI+tfizkmgaWyk02ycvu0ScMfl7VUTs3eeVB9CCrZhmjdXHQRQu2aQu8LDLEhNVE/6t+8n8mBkya/Gn9O+76rtK+LnUJqxM6JHS8PPaZSve2p9ocjNNXu8Jbi+NV6mdSC9tF2CIy8jqXG05uP874Er8fifujl+t9oETB/ZXfM+/7rWWFd8Zx2JRWrl4OUH6B92FSj2Loiv6FobnF5qD+w8ORAvuedy/GNwB9x95UWS222CgFv7188+ES8ildktWfL8/qbWJf/iLb3QJCYCr99e29skD9LUrp79dRAzMpQgHqaRD5F5I/do6fZTjvfD07F2raXt1VTU2HBxinRGjfyCQp7ErCXYeka8NaQw9YYeqn9zJAJ74Xv64fh0AOo5I+Ljnb9nptm9sHyvz5/D1SrtRjXRMeTsS8H1rfGjoV2T0bFFY6ybfDXW/Guo5rZtm8VhgmjM1GoTcNfAi5D7yGAM65qML+6/wuk+LZvqP9k/95dLJFd/atQCAaUls+Vf0txHBku+xA8O64wbeumfcms/xsRGhUsCMyPSOzTHlOu6OfUayTuaSitrsCL7KkwYdBGev7m2x8S8nhHl9/zW/qnYNvUaDLgosW47Wc6IygnAX1VHjZwwu4hO1L4Ils6VVzva8/iXrgv//e+BK1X/tuHAGcPPbxMEp89Dvo7TNaKr6Wdv0i5a50nOyKWymWT+4K3eOK38qSqVnBF32Idx1YZpxM9hVg0ve30aX/J2WoqrIWdfYzCiwr5Dt0mINfwh2a+qOrZojHfuukwx4dTIdzI6Ut/HpHYQVFqlUh6MdGzRWHKCio+NlOSQuCI+nk24qgOm1fUMjerTWvdj2MlP8EpDSp2Sm+Dx67ujeV2vlVmlw7UOruLeEPkVodk9I3qfZ+aonriuZ/2JWF4Armcb4zOLlNhnzuhZ0C45Xr2nUk/QLicoBCNKQ63/HZ+OScM6Y/Rl6jViPNUu0fnCwddWuVgd+aa++mZ3ac0Qsw+peJpLMXFoR6eZV3KSYESlZ+SN2/Uf27xhZG/jx0G59Iuk+Wre7g321crgejEYUeHJsuneLrOsty1qwzRny5yrHyplV0uy0HW2zU7+xbjrijT88PBVePmvfQw+knNQpaer1bRhGp0HV/lrUg1G/PQ6tIbTxH8T500AQJOYSPwsmoadECutyKrmkjbalVqN1FbR+qwbRRsPRqw258RrpZyugZ2S8PDwLi4DOa0E9mZxkeggC+jEXK287Quu6hxdrVJPR06rt83ei3G5LB/MqIczuziOU2onT/HnI68YbHedh4UWjXrg6k6OIVt3/XxImmD6e2GJR48n56oSs68xGFHhyWJo3l5sSO/5ydMTmfhA6OlrsFgs6JzSxK0r/YgwC1o0qb/61dOUNl5am8RoKXa9XfLyz0btfbGfE30dkxhNUBZLiY9x/F9+dTqoc5LXn09O65GM1vgBai8e5Dk9V3ZSfh16aCXhjh/UAd1bq/cmKfWM+GP2hxZ5gr4arQJ49vyOsVekuQxM1XRo0QgR4WGORHu191kcPA/04HP0pqjwMI8rTcvZq8x6C4ORAOXJuVita1AsLipCs1aBmN6rfndO/PExEfi8LqdFfEVsv1L9dtIgyfav334pHhnexelxtIIXo2v1WCwWrJ98teN3PT0jV3VOwqPXdME7Y/sbei65E+eNfcH1Tv/WO8vI/hl+em8GureKxxPXu5d/o/d5l
Oipk2MnPiF8ND4dH9ydrrid1ifYUhTc6KH1fXCnaJP8ij4hLtLtWWGAdhKuzSYgRmMmU1xUOLY8OVxymz9mf2jR2zOrNUxzad1QdWR4GO7UUVFZycG63twoxzCN6wTWAFj4GYB5PbdGmFkKHgjxYETrHGfz4IPROyPgoUznk7oSvfuxO+Ox7/3fAMeBQnyVbw8uushmGfzpklbIuroTlj4oTSLUuhpccNdliNGZ92InPkHo+SwsFguyru6sWJnWl9Sm6Mrp/Wzs73u/9olYNmkQrugovbLroXFVDdSezMboONhrdakv/L8B6NYqHm/puCKvsQlYNOFyzLixh6MmhxKtXIieBq+Utb4PRr8D3VvFO5X117O//UOjyJfWQd0qCJqJnhaLBc1UhhbMovctVdunHhneBVNFsws9nfpsfx6rTVC8CBIH2oESAug5hu96aoThx1VaFsRdRo/R3hbSwYgWT4Yp9N5Xb5VT3T0jbnSFix87QmGsVekK2mKxoEdr6QmktcbsoNYJsW7PsAGM91Jdb2AWkKeU3h+lbm29vVbyQFY+vOFqX9g5YwT+cqnrysFK7Zk/pj82PT4MPds0xbeTBklmkKiptgrI6NgcY69I00wibhanfYK93kAXttbzGO0dXDZpkNN99FwhNtIIKMRrQMnZbIJk6rp8leuALE9hqU3elS90J6eW7/LAsM6S4TNPpz6LCwYqXQSJP0+Nj8Kv9PS0uVNQ8GkvLsr4oUqvpr8wGFHhSVa73mBE75dS7/FJHEyIF6TS+9gWiwXfZF2JT+7NMHR19sxNPV1+2dRWjdRa+t1+BTlEYyEzJXpK7f9F5wwBV0GUPGckqXEUFk243Hk7heGcHx8Z7PL55Qd4V3GpxWJx2Tvwj8EdFIcKkhpHIbmJsSETpZPBrbJlFDonN8akYZ1VFzxsEhOBYd30JUkC2u+BN2Yj6Sm8pvU8NVYBC/9vgOLfrIIgCVblibJ6LzyM5MbIPw+jLLBgYKck/LV/Kv45Qr0+kt68IE/L5YufRymJNUYUrFg8OMOlRHovktG7WzbTmZ8jZrSAppL5Y/qjf1qi6w19iMGICnfGjP9WN+XvIYWcCrHlD9XmYWgFI/GicfuDOle3FB8gZ9yoXoBITH7wu6RtU1xmcKfUE3v1T0vEnNv6SmpEdGzRyFGHQ8mPjw7Gu3ddhlF9agOHnL/U1nd49Brt97dbK+0clXsHd8Ss0X1ctnnmqJ6S+jFK5EHGW2P6KwZeSgfgDgol9+X5MfLAQs8UZq1y8oO7tMCU67opnkyNjGvbpyreN7ij09+eHtUTw7vXD5ct/L8BaNYoCjP+7HwV17VlEzx2bVeUVtY4/U2NVjs9yfWwU8tF0NuGaqsNg1UKUllt0n1GPk3Y/rDP33wJIsIseHfcZYqPY2R1VU9P/uKXqpW/onf4RWs7Pbug+P7VCkmXS3fUrxkT78HCb5E63zaltbLk9H63HhzWWfVvaoGg2urhRgRChxyDES/K+csl+Pnfw3CjwpzyYXU7zO3p7RyrvmolfIlPKHqznMXBTXudPTsllZ6XANabXzOyd2tJfoCr7vDkJjEY2jXZcYK5bUA7bH5yOLKuVv/CAsDQi5Px3F8uwddZAxX/bq9bsnfmtfjkXvWeGT3HD3mwoHbQ0ZvLIH9H5CeSgqIKlxWBtYJc+xCC0swrI8HI7NF9sOqfQ3CzwlV3TGQ4hotyd7R6Cr+cOBAtm8YYKk+v1Up5Dk//9vqSxI3S+jy19mubICAmMswxPp/WvBHevas+4LAHm6Mva4c9T1+LoRcrn2hc7QNiHs+ME/1fvEiknN5pyVr7p56ApnaV59pWKQWOvxw+p6sdrujdI/XMktH71ZJfSPRqW3+8HHpxMr6a6HxM08pbS1MoeKnErEq1YgxGROzjb6+M7u3W/S0Wi2Tao9irt/XFO2P748kb6hO5tL6U4hOD3h1FfJ+EuEjkPjLYZfXYM6XONUiMcjcL253S34k6ho8sFgv+NqAderVNUPy7vWs3OiJc8/HW7XddzVPew6B2WNAzfBAdEYbesjbLg4b84gosvsd5GEhM80RZ954rXS0bSfgPD7OgfXP1ehkxoq5jrfwK+z5rZLxcK2iSz27SO1xplLgHZopsvR61AAKoHTKxWCzYOvUa7HpqBKIiwiTHAfFHp3VyNzJN1NOyR3p7m7wxTKO3DomrwmfeUKrzofXk/umdkSSvdyPuYQsLq92f7WXgJwyqzT3S2k9e0dEDDHi/HIU73ApG5s6di7S0NMTExCA9PR2bNm1S3Xb+/PkYNGgQmjVrhmbNmiEzM1NzezPdeXl77H7qWtzU17MxViWNoiMwrFuKZKxeaye6IFrSW2/QKp3SZkHHFo2R6qKHRKm6qb+Y9QUQB0HyL7/4PRSvgaNGb96PnqvGvTOvcw5uFE68nZIbS674U2QVSbWGaezvua9XFRVf4YoDPvnwQv0yAtrvz5zb+jrdR8m1omqxTaIjDK3fYX/f5O+nEvH5VFyhedvUa9CyqfIFycpHh6Bz3ey0mMhwRwAmLTao/OJWZA/GXy6tz3NydeJ/5qb6ITFPr3rlzyRfXqJf3b6od8Vute/MiuzBmovtKT2GL5e9/6PG82EnO71fN3mgJj4e2IPwheMuw+p/DsW/6/LZtL7LkeFhLi9eAPOn9QJuBCNLlixBdnY2pk2bhs2bN6N3794YMWIECgsLFbdfuXIlbrvtNvz000/YsGEDUlNTcc011+DEiRMeN94X/FmfX+uAIj4R6qlbAtR2+SrR+rLoOVAtmnA5msRE4KMJ0mzrfwzugI4tGuGv/d0L3vS+Lk/0U+imF19NRcqGysRfSj0HOr0zMQao5OHIF2iTU9tHxEHtqLpkXPtJWs8QglJPjTcPSC2aKPc4vTP2MslrtrfCVVAnHpfXCkYGXJSIZQ8OwrZp1yDvyeFIMJDo+fl9V2BY12TVWili4vdP/P+mGgmIasXDxLND1D66TsmN8fKt9T22bRLULzIGdmqO29Prk4XlZcSNkr/f8rV5ltSd7MLCLLqqtaodj1LiozGwo74iZf7oGdHrx73K5z4xV0Og9tWw5T154p5R+/8sFgvaNY9TXVFarGOLxpoXJ3ZBOUwza9YsTJgwAePGjUP37t0xb948xMXFYcGCBYrb//e//8X999+PPn36oGvXrnj77bdhs9mQm5vrceODnd6EL705I03jIrHy0SHYMOVqye2j+qqvi6BnrD6jY3PsmD7CqebFlOu6IfeRIW4VmgL80zOilBshPulqVY3UE4zIr/TVlgK4Pb0dnrmpJ1ZkS2fQpHfQPlGozZoQByNjMtLwyujeWP3P2iG5SI2Dz611gWO7ROfA1ZvLGFzarhkmDevsNOSZmhiH/4qCWvsBtaNCMq+Y+HNy1eXdvXU8msZGOg2BuNKzTVO8c9dlTrV1lIh7rNQqH38uWyBTLflYfBzQOmdZLBas+ddQ5D4yGImN1YcXK6trP8c1/xqK//ytj2TFa/dIG3WFrJ6MuNdPT9FB+QWAXZjFgszuynWCEhtF4b/j6/ebKEfO
iPM+29dLiw32iK3vndaqDXO1xrCcnT0YUcq5yX1kMO6tSwTv2KIx7roirf5+st5uJVrHsNiocF29oJ6ulu0NhiY2V1VVIS8vD1OmTHHcFhYWhszMTGzYsEHXY5SXl6O6uhqJieoH4crKSlRWVjp+Ly4uNtJM3Twp+e4Neg+UeuuRAECawroXfxvQDh//elxx+0GdvbsMtRH+uKpR7AEQPa9WQFhZ7Xo5+vGDOuCjn4/iZFFF7WOrBFgR4WGSq1W7x67tipT4GMnwgpjaAUjcgxcZZpEMLSq9pocyO2NQ5xboW3fl9fDwzli64yQKiuu/Z3pmkehlsVjwsMqssqTG0Xj1tr6IiQhzfD6tXZThF1/9GRlh0koS90SESs+I2KWyBTLVAo1ISTCi3V77sGthSYXqNvYgOjUxzuUwrR7yJmm1Uc9sL7WTY5jGtPSfHhki6XXSKgk/rGsythw979GU5v90seFUSQ12Xqj9nn1ybwZ+PvgHpn+zy2nbznqC17qP+LP7rkDvGd9L/ybb9m8DUvHe+sMApD0qavu9Wu+pvSdNK1ixC7qckTNnzsBqtSIlRRq9pqSkID8/X9djPPbYY2jdujUyMzNVt8nJyUHTpk0dP6mpvlsl00yeTrnTS2ss11+rxCrxxxdAKSFOfNKVr4gsnjasp2ckJjIcT4iSko0GWI2iIzBxaCeXPQNycaKeEfkJQOmAHhkehn7tmzmutJrERGL26L6SbbSKdXnbjb1b6yqqZifPh9J/P+d9X09OiCvi7414HSUtaq2OkgzTuJfoKFahEEQvuedyjMloj/Gigmt6efsIofYaw8I0PlvZzfYArqrG+RhiH3Hw5Pj65xZAo/D6x66qseGugcrvnZ6hfftrbhob6dRzIz8M2mdbAtJAUHWmnsoFlX2mm/x9GDcwzWnboMwZ8cRzzz2HxYsX44svvkBMjHpxpSlTpqCoqMjxc+zYMT+20n9cJXzZq6AaKQilROtptNbJ8DV/fAHuHeJcJ0QcMMREhuPpP/fA06N6Yots2vB9Q5xraCgRL8blzd4FLeL8A/l4r1KAqTTUJx9LDoSuWiUv3tLLcYUfFRFmqGdEaciqjxdm2FzfqxXaJcbhln5tMbhLC4y/8iJJTocStROtO6tlaw3FVdQ4ByPpHZrjqT/3xBAdQwpy5VWuewjVKH2H1K7UNevHqAQjSsF//V7sWRiVFFH/SFoXGWfLKlX/ptSS52+WVrJVuiizT8kVX0ypfTtdDcPIjwfTRjrXoLIGQKlaQ8M0SUlJCA8PR0FBgeT2goICtGypfZXz0ksv4bnnnsOKFSvQq5d2WeHo6GhER3t+9RLoXE2Z++mfQ3Dy/AVJpOyOJtHuF/7xJX/0jERHOAdb8unXd2akSX5f86+h2FdYgqu76lvnRpzX0TvVvRVJtSy+53L87a2NktvuHnQR3lx9EIDzgdJisWD+mP6Y8P6vjtuUDqby4RxxTYNAYs95+G3GCIRbLIZ6RpS6sHP+0gttEvbjr5e5340fFxWBVf8c4miLuHdMrEfreOw8WTvMrNozIp7aq/PyUCv5XSsp2p3Vk7UK0sVrLKo49Ybu+D+Fnhi112gPRl68pRf2F5bit5NFWLf/D8nf7OzBmFKQbY/NPV2b7pqm9a9baxq7nt4scbJ+l5QmePW2vnhw0RYAyj2Sn9x7BUorayS9bmoBg6thGF05I8HWMxIVFYV+/fpJkk/tyagZGerFo1544QU8/fTTWL58Ofr392xV1VASHxPpcSACAO2axyFDYbgiV0c5cl8yY5xyZO/WeMRFBdfUxDjdgYjdzhkj8PO/hxkup66H0lBTUqNoJMRFIjYyXLG2zfDuKWgjysNQOtiIe0YeyuyM5gYKaZmhcXSE4dluSvkziY2iMHVkd4+/W3qCohjJcJryNuKcEb2TGrR6VbVy0fQWJhNTKmpoL0E+WKGnxb7qs1qPrtrQsP3mW/unYsqfuiEmQv2900pgtecCGo1FuqdIA45wC7C+zQn8MOFS3UNxauSVYG8Q1YlR6pFs0SQaFyU1kgSPap0ztw1IRVREGP7cR3migtL7La/mGnQ5IwCQnZ2N+fPnY+HChdi9ezfuu+8+lJWVYdy4cQCAMWPGSBJcn3/+eTz55JNYsGAB0tLSkJ+fj/z8fJSWlnrvVZBLU/7U1ek2o3kKwUo8LXTObX2R4GLRNnc0io5QLXjnDVfWDQXZDyxhYRb8/O9h2DJ1uOrJR3wQUrqCFB/oPD3YBipPF2XzlHitEbUraPGQy1odhfYAaa+qPQ8kIS4SyU2iHdNEFZ/LjZ4RpUB22YODMGlYZzz9Z+cu/41ThmHtY0NVexOaN1Le1+TBnSSQk4UWjpwRpWDEjZ6RDi0a4fMxvZEcXoOHW9YPu7SOsKJzknMScBNRj5Croe61jw11uk38+WkVXhQHnWo5XcnxMfht+gjMVilwpvT48kkR3rjo9ZThZQJHjx6N06dPY+rUqcjPz0efPn2wfPlyR1Lr0aNHESZ6A9944w1UVVXhlltukTzOtGnTMH36dM9a7yVDLm4hqYzqT91axWP3qWI8ek0XvPT97z57Hj1zzRuq98YNwCMfb9Vc9yHQvTfuMuw6VSxZLVlpCEpM3D2rOEzjRuKkL7VoEo3TJa7H340wOxh5cFhnrNitXYdCHCD8fPCs4ee4pkdLPDy8CxpFR0AQBM0eG3feD6WTYFpSI9XZUo2iIzSr7kZFhGHzk8Nx6dM/aD6vuBdMPvOxfjaNQgJr3b96q54CwI+PDAGKirCp7UmgWTMAykPb/xxxMeb8uA8zR/XEpMVbAQCDOqvXRrmkTVO0baY8o+m/49NxrrxKc8aTOGhpodFzKT++DxUtLqpU3Vi8H7x6W19cqfEa/MX4msUAsrKykJWVpfi3lStXSn4/fPiwO0/hV7f0a2taL8GyB6/E+fJqnDh/wafBiNkHZTN1Sm6Mr7KudL1hAIsID1Mtb68m3FUwIp5S6nbLvCe1WawPghFzX5naiUhMfPWrNBNGzX1DOmJ/YSn6i2ZJuRo6cuc4MLiL5wuxyelZ1mFEj5b4NK+2JIH8hKpZ9Kyua8QX8fXEoZ3wj6s64OjZcsdtWjMStRJfxYnvWr6cOBClFTVINtDz6ioXXVwCQmktNTO4FYyQ91gsFjRrFIXTpdKD8Au3aCf5GqW3wBo1HOL8gBt7t3H6u/hqKgCGjPH0qJ7467wNCAuz6CrFr4eRZFdfSGwUhWdu6onIsDDV7nzx1a+RYOSxa9WHY9ToPQ58dt8V6JOagIpqq2Yvhy9ldkvGi7f0kvQG2kU5pvaqz6Zx9cn/7bJULP7lGP50if4p5kDt90pc8l8rgdQbiaFGZn9d3TUZP+4pxB3p7RT/bg88ruqchMf/1A0Xt3RdI8VfQjoYCYAKuA7iK5bP77/CqWCSp4wUTqOGQTxMo9QNK97n/FljRE2P1k2xbdo1iAgPQ9rkpV573PFXXoS31x7y2uMZpVTsTs09V+mbTu4uvcO1FkvtFb9ZgUhtGyyq1WMjtRJYHTkj2uHI9Bt7YESPlroX5xO
Li4rAhilXIzzMotkzsr/Qv7mRr99+KY78Ue4UZCy4qz++3ZGPGXU5PhaLBROuci57YCaeoQKE+CCR6IMEy1DOGQlVrgraifcJf9VHccWd2R6uPHFDdyQHSYJumkKypDfpPQ4ozaAJJJoJrDora8dEhmNo12S31yNr1TTWMXvukeFd0K1VPLZNuwYXKVTB9peYyHDF3o6ru6bgxVt7G1od2994hgoQetencFco54yEKlf1BcR/9+a6NN7g7eAhEBJ09fD1sJI4GMnUKKaodJL3JfvaLHrZZyBVa1Rg9edH/sCwzvh20iA0jY3ET48O8d8TNyA8Q8FY1rWviA8SvihAw56R0OOqZ0R84guEokdi3g6en7u5dqVZeX2FQPGPwR3Qt10CRvQwVt/GKHFCr9ZH7s9qvO0S4zSnIyuJ0lGBNRCO66Qfz1ABQpzT4YsDgZ4qfP5inw8/c1RPcxvSwD1+fTcAwP06ytoHwlLsYq0TvFuzZcjFydj91LWYOLSTVx/XW6Zc1w1f3D/Q5XRtT0VJCqxJjzOpifX1ePy5P7hTcEtrobxAygUk/QJ3ACnEiK8EfXEgMHtWgdiovm0wokdLt8dqSZ9ebROwd+a1uk5wgbYuzay/9sGUz3d4NcmO+5v0OCD/xFs0jsaxsxcA+DcYcSd52t7Do7SYpaMCa+Ac8kgHBiMBQtyl3tjEDHZ/4YnBP/ReaVcHwGwasdTEOHw4Pt3sZoSUiHBzEprd6RnRrjNS+w9jkeDCYZoAMnt0HzxxfTdJQRpfCdSxczJHoPWMkO/c1LcNuqQ0xlWdW0hu7yuqZ6GnKJm3uBP46Fm1NxB6Rlo19d0SEQ1Nw78EDyKj+joXpvKFv1zaJmDHzskcgTabhnznldF9IAgClvxyTHJ7jU3AG7dfit35Jbiio/HaG0ZNH9kd07/Zhdl/62P4vtFa5eAdFVjNi0Z+n3kdVv9+Ghl+eB8bipAORkL1WjAQVmikwFLFnpGQYrFYnGq6VNXYcN0lrXCdaEVZX7pr4EX4e3p7t2b6adUZ+W5nAQBzh2miIsKQ2d23M6MaGg7ThCB2yZOcNcByRsj35Ov2KJVW9zV3Sw44hmlkbbbaBMe6MX+UVXnWOPIrBiMhKNCmcZJ5HhzWGYmNovDA1cG7ojG5R57c7O9CZ56wB1LyNttE83r/KPXuoovkWwxGEBiJTv7EYITssod3wa+PZ2ouY04N09CuLdC9Vbzj92AKRtTqjIhrjATKEgekD4OREMQvKYmFBVBBPPKf6IhwLJs0yPG7fMgjkNUP00iPZeJ1aYIpuCIGIyGJX1Iikgum40KUSgKrtGckeF4PMRgJKZd3SAQA3DZAeVluIgpdZiSwuitSoxy8HYOR4BLSU3tDzXvjBuDg6TJ0a+W8xDQRhbagCkbqEli1Ao5gej3EYCSkxESGo3vreNcbElHICcphmhr1YRoGI8EltIdpmMdJRCFuUOckAMAd6e1Nbol+9eXg1RNYmagfXNgzQkQUwt4e2x8HCoNr+FatAqukZySIenqIwQgRUUiLjgi+4dvoyNpgpLLaKrld3BfCBNbgEtrDNHVYZYGIKHjERNZWj61wyhkRDdMwZySoMBghIqKgElsXjFTV2CQLf0p7RpgzEkwYjBARUVCxByMAUCEaqhHnjNzSv60/m0QeYjBCRERBJVq02q84GBF3jTz+p25+bBF5isEIEREFlbAwiyMguSDuGRFFI+KAhQIfPy0iIgo6sVF1SawqwzSWUFuOPciFdDAisOoZEVFQsueNXKiqnzUjPqIzFAkuIR2MEBFRcKqf3mtV/Ds7RoILgxFwpyUiCjYxjp4R8TANe7uDFYMRIiIKOrGRSgms9ZgzElwYjBARUdBxDNOoJLBScGEwQkREQSdWKRip6xthp0jwYTBCRERBJybKOWfEPk7DWCT4MBghIqKgExPhvFiefZSG+SLBh8EIEREFndiougTWKuecEYYiwSekgxEmOxERBSfmjDQsIR2MEBFRcHLUGVGYTWNh30jQYTACgJ16RETBRbHomf0/PKQHHQYjREQUdBpHRwAAyqpqHLfZK7AyFgk+DEaIiCjoNKoLRkorndemYc5I8HErGJk7dy7S0tIQExOD9PR0bNq0SXXbnTt34uabb0ZaWhosFgtmz57tbluJiIgA1PeMlFZUO27jpITgZTgYWbJkCbKzszFt2jRs3rwZvXv3xogRI1BYWKi4fXl5OTp06IDnnnsOLVu29LjBREREjmEapZ4RDtQEHcPByKxZszBhwgSMGzcO3bt3x7x58xAXF4cFCxYobn/ZZZfhxRdfxN/+9jdER0d73GAiIqLGMfZhmvqcke93FQCQzrCh4GAoGKmqqkJeXh4yMzPrHyAsDJmZmdiwYYPXGlVZWYni4mLJDxERkV3j6NrZNOJg5On/7TKrOeQhQ8HImTNnYLVakZKSIrk9JSUF+fn5XmtUTk4OmjZt6vhJTU312mOLcXiRiCg41Sew1jhm0VDwCsjZNFOmTEFRUZHj59ixY2Y3iYiIAog9Z8RqE1BZtz5NVERAntJIhwgjGyclJSE8PBwFBQWS2wsKCryanBodHe3X/BJOAyMiCi6NoupPX6WVNYiJDEejqHBUiRbOo+BhKIyMiopCv379kJub67jNZrMhNzcXGRkZXm8cERGRkrAwCxpF1eWNVNTmjcRFGbq+pgBi+JPLzs7G2LFj0b9/fwwYMACzZ89GWVkZxo0bBwAYM2YM2rRpg5ycHAC1Sa+7du1y/P/EiRPYunUrGjdujE6dOnnxpRARUShpFB2BsiqrI4k1qXEUTpy/YHKryB2Gg5HRo0fj9OnTmDp1KvLz89GnTx8sX77ckdR69OhRhIXVd7icPHkSffv2dfz+0ksv4aWXXsLgwYOxcuVKz18BERGFpMbRESgsqXQEI4M6t8C240Umt4rc4VafVlZWFrKyshT/Jg8w0tLSmOlMREReZ681Uiaa3gsAYzPam9Ec8gBTj4mIKCjZZ9SU1OWMCHUFGyyclRB0GIwQEVFQSoiLBACcL68CwLVpgllIByMcPiIiCl4JcVEAgHPltYvl2Y/o7BgJPiEdjNhxvyUiCj6JdcGIvGckjNFI0GEwQkREQck+THPW0TNSlzNiWovIXQxGiIgoKCU2kvaM2Mdp2DESfBiMEBFRUGpWN0xztqxumKbuds6mCT4MRoiIKCg1c/SM1A7T2GwcpglWDEaIiCgoNavLGTlXLu0ZYTQSfBiMEBFRULL3jJRXWVFRbXXMprEwGgk6DEaIiCgoNYmOQFRE7WnsdEmlYzZNGGORoBPS6y1f2q4Z4mMj0bxxtNlNISIigywWC5KbROP4uQsoLKmo7xlhMBJ0QjoYeeKG7mY3gYiIPJASH1MbjBRXOm7jME3w4TANEREFreQmtT3bBcUVjiU+2DMSfBiMEBFR0EqJjwEAFJZUwuZIYKVgw2CEiIiCVgtHz0h9Aiu7RoIPgxEiIgpa9T0jFaKF8kxsELklpBNYiRoqq9WK6upqs5vRIEVGRiI8PNzsZlAde85IYXElUhPjADCBNR
gxGCFqQARBQH5+Ps6fP292Uxq0hIQEtGzZkmugBAB7z0h+Maf2BjMGI0QNiD0QSU5ORlxcHE+WXiYIAsrLy1FYWAgAaNWqlcktojbNYgEARReqUVJR2xvIvT74MBghaiCsVqsjEGnevLnZzWmwYmNrT36FhYVITk7mkI3JGkdHoHmjKPxRVoWjZ8sBsGckGDGBlaiBsOeIxMXFmdyShs/+HjMvJzDYc0WO/GEPRhiNBBsGI0QNDA/Evsf3OLDYg5GiC3XDNPx4gg6DESIiCmrtEmMlv3M2TfBhMEJEREGtXaJ0aJI9I8GHwQgREQW19s0bSX5nLBJ8GIwQUVARBAE1NTVOt1dVVbn1eO7ejwLHxSlNJL+zZyT4MBghItPZbDbk5OTgoosuQmxsLHr37o1PP/0UALBy5UpYLBZ8++236NevH6Kjo7F27VoMGTIEWVlZeOihh5CUlIQRI0YAAFatWoUBAwYgOjoarVq1wuTJkyXBi9L9BEHA9OnT0a5dO0RHR6N169Z48MEHTXkvyLhmjaKQEh/t+J05I8GHdUaIGjBBEHCh2mrKc8dGhuuedZKTk4MPP/wQ8+bNQ+fOnbF69WrccccdaNGihWObyZMn46WXXkKHDh3QrFkzAMDChQtx3333Yd26dQCAEydO4E9/+hPuuusuvP/++9izZw8mTJiAmJgYTJ8+3fFY8vt99tlneOWVV7B48WL06NED+fn52LZtm5feCfKHi1vGo6D4NAD2jAQjBiNEDdiFaiu6T/3OlOfe9dQIxEW5PsRUVlbi2WefxYoVK5CRkQEA6NChA9auXYs333wT99xzDwDgqaeewvDhwyX37dy5M1544QXH748//jhSU1Px2muvwWKxoGvXrjh58iQee+wxTJ06FWFhYYr3W7p0KVq2bInMzExERkaiXbt2GDBggMfvAflPt5ZNsPp3ezDCaCTYcJiGiEy1f/9+lJeXY/jw4WjcuLHj5/3338eBAwcc2/Xv39/pvv369ZP8vnv3bmRkZEhORgMHDkRpaSmOHz+uer9bb70VFy5cQIcOHTBhwgR88cUXinkpFLh6tU1w/J+hSPBhzwhRAxYbGY5dT40w7bn1KC0tBVDbO9GmTRvJ36Kjox0BSaNGjZzuq3SbHvL7paamYu/evVixYgV++OEH3H///XjxxRexatUqREZGuvUc5F/905o5/m/W0CS5j8EIUQNmsVh0DZWYqXv37oiOjsbRo0cxePBgp7+Le0dc6datGz777DMIguDoHVm3bh2aNGmCtm3bat43NjYWI0eOxMiRIzFx4kR07doVO3bswKWXXmrsBZEp7Kv3AsDWY+fNawi5JbCPUkTU4DVp0gSPPvooHn74YdhsNlx55ZUoKirCunXrEB8fj/bt2+t+rPvvvx+zZ8/GAw88gKysLOzduxfTpk1Ddna2I19EyXvvvQer1Yr09HTExcXhww8/RGxsrKHnJvP1bZeALUfPY0SPlmY3hQxiMEJEpnv66afRokUL5OTk4ODBg0hISMCll16Kf//737DZbLofp02bNli2bBn++c9/onfv3khMTMTdd9+NJ554QvN+CQkJeO6555CdnQ2r1YpLLrkE33zzDVc/DjIf3J2OHceLkH5RotlNIYMsgiAIZjfCleLiYjRt2hRFRUWIj483ryFFRcBHHwHR0UBlJfD3vwNNm5rXHiKRiooKHDp0CBdddBFiYmJc34Hcxve6AbIf35s1Axo1AsrKgHPneJz3kN7zN2fTEBERkakYjBAREZGpGIwQERGRqRiMEBERkakYjBA1MEGQkx70+B4TeZdbwcjcuXORlpaGmJgYpKenY9OmTZrbf/LJJ+jatStiYmJwySWXYNmyZW41lojU2SuFlpeXm9yShs/+HrM6K5F3GK4zsmTJEmRnZ2PevHlIT0/H7NmzMWLECOzduxfJyclO269fvx633XYbcnJycMMNN+Cjjz7CqFGjsHnzZvTs2dMrL4KIgPDwcCQkJKCwsBAAEBcXxwXDvEwQBJSXl6OwsBAJCQkID9dX8p6ItBmuM5Keno7LLrsMr732GgDAZrMhNTUVDzzwACZPnuy0/ejRo1FWVob//e9/jtsuv/xy9OnTB/PmzdP1nKwzQqSPIAjIz8/H+fPnzW5Kg5aQkICWLVsy2GtIWGfEJ/Sevw31jFRVVSEvLw9Tpkxx3BYWFobMzExs2LBB8T4bNmxAdna25LYRI0bgyy+/VH2eyspKVFZWOn4vLi420kyikGWxWNCqVSskJyejurra7OY0SJGRkewRIfIyQ8HImTNnYLVakZKSIrk9JSUFe/bsUbxPfn6+4vb5+fmqz5OTk4MZM2YYaRoRiYSHh/OESURBIyBn00yZMgVFRUWOn2PHjpndJCIiIvIRQz0jSUlJCA8PR0FBgeT2goICtGypvEpiy5YtDW0PANHR0YiOjjbSNCIiIgpShnpGoqKi0K9fP+Tm5jpus9lsyM3NRUZGhuJ9MjIyJNsDwA8//KC6PREREYUWw1N7s7OzMXbsWPTv3x8DBgzA7NmzUVZWhnHjxgEAxowZgzZt2iAnJwcAMGnSJAwePBgvv/wyrr/+eixevBi//vor3nrrLd3PaZ/wY3oia3ExcOFC7Y/9d2bTExEFP/vxHQDKy2tnTF64wOO8h+znbZcTdwU3zJkzR2jXrp0QFRUlDBgwQNi4caPjb4MHDxbGjh0r2f7jjz8WunTpIkRFRQk9evQQli5dauj5jh07JgDgD3/4wx/+8Ic/Qfhz7NgxzfO84TojZrDZbDh58iSaNGnCef1uKC4uRmpqKo4dO2ZunRbShZ9X8OBnFTz4WZlDEASUlJSgdevWCAtTzwwxPExjhrCwMLRt29bsZgS9+Ph4fgmDCD+v4MHPKnjws/K/pjqKxgXk1F4iIiIKHQxGiIiIyFQMRkJAdHQ0pk2bxtotQYKfV/DgZxU8+FkFtqBIYCUiIqKGiz0jREREZCoGI0RERGQqBiNERERkKgYjREREZCoGI0Fo9erVGDlyJFq3bg2LxYIvv/xSc/vPP/8cw4cPR4sWLRAfH4+MjAx89913km2mT58Oi8Ui+enatasPX0VoMPpZrV27FgMHDkTz5s0RGxuLrl274pVXXnHabu7cuUhLS0NMTAzS09OxadMmH72C0OGLz4rfK98w+lmJrVu3DhEREejTp4/T3/i9Mg+DkSBUVlaG3r17Y+7cubq2X716NYYPH45ly5YhLy8PQ4cOxciRI7FlyxbJdj169MCpU6ccP2vXrvVF80OK0c+qUaNGyMrKwurVq7F792488cQTeOKJJyQLSy5ZsgTZ2dmYNm0aNm/ejN69e2PEiBEoLCz01csICb74rAB+r3zB6Gdld/78eYwZMwbDhg1z+hu/VyYztGIdBRwAwhdffGH4ft27dxdmzJjh+H3atGlC7969vdcwcuLuZ3XTTTcJd9xxh+P3AQMGCBMnTnT8brVahdatWws5OTneaCYJ3vus+L3yPSOf1ejRo4UnnnhC8XPh98pc7BkJQTabDSUlJUhMTJTcvm/fPrRu3RodO
nTA7bffjqNHj5rUQrLbsmUL1q9fj8GDBwMAqqqqkJeXh8zMTMc2YWFhyMzMxIYNG8xqJsH5s7Lj9yowvPvuuzh48CCmTZvm9Dd+r8zHYCQEvfTSSygtLcVf//pXx23p6el47733sHz5crzxxhs4dOgQBg0ahJKSEhNbGrratm2L6Oho9O/fHxMnTsT48eMBAGfOnIHVakVKSopk+5SUFOTn55vR1JCn9lkB/F4Fin379mHy5Mn48MMPERHhvD4sv1fmC4pVe8l7PvroI8yYMQNfffUVkpOTHbdfd911jv/36tUL6enpaN++PT7++GPcfffdZjQ1pK1ZswalpaXYuHEjJk+ejE6dOuG2224zu1mkQOuz4vfKfFarFX//+98xY8YMdOnSxezmkAoGIyFk8eLFGD9+PD755BNJd6SShIQEdOnSBfv37/dT60jsoosuAgBccsklKCgowPTp03HbbbchKSkJ4eHhKCgokGxfUFCAli1bmtHUkKf2WSnh98r/SkpK8Ouvv2LLli3IysoCUDtULQgCIiIi8P333+PKK6/k98pkHKYJEYsWLcK4ceOwaNEiXH/99S63Ly0txYEDB9CqVSs/tI602Gw2VFZWAgCioqLQr18/5ObmSv6em5uLjIwMs5pIdcSflRJ+r/wvPj4eO3bswNatWx0/9957Ly6++GJs3boV6enp/F4FAPaMBKHS0lLJldWhQ4ewdetWJCYmol27dpgyZQpOnDiB999/H0Dt0MzYsWPxn//8B+np6Y4x0NjYWDRt2hQA8Oijj2LkyJFo3749Tp48iWnTpiE8PJxDAx4y+lnNnTsX7dq1c9SiWL16NV566SU8+OCDjsfIzs7G2LFj0b9/fwwYMACzZ89GWVkZxo0b598X18D44rPi98o3jHxWYWFh6Nmzp+T+ycnJiImJkdzO75XJzJ7OQ8b99NNPAgCnn7FjxwqCIAhjx44VBg8e7Nh+8ODBmtsLQu2Ut1atWglRUVFCmzZthNGjRwv79+/37wtrgIx+Vq+++qrQo0cPIS4uToiPjxf69u0rvP7664LVapU87pw5c4R27doJUVFRwoABA4SNGzf68VU1TL74rPi98g2jn5Wc2pRrfq/MYxEEQfBb5ENEREQkw5wRIiIiMhWDESIiIjIVgxEiIiIyFYMRIiIiMhWDESIiIjIVgxEiIiIyFYMRIiIiMhWDESIiohC1evVqjBw5Eq1bt4bFYsGXX35p+DE+/vhj9OnTB3FxcWjfvj1efPFFw4/BYISIiChElZWVoXfv3pg7d65b9//2229x++23495778Vvv/2G119/Ha+88gpee+01Q4/DCqxEREQEi8WCL774AqNGjXLcVllZiccffxyLFi3C+fPn0bNnTzz//PMYMmQIAODvf/87qqur8cknnzjuM2fOHLzwwgs4evQoLBaLrudmzwgREREpysrKwoYNG7B48WJs374dt956K6699lrs27cPQG2wEhMTI7lPbGwsjh8/jiNHjuh+HgYjRERE5OTo0aN499138cknn2DQoEHo2LEjHn30UVx55ZV49913AQAjRozA559/jtzcXNhsNvz+++94+eWXAQCnTp3S/VwRPnkFREREFNR27NgBq9WKLl26SG6vrKxE8+bNAQATJkzAgQMHcMMNN6C6uhrx8fGYNGkSpk+fjrAw/f0dDEaIiIjISWlpKcLDw5GXl4fw8HDJ3xo3bgygNs/k+eefx7PPPov8/Hy0aNECubm5AIAOHTrofi4GI0REROSkb9++sFqtKCwsxKBBgzS3DQ8PR5s2bQAAixYtQkZGBlq0aKH7uRiMEBERhajS0lLs37/f8fuhQ4ewdetWJCYmokuXLrj99tsxZswYvPzyy+jbty9Onz6N3Nxc9OrVC9dffz3OnDmDTz/9FEOGDEFFRYUjx2TVqlWG2sGpvURERCFq5cqVGDp0qNPtY8eOxXvvvYfq6mrMnDkT77//Pk6cOIGkpCRcfvnlmDFjBi655BKcOXMGI0eOxI4dOyAIAjIyMvDMM88gPT3dUDsYjBAREZGpOLWXiIiITMVghIiIiEzFYISIiIhMxWCEiIiITMVghIiIiEzFYISIiIhMxWCEiIiITMVghIiIiEzFYISIiIhMxWCEiIiITMVghIiIiEzFYISIiIhM9f+EBS9SRG6h4QAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "plt.plot(context['index'], context['errors'], label='errors')\n", + "for anomaly in context['anomalies']:\n", + " plt.axvspan(anomaly[0], anomaly[1], color='red', alpha=0.3)\n", + "plt.legend()\n", + "plt.show()" + ] } ], "metadata": { From afd81e242a62be8b75c7746b8f83f5e2bb838c69 Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Mon, 16 Feb 2026 15:26:35 -0500 Subject: [PATCH 07/10] Updated primitive to have cleaner hyperparameters --- orion/primitives/chronos2.py | 13 ++----------- .../jsons/orion.primitives.chronos2.Chronos2.json | 4 ---- 2 files changed, 2 insertions(+), 15 deletions(-) diff --git a/orion/primitives/chronos2.py b/orion/primitives/chronos2.py index 0ff960e4..9923069f 100644 --- a/orion/primitives/chronos2.py +++ b/orion/primitives/chronos2.py @@ -20,19 +20,12 @@ class Chronos2: """Chronos2 model for timeseries forecasting. Args: - window_size (int): - Window size of each sample. Default to 256. - step (int): - Stride length between samples. Default to 1. pred_len (int): Prediction horizon length. Default to 1. repo_id (str): Directory of the model checkpoint. Default to "amazon/chronos-2" batch_size(int): Size of one batch. Default to 32. - freq (int): - Frequency. TimesFM expects a categorical indicator valued in {0, 1, 2}. - Default to 0. target (int): Index of target column in multivariate case. Default to 0. start_time (datetime): @@ -42,7 +35,6 @@ class Chronos2: """ def __init__(self, - window_size=256, pred_len=1, repo_id="amazon/chronos-2", batch_size=32, @@ -50,7 +42,6 @@ def __init__(self, start_time=pd.to_datetime("2000-01-01 00:00:00"), time_interval=600): - self.window_size = window_size self.pred_len = pred_len self.batch_size = batch_size self.target = f"{target}" @@ -65,7 +56,7 @@ def predict(self, X, force=False): Args: X (ndarray): - input timeseries. + input timeseries with shape (n_windows, window_size, n_features). Return: ndarray: forecasted timeseries. 
@@ -75,7 +66,7 @@ def predict(self, X, force=False): outs = [] for i in range(0, n_windows, self.batch_size): - x_batch = self.convert_to_df(X[i:i+self.batch_size, :self.window_size], start_batch_at = i) + x_batch = self.convert_to_df(X[i:i+self.batch_size], start_batch_at = i) y_batch = self.model.predict_df( df=x_batch, prediction_length=self.pred_len, diff --git a/orion/primitives/jsons/orion.primitives.chronos2.Chronos2.json b/orion/primitives/jsons/orion.primitives.chronos2.Chronos2.json index 9630e596..1fdc5a29 100644 --- a/orion/primitives/jsons/orion.primitives.chronos2.Chronos2.json +++ b/orion/primitives/jsons/orion.primitives.chronos2.Chronos2.json @@ -33,10 +33,6 @@ }, "hyperparameters": { "fixed": { - "window_size": { - "type": "int", - "default": 256 - }, "pred_len": { "type": "int", "default": 1 From 4413c8a0088225fed94b920b9f982198df1098ed Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Wed, 18 Feb 2026 14:36:38 -0500 Subject: [PATCH 08/10] Fixed lints --- orion/primitives/chronos2.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/orion/primitives/chronos2.py b/orion/primitives/chronos2.py index 9923069f..2526eee4 100644 --- a/orion/primitives/chronos2.py +++ b/orion/primitives/chronos2.py @@ -4,15 +4,14 @@ The model implementation can be found at https://huggingface.co/amazon/chronos-2 -Note: This primitive assumes that Chronos2 doesn't care about specific timestamps +Note: This primitive assumes that Chronos2 doesn't care about specific timestamps of the data. We fill in the timestamps with a linear sequence of timestamps in order for the model to work. """ -import sys -import torch import numpy as np import pandas as pd +import torch from chronos import Chronos2Pipeline @@ -66,7 +65,7 @@ def predict(self, X, force=False): outs = [] for i in range(0, n_windows, self.batch_size): - x_batch = self.convert_to_df(X[i:i+self.batch_size], start_batch_at = i) + x_batch = self.convert_to_df(X[i:i + self.batch_size], start_batch_at=i) y_batch = self.model.predict_df( df=x_batch, prediction_length=self.pred_len, @@ -75,7 +74,7 @@ def predict(self, X, force=False): timestamp_column="timestamp", target=self.target, ) - + y_batch = y_batch.sort_values(["item_id", "timestamp"]) preds = np.stack( y_batch.groupby("item_id", sort=False)["predictions"] @@ -86,10 +85,9 @@ def predict(self, X, force=False): return np.concatenate(outs, axis=0) - def convert_to_df(self, x_batch, start_batch_at=0): n_windows_in_batch, window_size, n_features = x_batch.shape - + rows = [] for window in range(n_windows_in_batch): for data_entry in range(window_size): @@ -100,4 +98,4 @@ def convert_to_df(self, x_batch, start_batch_at=0): }) rows = pd.DataFrame(rows) - return rows \ No newline at end of file + return rows From 99dd6171383cd6465e2f6ea32786c91c8c149870 Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Wed, 18 Feb 2026 17:18:08 -0500 Subject: [PATCH 09/10] Fix failed tests --- orion/primitives/chronos2.py | 1 + setup.py | 2 +- tutorials/pipelines/chronos2.ipynb | 47 ++++++++---------------------- 3 files changed, 14 insertions(+), 36 deletions(-) diff --git a/orion/primitives/chronos2.py b/orion/primitives/chronos2.py index 2526eee4..de9c4aab 100644 --- a/orion/primitives/chronos2.py +++ b/orion/primitives/chronos2.py @@ -12,6 +12,7 @@ import numpy as np import pandas as pd import torch + from chronos import Chronos2Pipeline diff --git a/setup.py b/setup.py index 427f671e..3341ecd3 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ "jax;python_version>='3.11'", 
#chronos2 - 'chronos', + 'chronos-forecasting>=2.2.0,<2.3.0', 'wrapt>=1.14,<1.15', ] diff --git a/tutorials/pipelines/chronos2.ipynb b/tutorials/pipelines/chronos2.ipynb index 5350cdfb..3fe76fab 100644 --- a/tutorials/pipelines/chronos2.ipynb +++ b/tutorials/pipelines/chronos2.ipynb @@ -234,22 +234,24 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "/home/baranov/miniconda/envs/orion310/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + "/home/baranov/miniconda/envs/test_orion_dependencies/lib/python3.11/site-packages/mlblocks/discovery.py:17: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n", + " import pkg_resources\n", + "/home/baranov/miniconda/envs/test_orion_dependencies/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", " from .autonotebook import tqdm as notebook_tqdm\n", - "2026-02-16 15:13:33.647160: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used.\n", - "2026-02-16 15:13:33.679684: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", - "2026-02-16 15:13:33.679717: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", - "2026-02-16 15:13:33.679750: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", - "2026-02-16 15:13:33.686499: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", + "2026-02-18 17:13:34.414001: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used.\n", + "2026-02-18 17:13:34.447258: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", + "2026-02-18 17:13:34.447291: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", + "2026-02-18 17:13:34.447322: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", + "2026-02-18 17:13:34.454352: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", "To enable the following instructions: AVX2 AVX512F FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", - "2026-02-16 15:13:34.373503: W 
tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n"
+       "2026-02-18 17:13:35.442879: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n"
       ]
      }
     ],
@@ -820,36 +822,11 @@
    "source": [
     "contextual_f1_score(ground_truth, anomalies, start = start, end = end, weighted=False)"
    ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "image/png": "[... base64-encoded PNG of the errors/anomalies matplotlib plot omitted ...]",
-      "text/plain": [
-       "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "plt.plot(context['index'], context['errors'], label='errors')\n", - "for anomaly in context['anomalies']:\n", - " plt.axvspan(anomaly[0], anomaly[1], color='red', alpha=0.3)\n", - "plt.legend()\n", - "plt.show()" - ] } ], "metadata": { "kernelspec": { - "display_name": "orion310", + "display_name": "test_orion_dependencies", "language": "python", "name": "python3" }, @@ -863,7 +840,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.18" + "version": "3.11.14" }, "orig_nbformat": 4 }, From 223c89e0160f809115ff30b5739453070b304e87 Mon Sep 17 00:00:00 2001 From: AllenBaranov Date: Thu, 19 Feb 2026 02:59:19 -0500 Subject: [PATCH 10/10] Made notebook smaller to run faster --- tutorials/pipelines/chronos2.ipynb | 512 +++-------------------------- 1 file changed, 38 insertions(+), 474 deletions(-) diff --git a/tutorials/pipelines/chronos2.ipynb b/tutorials/pipelines/chronos2.ipynb index 3fe76fab..48f1531b 100644 --- a/tutorials/pipelines/chronos2.ipynb +++ b/tutorials/pipelines/chronos2.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -19,207 +19,14 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - " timestamp 0 1 2 3 4 5 6 7 8 ... 15 \\\n", - "0 1222819200 -0.366359 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", - "1 1222840800 -0.394108 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", - "2 1222862400 0.403625 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", - "3 1222884000 -0.362759 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", - "4 1222905600 -0.370746 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n", - "\n", - " 16 17 18 19 20 21 22 23 24 \n", - "0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", - "1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", - "2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", - "3 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", - "4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", - "\n", - "[5 rows x 26 columns]" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "signal_name = 'multivariate/S-1'\n", "\n", "data = load_signal(signal_name)\n", - "data = data[:10000]\n", + "data = data[8000:10000]\n", "\n", "data.head()" ] @@ -234,27 +41,9 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/baranov/miniconda/envs/test_orion_dependencies/lib/python3.11/site-packages/mlblocks/discovery.py:17: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n", - " import pkg_resources\n", - "/home/baranov/miniconda/envs/test_orion_dependencies/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n", - "2026-02-18 17:13:34.414001: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used.\n", - "2026-02-18 17:13:34.447258: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", - "2026-02-18 17:13:34.447291: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", - "2026-02-18 17:13:34.447322: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", - "2026-02-18 17:13:34.454352: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", - "To enable the following instructions: AVX2 AVX512F FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", - "2026-02-18 17:13:35.442879: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n" - ] - } - ], + "outputs": [], "source": [ "from mlblocks import MLPipeline\n", "\n", @@ -286,7 +75,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -332,25 +121,9 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - 
"['mlstars.custom.timeseries_preprocessing.time_segments_aggregate',\n", - " 'sklearn.impute.SimpleImputer',\n", - " 'mlstars.custom.timeseries_preprocessing.rolling_window_sequences',\n", - " 'orion.primitives.chronos2.Chronos2',\n", - " 'orion.primitives.timeseries_errors.regression_errors',\n", - " 'orion.primitives.timeseries_anomalies.find_anomalies']" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "pipeline.primitives" ] @@ -371,20 +144,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['X', 'index'])" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "context = pipeline.fit(data, output_=0)\n", "context.keys()" @@ -392,40 +154,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "entry at 1222819200 has value [-0.36635895 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. ]\n", - "entry at 1222840800 has value [-0.39410778 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. ]\n", - "entry at 1222862400 has value [0.4036246 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. ]\n", - "entry at 1222884000 has value [-0.36275906 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. ]\n", - "entry at 1222905600 has value [-0.37074649 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0.\n", - " 0. 
]\n" - ] - } - ], + "outputs": [], "source": [ "for i, x in list(zip(context['index'], context['X']))[:5]:\n", " print(\"entry at {} has value {}\".format(i, x))" @@ -444,20 +175,9 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['index', 'X'])" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "step = 1\n", "\n", @@ -485,20 +205,9 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['index', 'X', 'y', 'target_index'])" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "step = 2\n", "\n", @@ -508,20 +217,9 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "X shape = (9750, 250, 25)\n", - "y shape = (9750, 1)\n", - "index shape = (9750,)\n", - "target index shape = (9750,)\n" - ] - } - ], + "outputs": [], "source": [ "# after slicing X into multiple sub-sequences\n", "# we obtain a 3 dimensional matrix X where\n", @@ -548,20 +246,9 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['index', 'target_index', 'X', 'y', 'y_hat'])" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "step = 3\n", "\n", @@ -571,20 +258,9 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(9750, 1)" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "context['y_hat'].shape" ] @@ -606,20 +282,9 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['index', 'target_index', 'y_hat', 'X', 'y', 'errors'])" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "step = 4\n", "\n", @@ -644,20 +309,9 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['index', 'target_index', 'y_hat', 'errors', 'X', 'y', 'anomalies'])" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "step = 5\n", "\n", @@ -667,21 +321,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[1.22821920e+09, 1.22945040e+09, 9.14337515e-01],\n", - " [1.40380560e+09, 1.40596560e+09, 2.28091527e-03]])" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "context['anomalies']" ] @@ -701,54 +343,9 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - " start end\n", - "0 1398168000 1407823200" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "from orion.data import load_anomalies\n", "\n", @@ -758,20 +355,9 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[(1228219200.0, 1229450400.0), (1403805600.0, 1405965600.0)]" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "anomalies = []\n", "for ano in context['anomalies']:\n", @@ -781,20 +367,9 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(None, 1, 0, 1)" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "from orion.evaluation import contextual_confusion_matrix, contextual_f1_score\n", "\n", @@ -805,20 +380,9 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "0.6666666666666666" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "contextual_f1_score(ground_truth, anomalies, start = start, end = end, weighted=False)" ] @@ -840,7 +404,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.14" + "version": "3.10.0" }, "orig_nbformat": 4 },