From d3b84cbdd5f9b083d632eccdb40c4a358416c247 Mon Sep 17 00:00:00 2001 From: oleksost Date: Wed, 13 Aug 2025 21:10:48 +0000 Subject: [PATCH 01/18] varlen maba --- fast_llm/layers/ssm/config.py | 20 ++++++++ fast_llm/layers/ssm/mamba2.py | 74 +++++++++++++++++++++++----- fast_llm/layers/ssm/preprocessors.py | 65 ++++++++++++++++++++++++ fast_llm/models/ssm/config.py | 5 ++ fast_llm/models/ssm/model.py | 2 + 5 files changed, 153 insertions(+), 13 deletions(-) create mode 100644 fast_llm/layers/ssm/preprocessors.py diff --git a/fast_llm/layers/ssm/config.py b/fast_llm/layers/ssm/config.py index 3b21ca69..194063a2 100644 --- a/fast_llm/layers/ssm/config.py +++ b/fast_llm/layers/ssm/config.py @@ -12,6 +12,26 @@ from fast_llm.tensor import Initializer +class BaseSSMKwargs: + _kwargs_attributes = { + "cu_seqlens": "cu_seqlens", + "seq_idx": "seq_idx", + "ssm_position_ids": "ssm_position_ids", + } + + _prefix = "" + + def __init_subclass__(cls, prefix="", **kwargs): + super().__init_subclass__(**kwargs) + cls._prefix = prefix + for attr, value in BaseSSMKwargs._kwargs_attributes.items(): + setattr(cls, value, f"{cls._prefix}_{value}" if cls._prefix else value) + + +class SSMKwargs(BaseSSMKwargs, prefix=""): + pass + + class SSMDimNames: # TODO: Use separate tensor space for different mixers so there is no risk of name conflict. state = "ssm_state" # State dimension (N), aka head size / num channels diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index 77c1b386..2b71f89b 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -1,3 +1,4 @@ +import inspect import logging import typing @@ -6,17 +7,28 @@ from fast_llm.engine.config_utils.tensor_space import DefaultDimNames, TensorDim, TensorSpace from fast_llm.functional.config import ActivationType from fast_llm.layers.common.linear import InputParallelLinear, Linear, OutputParallelLinear -from fast_llm.layers.ssm.config import SSMConfig, SSMDimNames +from fast_llm.layers.ssm.config import SSMConfig, SSMDimNames, SSMKwargs from fast_llm.layers.ssm.mamba_layer import init_A, init_dtprojbias from fast_llm.layers.transformer.config import TransformerConfig, TransformerDimNames, TransformerKwargs from fast_llm.layers.transformer.transformer import Mixer from fast_llm.tensor import ParameterMeta, init_kaiming_, init_ones_, init_uniform_centered_ from fast_llm.utils import Assert, div, get_lr_scale +_mamba_varlen = False try: from mamba_ssm.ops.selective_scan_interface import selective_scan_fn # noqa _mamba_available = True + sig = inspect.signature(selective_scan_fn) + if "position_indices" in sig.parameters: + _mamba_varlen = True + logging.warning("Using selective_scan_fn from varlen_mamba that supports packing") + else: + _mamba_varlen = False + logging.warning("Using selective_scan_fn from original mamba without packing support") + # for training with packing install https://github.com/jxiw/varlen_mamba + # see https://github.com/jxiw/M1/blob/main/HYBRID_PACK.md + except (ImportError, RuntimeError): _mamba_available = False @@ -143,8 +155,16 @@ def __init__( ) def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[torch.Tensor, torch.Tensor | None]: + """ + Note, we are nto doing "read" sequence-tensor parallel trainign here, since inner_projection is gathered over all GPUS. + This is also desired, since the currently used mamba kernel does not support STP. + TODO: use correct kernel from Mamba2! 
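+
+        Packing support (a descriptive note on the code below): SSMKwargs.cu_seqlens,
+        SSMKwargs.seq_idx and SSMKwargs.ssm_position_ids are filled by Mamba2Preprocessor
+        when the batch contains packed (variable-length) sequences; when cu_seqlens is None
+        the layer falls back to the regular dense path.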
+ """ assert _mamba_available assert _causal_conv1d_available + cu_seqlens = kwargs[SSMKwargs.cu_seqlens] + seq_idx = kwargs[SSMKwargs.seq_idx] + position_indices = kwargs[SSMKwargs.ssm_position_ids] # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) # -> (batch/sequence, sequence/batch, inner_projection) @@ -174,9 +194,20 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) .flatten(1, 2) ) - x = _causal_conv1d_fn(x=x, weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu") + + if cu_seqlens is not None: + # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 + x = _causal_conv1d_fn( + x=x.transpose(1, 2).contiguous().transpose(1, 2), + weight=self.conv1d_weight.squeeze(1), + bias=self.conv1d_bias, + seq_idx=seq_idx, + activation="silu", + ) else: x = _causal_conv1d_fn(x=x, weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu") + + if not self._config.repeat_kv_before_conv: x = ( x.unflatten(1, (self._local_head_groups, self._config.state_size)) .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) @@ -203,17 +234,34 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ self._debug_log(c, "c", self._BC_DIMS, kwargs) self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) - y = selective_scan_fn( - x, - dt, - -torch.exp(self.A_log.float()), - b, - c, - self.D.float(), - z, - delta_bias=self.dt_proj_bias.float(), - delta_softplus=True, - ) + if not _mamba_varlen: + + y = selective_scan_fn( + x, + dt, + -torch.exp(self.A_log.float()), + b, + c, + self.D.float(), + z, + delta_bias=self.dt_proj_bias.float(), + delta_softplus=True, + ) + else: + position_indices = position_indices if cu_seqlens is not None else None + + y = selective_scan_fn( + x, + dt, + -torch.exp(self.A_log.float()), + b, + c, + self.D.float(), + z, + delta_bias=self.dt_proj_bias.float(), + delta_softplus=True, + position_indices=position_indices, + ) if self._debug_level: self._debug_log(y, "y", self._XZ_DIMS, kwargs) diff --git a/fast_llm/layers/ssm/preprocessors.py b/fast_llm/layers/ssm/preprocessors.py new file mode 100644 index 00000000..d7e3a797 --- /dev/null +++ b/fast_llm/layers/ssm/preprocessors.py @@ -0,0 +1,65 @@ +import logging +import typing + +import torch + +from fast_llm.engine.base_model.config import Preprocessor +from fast_llm.engine.config_utils.tensor_space import TensorSpace +from fast_llm.layers.ssm.config import SSMKwargs +from fast_llm.layers.transformer.config import TransformerKwargs +from fast_llm.models.ssm.config import HybridSSMBaseModelConfig +from fast_llm.utils import Assert + +logger = logging.getLogger(__name__) + + +class Mamba2Preprocessor(Preprocessor): + def __init__(self, config: HybridSSMBaseModelConfig, tensor_space: TensorSpace): + self._config = config + self._tensor_space = tensor_space + self._distributed_config = self._tensor_space.distributed_config + self._transformer_dim_names = config.transformer._transformer_dim_names + + def preprocess(self, batch, kwargs: dict[str, typing.Any]) -> None: + """ + Simplified preprocessor that does not take into account micro-sequences. 
+ """ + sequence_lengths = kwargs[TransformerKwargs.sequence_lengths] + if "cu_seqlens" in kwargs: + cu_seqlens_k = kwargs[TransformerKwargs.cu_seqlens_k] + cu_seqlens_q = kwargs[TransformerKwargs.cu_seqlens_q] + Assert.eq( + cu_seqlens_k.shape[0], + cu_seqlens_q.shape[0], + msg="cu_seqlens_k and cu_seqlens_q have different lengths, is micro_sequence_length being used? This is currently not supported for Mamba.", + ) + Assert.all_equal(cu_seqlens_k, cu_seqlens_q) + cu_seqlens = cu_seqlens_k + else: + seqlens = torch.cat(sequence_lengths) + cu_seqlens = torch.cat( + ( + torch.zeros(1, dtype=torch.int32, device=self._tensor_space.distributed.device), + torch.cumsum(seqlens, dim=0, dtype=torch.int32).to(self._tensor_space.distributed.device), + ) + ) + kwargs[SSMKwargs.cu_seqlens] = cu_seqlens + # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 + kwargs[SSMKwargs.seq_idx] = torch.cat( + [ + torch.full((s,), i, dtype=torch.int32, device=cu_seqlens.device) + for i, s in enumerate(cu_seqlens[1:] - cu_seqlens[:-1]) + ], + dim=0, + ).unsqueeze(0) + + sequence_lengths = kwargs.get(TransformerKwargs.sequence_lengths) + sequence_k = kwargs[TransformerKwargs.sequence_k_dim].size + sequence_q = kwargs[TransformerKwargs.sequence_q_dim].size + position_ids = torch.stack( + [torch.cat([torch.arange(x) for x in sample_lens]) for sample_lens in sequence_lengths] + ).to(self._tensor_space.distributed.device, dtype=torch.int64) + position_ids = position_ids[ + :, sequence_k - sequence_q : sequence_k + ] # this is only needed if we do micro-sequences? + kwargs[SSMKwargs.ssm_position_ids] = position_ids.to(torch.int32) diff --git a/fast_llm/models/ssm/config.py b/fast_llm/models/ssm/config.py index 5dca41a7..34f3151a 100644 --- a/fast_llm/models/ssm/config.py +++ b/fast_llm/models/ssm/config.py @@ -207,6 +207,11 @@ def get_trainer_class(cls) -> type["HybridSSMTrainer"]: def _validate(self) -> None: super()._validate() + Assert.eq( + self.batch.micro_sequence_length, + self.batch.sequence_length, + msg="Micro-sequences not supported for SSMs. at htis point", + ) if (name := self.model.base_model.distillation_model) is None: Assert.empty(self.reference_models) else: diff --git a/fast_llm/models/ssm/model.py b/fast_llm/models/ssm/model.py index 29f115bd..04dcbc29 100644 --- a/fast_llm/models/ssm/model.py +++ b/fast_llm/models/ssm/model.py @@ -6,6 +6,7 @@ from fast_llm.engine.inference.runner import InferenceRunner from fast_llm.layers.language_model.head import LanguageModelHead from fast_llm.layers.ssm.llamba_block import SSMBlock +from fast_llm.layers.ssm.preprocessors import Mamba2Preprocessor from fast_llm.layers.transformer.transformer import TransformerBlock from fast_llm.models.gpt.config import GPTBatchConfig from fast_llm.models.gpt.model import GPTBaseModel, GPTModel @@ -30,6 +31,7 @@ def __init__( distributed_config: DistributedConfig, ): super().__init__(config, distributed_config) + self._preprocessors.append(Mamba2Preprocessor(config, self._tensor_space)) def get_output_layers(self) -> list[Layer]: """ From 79a4565aaa52663418b66238fa2932721cb96046 Mon Sep 17 00:00:00 2001 From: oleksost Date: Wed, 13 Aug 2025 21:16:03 +0000 Subject: [PATCH 02/18] requirement --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 6ea98610..c2eb1f6f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -50,7 +50,7 @@ HUGGINGFACE = # To install on cpu environment (ex. 
for IDE support): # MAMBA_FORCE_BUILD=TRUE CAUSAL_CONV1D_FORCE_BUILD=TRUE CAUSAL_CONV1D_SKIP_CUDA_BUILD=TRUE pip install -e ".[CORE,SSM]" --no-build-isolation SSM = - mamba_ssm[causal-conv1d]==2.2.4 + mamba_ssm[causal-conv1d] @ git+https://github.com/jxiw/varlen_mamba.git@varlen_mamba cartesia_pytorch>=0.0.2 # GENERATION = From 1657a1bce86be02df632df3a5c04e00422706d16 Mon Sep 17 00:00:00 2001 From: oleksost Date: Wed, 13 Aug 2025 21:19:11 +0000 Subject: [PATCH 03/18] docker --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 0f3c2d8c..7cf95101 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,8 +29,9 @@ ENV PIP_CONSTRAINT="" # There is no pre-build mamba image for pytorch 2.8, we build it before the rest to avoid rebuilds. # We need to compile from the repo because of https://github.com/state-spaces/mamba/issues/720 (same for causal-conv1d) # We set the number of workers to avoid OOM when compiling on laptop. (TODO: Can we make it configurable?) +# Using varlen_mamba for variable length sequence support RUN MAX_JOBS=2 pip install --no-build-isolation "causal-conv1d@git+https://github.com/Dao-AILab/causal-conv1d@2a288a1" -RUN MAX_JOBS=2 pip install --no-build-isolation "mamba_ssm[causal-conv1d]@git+https://github.com/state-spaces/mamba@4a8a2a2" +RUN MAX_JOBS=2 pip install --no-build-isolation "mamba_ssm[causal-conv1d]@git+https://github.com/jxiw/varlen_mamba@varlen_mamba" # Copy dependency files with universal write permissions for all users. COPY --chmod=777 setup.py setup.cfg pyproject.toml ./ COPY --chmod=777 ./fast_llm/__init__.py fast_llm/ From 2b171eb1649f16e2feea15d9787a1c0820bf9655 Mon Sep 17 00:00:00 2001 From: oleksost Date: Fri, 15 Aug 2025 19:06:15 +0000 Subject: [PATCH 04/18] test varlen mamba --- tests/test_ssms.py | 271 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 269 insertions(+), 2 deletions(-) diff --git a/tests/test_ssms.py b/tests/test_ssms.py index 694faa55..2a338f1b 100644 --- a/tests/test_ssms.py +++ b/tests/test_ssms.py @@ -1,19 +1,60 @@ +import inspect +import itertools import pathlib +from functools import partial import pytest import torch +from mamba2 import Mamba2 from fast_llm.config import NoAutoValidate from fast_llm.engine.checkpoint.config import CheckpointLoadConfig +from fast_llm.engine.config_utils.tensor_space import TensorSpace from fast_llm.engine.distributed.config import DistributedConfig, PhaseType +from fast_llm.engine.distributed.distributed import Distributed from fast_llm.engine.schedule.config import ScheduleConfig from fast_llm.engine.schedule.runner import ScheduleRunner from fast_llm.engine.schedule.schedule import Schedule -from fast_llm.layers.transformer.config import TransformerKwargs +from fast_llm.layers.ssm.config import SSMConfig +from fast_llm.layers.ssm.llamba_block import SSMBlock +from fast_llm.layers.transformer.config import TransformerConfig, TransformerKwargs from fast_llm.models.gpt.config import GPTBatchConfig -from fast_llm.models.ssm.config import LLambaHuggingfaceCheckpointFormat +from fast_llm.models.ssm.config import HybridSSMBaseModelConfig, LLambaHuggingfaceCheckpointFormat from fast_llm.models.ssm.model import HybridSSMModel +_mamba_varlen = False +try: + from mamba_ssm.ops.selective_scan_interface import selective_scan_fn # noqa + + _mamba_available = True + sig = inspect.signature(selective_scan_fn) + if "position_indices" in sig.parameters: + _mamba_varlen = True + else: + _mamba_varlen = False + # for training with packing install 
https://github.com/jxiw/varlen_mamba + # see https://github.com/jxiw/M1/blob/main/HYBRID_PACK.md + +except (ImportError, RuntimeError): + _mamba_available = False + + +def get_hybrid_config(hybrid_block_layout=["t", "m2"], prediction_heads=1, default_mtp_type=None): + hidden_size = 512 + config = HybridSSMBaseModelConfig( + transformer=TransformerConfig(num_layers=len(hybrid_block_layout), hidden_size=hidden_size), + ssm=SSMConfig(d_xb=hidden_size, dt_rank=10, d_inner=hidden_size * 2), + hybrid_block_layout=hybrid_block_layout, + prediction_heads=prediction_heads, + default_mtp_type=default_mtp_type, + init_method_std_embed=0.02, + init_method_min_embed=-0.02, + init_method_max_embed=0.02, + use_position_embeddings=True, + tie_word_embeddings=False, + ) + return config + @pytest.mark.skip("Disabled due to cartesia_pytorch installation issue") @pytest.mark.slow @@ -80,3 +121,229 @@ def test_load_from_llamba_checkpoint(): logits = input_data[0][1]["logits"].cpu() assert torch.allclose(logits, hf_logits, atol=1e-2) + + +@pytest.fixture +def distributed_config(): + return DistributedConfig( + tensor_parallel=1, + pipeline_parallel=1, + sequence_data_parallel=1, + local_world_size=1, + world_size=1, + ) + + +@pytest.fixture +def distributed(distributed_config): + return Distributed(config=distributed_config) + + +def materialize_meta_tensors(model, tensor_space): + # Materialize parameters that are on meta device + for name, param in model.named_parameters(): + if param.device.type == "meta": + # Check if the parameter is a custom tensor type + if hasattr(param, "tensor_name") and hasattr(param, "init_parameter"): + param_data = param.new_empty(param.shape, device="cuda") + # Initialize param_data + param.init_parameter(param_data, tensor_space.distributed) + # Replace the parameter in the module + module_path, param_name = name.rsplit(".", 1) if "." 
in name else (None, name) + module = model + if module_path is not None: + for part in module_path.split("."): + module = getattr(module, part) + param = torch.nn.Parameter(param_data, requires_grad=param.requires_grad) + # TODO: add param_grad_is_zero etc., grad_buffer, etc., see test_mlp_recomputation + param.grad = None + param.grad_buffer = torch.empty_like(param) + param.param_grad_is_zero = True + module._parameters[param_name] = param + return model + + +def unpack(packed_hidden_states, cu_seqlens): + batch_size = packed_hidden_states.shape[0] + package_num = cu_seqlens.shape[0] - 1 + seq_len = (cu_seqlens[1:] - cu_seqlens[:-1]).max() + hidden_dim = packed_hidden_states.shape[2] + hidden_states = torch.zeros( + package_num * batch_size, + seq_len, + hidden_dim, + dtype=packed_hidden_states.dtype, + device=packed_hidden_states.device, + ) + for j in range(batch_size): + for i in range(package_num): + line = j * package_num + i + hidden_states[line, : cu_seqlens[i + 1] - cu_seqlens[i], :] = packed_hidden_states[ + j, cu_seqlens[i] : cu_seqlens[i + 1], : + ] + return hidden_states + + +def pack(hidden_states, cu_seqlens, batch_size): + package_num, seq_len, hidden_dim = hidden_states.shape + seq_len_list = cu_seqlens[1:] - cu_seqlens[:-1] + seq_len_list_3d = seq_len_list.unsqueeze(1).unsqueeze(2) + indices_3d = ( + torch.arange(seq_len, device=hidden_states.device).unsqueeze(0).unsqueeze(2).repeat(package_num, 1, hidden_dim) + ) + mask_3d = indices_3d < seq_len_list_3d.repeat(batch_size, 1, 1) + packed_hidden_states = hidden_states[mask_3d].view(batch_size, -1, hidden_dim) + return packed_hidden_states + + +def generate_random_cu_seqlens(seq_len, packages_num=2): + if packages_num < 1: + raise ValueError("packages_num must be at least 1") + + # base size of each chunk, and how many get an extra token + base, rem = divmod(seq_len, packages_num) + # lengths: e.g. for seq_len=10, packages=3 → [4,3,3] + lengths = [base + 1 if i < rem else base for i in range(packages_num)] + + # split points exclude the final cumulative (seq_len) + split_points = list(itertools.accumulate(lengths))[:-1] + + # cu_seqlens = [0] + split_points + [seq_len] + cu_seqlens = [0] + split_points + [seq_len] + + # index: for each chunk, we emit 0,1,...,length-1 + index = [] + for length in lengths: + index.extend(range(length)) + + # sanity check + assert len(cu_seqlens) - 1 == packages_num + assert sum(lengths) == seq_len + assert len(index) == seq_len + + return cu_seqlens, index + + +# Quick and dirty test for Mamba2 varlen block from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/tests/pack_mamba/test_mamba_layer.py +# TODO: integrate in the testing framework +@pytest.mark.slow +@pytest.mark.skipif(not torch.cuda.is_available(), reason="No CUDA available") +@pytest.mark.skipif(not _mamba_available, reason="Mamba2 is not available") +@pytest.mark.skipif(not _mamba_varlen, reason="Mamba2 varlen is not available") +def test_mamba_varlen_block(distributed_config, distributed): + """ + Compare that the output and grads of packed and unpacked Mamba2 varlen block are the same. 
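+
+    Both blocks share the same weights. The packed block sees each batch row with all documents
+    concatenated back to back plus cu_seqlens / seq_idx / ssm_position_ids; the reference block
+    sees the unpacked batch (one document per row, zero-padded) and no packing kwargs. The
+    unpacked output is re-packed and compared to the packed output, as are the parameter gradients.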
+ """ + hybrid_config = get_hybrid_config(hybrid_block_layout=["m2", "t"]) + tensor_space = TensorSpace(distributed_config=distributed_config) + tensor_space.setup(distributed) + hybrid_config.setup_tensor_space(tensor_space) + layer_idx = 0 + + mixer_cls = partial(Mamba2, block_index=layer_idx) + block_packed = SSMBlock( + hybrid_config.transformer, + hybrid_config.ssm, + mixer_cls=mixer_cls, + tensor_space=tensor_space, + block_index=layer_idx, + ) + block_ref = SSMBlock( + hybrid_config.transformer, + hybrid_config.ssm, + mixer_cls=mixer_cls, + tensor_space=tensor_space, + block_index=layer_idx, + ) + device = "cuda" + materialize_meta_tensors(block_packed, tensor_space) + materialize_meta_tensors(block_ref, tensor_space) + block_ref.load_state_dict(block_packed.state_dict()) + block_packed.to(device) + block_ref.to(device) + + batch_size = 2 + seq_len = 64 + packages_num = 2 + hidden_dim = hybrid_config.transformer.hidden_size + + cu_seqlens, index = generate_random_cu_seqlens(seq_len, packages_num=packages_num) + cu_seqlens = torch.tensor(cu_seqlens).cuda() + ssm_position_ids = torch.tensor(index, dtype=torch.int32).unsqueeze(0).expand(batch_size, -1).contiguous().cuda() + seq_idx = ( + torch.cat( + [ + torch.full((s,), i, dtype=torch.int32, device=cu_seqlens.device) + for i, s in enumerate(cu_seqlens[1:] - cu_seqlens[:-1]) + ], + dim=0, + ) + .unsqueeze(0) + .repeat(batch_size, 1) + ) + + # Generate packed_hidden_states with random values for testing + hidden_states_list = [ + torch.randn(l, hidden_dim, device=device, dtype=torch.bfloat16, requires_grad=True) + for l in (cu_seqlens[1:] - cu_seqlens[:-1]).tolist() + ] + packed_hidden_states = torch.cat(hidden_states_list, dim=0).unsqueeze(0) + packed_hidden_states = packed_hidden_states.expand(batch_size, -1, -1).contiguous() + # hidden_states should be forwarded without cu_seqlens + hidden_states = unpack(packed_hidden_states, cu_seqlens) + + # Check: sum of seq_len of item in hidden_states_list should be equal to seq_len of packed_hidden_states + assert sum([hs.shape[0] for hs in hidden_states_list]) == packed_hidden_states.shape[1] + # Check: max of seq_len of item in hidden_states_list should be equal to seq_len of hidden_states + assert max([hs.shape[0] for hs in hidden_states_list]) == hidden_states.shape[1] + + output_states_packed = block_packed( + packed_hidden_states, + {"cu_seqlens": cu_seqlens, "seq_idx": seq_idx, "ssm_position_ids": ssm_position_ids, "sequence_first": False}, + ) + output_states_unpacked = block_ref( + hidden_states.clone(), {"cu_seqlens": None, "seq_idx": None, "ssm_position_ids": None, "sequence_first": False} + ) + tollerance = 1e-4 + assert output_states_packed.shape == packed_hidden_states.shape + assert output_states_unpacked.shape == hidden_states.shape + assert not torch.isnan(hidden_states).any() + assert not torch.isinf(hidden_states).any() + + output_states_unpacked = pack(output_states_unpacked, cu_seqlens, batch_size) + torch.allclose(output_states_packed, output_states_unpacked, atol=tollerance) + + loss = output_states_packed.sum() + loss.backward() + loss_ref = output_states_unpacked.sum() + loss_ref.backward() + assert torch.allclose(block_packed.mixer.conv1d_weight.grad, block_ref.mixer.conv1d_weight.grad, atol=tollerance) + assert torch.allclose(block_packed.mixer.conv1d_bias.grad, block_ref.mixer.conv1d_bias.grad, atol=tollerance) + assert torch.allclose( + block_packed.mixer.in_proj.weight.grad_buffer, block_ref.mixer.in_proj.weight.grad_buffer, atol=tollerance + ) + assert 
torch.allclose( + block_packed.mixer.out_proj.weight.grad_buffer, block_ref.mixer.out_proj.weight.grad_buffer, atol=tollerance + ) + assert torch.allclose( + block_packed.mixer.dt_in_proj.weight.grad_buffer, + block_ref.mixer.dt_in_proj.weight.grad_buffer, + atol=tollerance, + ) + + assert torch.allclose( + block_packed.mlp.layer_1.weight.grad_buffer, block_ref.mlp.layer_1.weight.grad_buffer, atol=tollerance + ) + assert torch.allclose( + block_packed.mlp.layer_1.bias.grad_buffer, block_ref.mlp.layer_1.bias.grad_buffer, atol=tollerance + ) + assert torch.allclose( + block_packed.mlp.layer_2.weight.grad_buffer, block_ref.mlp.layer_2.weight.grad_buffer, atol=tollerance + ) + assert torch.allclose( + block_packed.mlp.layer_2.bias.grad_buffer, block_ref.mlp.layer_2.bias.grad_buffer, atol=tollerance + ) + + +if __name__ == "__main__": + pytest.main([__file__]) From 115c1ec504a97589e9100c17fe93cecf7bdeb779 Mon Sep 17 00:00:00 2001 From: oleksost Date: Tue, 19 Aug 2025 16:07:10 +0000 Subject: [PATCH 05/18] wip --- fast_llm/layers/ssm/mamba2.py | 239 ++++++++++++++++++ .../make_hybrid_checkpoint_with_identity.py | 41 +++ .../make_hybrid_checkpoint_with_mil.py | 104 ++++++++ 3 files changed, 384 insertions(+) create mode 100644 fast_llm/models/ssm/external/make_hybrid_checkpoint_with_identity.py create mode 100644 fast_llm/models/ssm/external/make_hybrid_checkpoint_with_mil.py diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index 2b71f89b..155a782f 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -85,6 +85,9 @@ def __init__( self._local_inner_size = inner_dim.size self._local_xb_size = xb_dim.size + state_size = tensor_space[SSMDimNames.state].size + div(self._local_inner_size, state_size) + conv1d_dim = inner_dim if self._config.repeat_kv_before_conv else xb_dim self.conv1d_weight = ParameterMeta.from_dims( ( @@ -274,3 +277,239 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ # (batch/sequence, sequence/batch, local_heads * state) # -> (batch/local_sequence, local_sequence/batch, hidden) return self.out_proj(y) + + +class Mamba2Chunked(Mamba2): + """ + This code is adapted from https://github.com/jxiw/M1/blob/main/mamba2/hybrid_mamba_layer.py + """ + + _mixer_name: typing.ClassVar[str] = "mamba_2" + + _XZ_DIMS = ( + TransformerDimNames.batch, + SSMDimNames.composite_heads_and_head_dim, + TransformerDimNames.sequence_q, + ) + _BC_DIMS = ( + TransformerDimNames.batch, + SSMDimNames.composite_heads, + SSMDimNames.state, + TransformerDimNames.sequence_q, + ) + + def __init__( + self, + config: SSMConfig, + tensor_space: TensorSpace, + block_index: int, + transformer_config: TransformerConfig, + ): + super().__init__(tensor_space, block_index, debug_level=transformer_config.debug_transformer) + self._config: SSMConfig = config + Assert.eq(self._config.activation_type, ActivationType.silu) + layer_lr_scale: float | None = config.per_layer_lr_scale[block_index] if config.per_layer_lr_scale else None + lr_scale: float | tuple[float | None, ...] 
| None = get_lr_scale(self._config.mamba_lr_scale, layer_lr_scale) + + inner_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim] + xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_state] + hidden_dim: TensorDim = tensor_space[TransformerDimNames.hidden] + dt_rank_dim = tensor_space[SSMDimNames.dt_rank] + + self._local_heads = tensor_space[SSMDimNames.composite_heads].size + self._local_head_groups = tensor_space[SSMDimNames.head_groups].size + self._group_heads = div(self._local_heads, self._local_head_groups) + self._local_inner_size = inner_dim.size + self._local_xb_size = xb_dim.size + + self.nheads = inner_dim // self.config.n_heads + + conv1d_dim = inner_dim if self._config.repeat_kv_before_conv else xb_dim + self.conv1d_weight = ParameterMeta.from_dims( + ( + conv1d_dim, + tensor_space[DefaultDimNames.scalar], + tensor_space[SSMDimNames.convolution_kernel], + ), + init_method=init_uniform_centered_((conv1d_dim.global_size * self._config.conv_kernel_dimension) ** -0.5), + lr_scale=lr_scale, + ) + self.conv1d_bias = ParameterMeta.from_dims( + (conv1d_dim,), + init_method=init_uniform_centered_(self._config.conv_kernel_dimension**-0.5), + lr_scale=lr_scale, + ) + self.in_proj = OutputParallelLinear( + hidden_dim, + tensor_space[SSMDimNames.concatenated_inner_projection], + bias=config.add_bias_linear, + weight_init_method=init_kaiming_(transformer_config.hidden_size), + sequence_parallel=self._sequence_parallel, + lr_scale=lr_scale, + ) + + self.dt_in_proj = Linear( + hidden_dim, + self.nheads, + bias=config.add_bias_linear, + weight_init_method=init_kaiming_(transformer_config.hidden_size), + lr_scale=lr_scale, + ) + self.dt_proj = OutputParallelLinear( + dt_rank_dim, + inner_dim, + bias=False, + # Initialize special dt projection to preserve variance at initialization + weight_init_method=self._config.dt_init.get_init_method( + self._config.dt_rank**-0.5 * self._config.dt_scale + ), + sequence_parallel=self._sequence_parallel, + lr_scale=lr_scale, + ) + # define bias outside the linear layer since it's also used in the selective_scan_fn + self.dt_proj_bias = ParameterMeta.from_dims( + (inner_dim,), + init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), + lr_scale=lr_scale, + ) + self.A_log = ParameterMeta.from_dims( + (inner_dim, tensor_space[SSMDimNames.state]), + init_method=init_A(self._config.state_size, self._config.d_inner), + lr_scale=lr_scale, + weight_decay=False, + ) + self.D = ParameterMeta.from_dims( + (inner_dim,), + weight_decay=False, + init_method=init_ones_, + lr_scale=lr_scale, + ) + self.out_proj = InputParallelLinear( + inner_dim, + hidden_dim, + bias=config.add_bias_linear, + weight_init_method=init_kaiming_(self._config.d_inner), + sequence_parallel=self._sequence_parallel, + # TODO: lr_scale? + ) + + def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[torch.Tensor, torch.Tensor | None]: + """ + Note, we are nto doing "read" sequence-tensor parallel trainign here, since inner_projection is gathered over all GPUS. + This is also desired, since the currently used mamba kernel does not support STP. + TODO: use correct kernel from Mamba2! 
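+
+        For packed batches (a descriptive note, same convention as the Mamba2 mixer above):
+        seq_idx lets the causal-conv1d kernel restart the convolution at the start of each packed
+        document, and position_indices lets the variable-length selective scan restart its
+        recurrence there, so documents packed into one row do not share state.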
+ """ + assert _mamba_available + assert _causal_conv1d_available + cu_seqlens = kwargs[SSMKwargs.cu_seqlens] + seq_idx = kwargs[SSMKwargs.seq_idx] + position_indices = kwargs[SSMKwargs.ssm_position_ids] + + # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) + # -> (batch/sequence, sequence/batch, inner_projection) + inner_projection = self.in_proj(input_) + dt = self.dt_proj(self.dt_in_proj(input_)) + self.dt_proj_bias + # Standardize to (batch, sequence, inner_projection) + if kwargs[TransformerKwargs.sequence_first]: + inner_projection = inner_projection.transpose(0, 1) + dt = dt.transpose(0, 1) + + sequence_length = inner_projection.size(1) + + z, x, b, c = torch.split( + inner_projection, + [self._local_inner_size, self._local_xb_size, self._local_xb_size, self._local_inner_size], + dim=2, + ) + + # z: (batch, sequence, local_heads * state) -> (batch, local_heads * state, sequence) + z = z.transpose(1, 2) + + # x: (batch, sequence, local_head_groups * state) -> (batch, local_heads * state, sequence) + x = x.transpose(1, 2) + if self._config.repeat_kv_before_conv: + x = ( + x.unflatten(1, (self._local_head_groups, self._config.state_size)) + .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) + .flatten(1, 2) + ) + + if cu_seqlens is not None: + # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 + x = _causal_conv1d_fn( + x=x.transpose(1, 2).contiguous().transpose(1, 2), + weight=self.conv1d_weight.squeeze(1), + bias=self.conv1d_bias, + seq_idx=seq_idx, + activation="silu", + ) + else: + x = _causal_conv1d_fn(x=x, weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu") + + if not self._config.repeat_kv_before_conv: + x = ( + x.unflatten(1, (self._local_head_groups, self._config.state_size)) + .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) + .flatten(1, 2) + ) + + # b: (batch, sequence, local_head_groups * state) -> (batch, local_heads, state, sequence) + b = ( + b.transpose(1, 2) + .unflatten(1, (self._local_head_groups, self._config.state_size)) + .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) + ) + + # c: (batch, sequence, heads * state) -> (batch, heads, state, sequence) + c = c.transpose(1, 2).unflatten(1, (self._local_heads, self._config.state_size)) + + # dt: (batch, sequence, heads * state) -> (batch, heads * state, sequence) + dt = dt.transpose(1, 2) + + if self._debug_level: + self._debug_log(z, "z", self._XZ_DIMS, kwargs) + self._debug_log(x, "x", self._XZ_DIMS, kwargs) + self._debug_log(b, "b", self._BC_DIMS, kwargs) + self._debug_log(c, "c", self._BC_DIMS, kwargs) + self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) + + if not _mamba_varlen: + + y = selective_scan_fn( + x, + dt, + -torch.exp(self.A_log.float()), + b, + c, + self.D.float(), + z, + delta_bias=self.dt_proj_bias.float(), + delta_softplus=True, + ) + else: + position_indices = position_indices if cu_seqlens is not None else None + + y = selective_scan_fn( + x, + dt, + -torch.exp(self.A_log.float()), + b, + c, + self.D.float(), + z, + delta_bias=self.dt_proj_bias.float(), + delta_softplus=True, + position_indices=position_indices, + ) + + if self._debug_level: + self._debug_log(y, "y", self._XZ_DIMS, kwargs) + + # y: (batch, local_heads * state, sequence) -> (batch, sequence, local_heads * state) + y = y.transpose(1, 2)[:, :sequence_length] + if kwargs[TransformerKwargs.sequence_first]: + # TODO: Is contiguous needed? 
+ y = y.transpose(0, 1).contiguous() + # (batch/sequence, sequence/batch, local_heads * state) + # -> (batch/local_sequence, local_sequence/batch, hidden) + return self.out_proj(y) diff --git a/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_identity.py b/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_identity.py new file mode 100644 index 00000000..a0616ab6 --- /dev/null +++ b/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_identity.py @@ -0,0 +1,41 @@ +import gc + +import click +import torch +from transformers import AutoConfig, AutoModelForCausalLM + +from fast_llm.models.ssm.external.apriel_15b_hybrid.configuration_ssm_hybrid_apriel15b import AprielSSMHybridConfig +from fast_llm.models.ssm.external.apriel_15b_hybrid.modeling_ssm_hybrid_apriel15b import AprielSSMHybridForCausalLM + +device = "cuda" if torch.cuda.is_available() else "cpu" + + +@click.command() +@click.option("--identity_index", type=int, required=True) +@click.option("--save_dir", type=str, required=True) +def main(identity_index: int, save_dir: str): + checkpoint = "ServiceNow-AI/Apriel-Nemotron-15b-Thinker" + config = AutoConfig.from_pretrained(checkpoint, trust_remote_code=True) + + hybrid_block_layout = ["t"] * config.num_hidden_layers + if identity_index >= 0: + hybrid_block_layout[identity_index] = "i" + + hybrdif_apriel_config = AprielSSMHybridConfig(**config.to_dict(), hybrid_block_layout=hybrid_block_layout) + hybrid_apriel_model = AprielSSMHybridForCausalLM(hybrdif_apriel_config) + hybrid_apriel_model.to(dtype=torch.bfloat16).to(device) + + apriel_model = AutoModelForCausalLM.from_pretrained(checkpoint, torch_dtype=torch.bfloat16, trust_remote_code=True) + apriel_state_dict = apriel_model.state_dict() + hybrid_apriel_model.load_state_dict(apriel_state_dict, strict=False) + + hybrid_apriel_model.save_pretrained(save_dir, save_config=True) + torch.cuda.empty_cache() + del hybrid_apriel_model + del apriel_model + del apriel_state_dict + gc.collect() + + +if __name__ == "__main__": + main() diff --git a/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_mil.py b/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_mil.py new file mode 100644 index 00000000..d50a45fa --- /dev/null +++ b/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_mil.py @@ -0,0 +1,104 @@ +import gc + +import click +import torch +from transformers import AutoModelForCausalLM + +from fast_llm.models.ssm.external.apriel_15b_hybrid.configuration_ssm_hybrid_apriel15b import AprielSSMHybridConfig +from fast_llm.models.ssm.external.apriel_15b_hybrid.modeling_ssm_hybrid_apriel15b import ( + AprielSSMM2DecoderLayer, + AprielThinkerSSMHybridForCausalLM, +) + +device = "cuda" if torch.cuda.is_available() else "cpu" + + +def convert_layers(transformer, mamba_config, hybrid_block_layout, init_with_kqvo, torch_dtype=torch.bfloat16): + config = transformer.config + embed_dim = config.hidden_size + num_heads = config.num_attention_heads + num_heads_kv = config.num_key_value_heads + head_dim = embed_dim // num_heads + head_dim * num_heads + head_dim * num_heads_kv + + for layer_idx, type in enumerate(hybrid_block_layout): + print("Converting layer %d...", layer_idx) + # Fetch the layer module for easier access + layer_module = transformer.model.layers._modules[f"{layer_idx}"] + if type == "t": + print("Skipping transformer layer %d..." % layer_idx) + elif type == "m2": + print("Converting layer %d..." 
% layer_idx) + # Use MambaDecoderLayer for the remaining layers + mamba_encoder = AprielSSMM2DecoderLayer( + mamba_config, + layer_idx, + device="cpu", + dtype=torch_dtype, + ) + + mamba_encoder.mlp.load_state_dict(layer_module.mlp.state_dict()) + mamba_encoder.input_layernorm.load_state_dict(layer_module.input_layernorm.state_dict()) + mamba_encoder.post_attention_layernorm.load_state_dict(layer_module.post_attention_layernorm.state_dict()) + mamba_encoder.mixer.out_proj.load_state_dict(layer_module.self_attn.o_proj.state_dict()) + + if init_with_kqvo: + # Copy weights: [z, x, B, C, dt], x -> v, B -> k, C -> q + mamba_encoder.mixer.in_proj.weight.data[ + mamba_config.ssm_cfg["d_inner"] : mamba_config.ssm_cfg["d_inner"] + mamba_config.ssm_cfg["d_xb"], : + ].copy_(layer_module.self_attn.v_proj.weight.data) + mamba_encoder.mixer.in_proj.weight.data[ + mamba_config.ssm_cfg["d_inner"] + + mamba_config.ssm_cfg["d_xb"] : mamba_config.ssm_cfg["d_inner"] + + 2 * mamba_config.ssm_cfg["d_xb"], + :, + ].copy_(layer_module.self_attn.k_proj.weight.data) + mamba_encoder.mixer.in_proj.weight.data[ + mamba_config.ssm_cfg["d_inner"] + + 2 * mamba_config.ssm_cfg["d_xb"] : 2 * mamba_config.ssm_cfg["d_inner"] + + 2 * mamba_config.ssm_cfg["d_xb"], + :, + ].copy_(layer_module.self_attn.q_proj.weight.data) + + print("Init Mamba using Attention") + + transformer.model.layers[layer_idx] = mamba_encoder + + else: + raise ValueError(f"Invalid layer type: {type}") + + +@click.command() +@click.option("--m2_index", type=int, required=True) +@click.option("--hybrid_checkpoint", type=str, required=True) +@click.option("--save_dir", type=str, required=True) +def main(m2_index: int, hybrid_checkpoint: str, save_dir: str): + path_base = "/mnt/checkpoints/upstream/Apriel-Nemotron-15b-Thinker" + transformer = AutoModelForCausalLM.from_pretrained(path_base, trust_remote_code=True) + hybrid_config = AprielSSMHybridConfig.from_pretrained(hybrid_checkpoint) + + hybrid_block_layout = hybrid_config.hybrid_block_layout + hybrid_block_layout[m2_index] = "m2" + print(hybrid_block_layout) + + convert_layers(transformer, hybrid_config, hybrid_block_layout, True, torch.bfloat16) + hybrid_config.ssm_cfg["activation"] = "silu" + + # load all existing ssm layers + hybrid_model = AprielThinkerSSMHybridForCausalLM.from_pretrained(hybrid_checkpoint) + state_dict = hybrid_model.state_dict() + missing, unexpected = transformer.load_state_dict(state_dict, strict=False) + assert f"model.layers.{m2_index}.mixer.A_log" in missing + assert f"model.layers.{m2_index}.self_attn.q_proj.weight" in unexpected + print(missing) + print(unexpected) + transformer.save_pretrained(save_dir) + + hybrid_config.save_pretrained(save_dir) + + gc.collect() + + +if __name__ == "__main__": + main() From 37d3be8a4381c677801e470df5f16dbf44a0bbbf Mon Sep 17 00:00:00 2001 From: oleksost Date: Tue, 19 Aug 2025 16:14:41 +0000 Subject: [PATCH 06/18] cleanup --- fast_llm/layers/ssm/mamba2.py | 2 +- fast_llm/layers/ssm/{preprocessors.py => preprocessing.py} | 5 ++++- fast_llm/models/ssm/model.py | 2 +- 3 files changed, 6 insertions(+), 3 deletions(-) rename fast_llm/layers/ssm/{preprocessors.py => preprocessing.py} (93%) diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index 2b71f89b..ff96c5ce 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -235,7 +235,7 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) if not _mamba_varlen: - + 
Assert.eq(cu_seqlens, None, msg="This version of Mamba2 does not support cu_seqlens, install verlen mamba") y = selective_scan_fn( x, dt, diff --git a/fast_llm/layers/ssm/preprocessors.py b/fast_llm/layers/ssm/preprocessing.py similarity index 93% rename from fast_llm/layers/ssm/preprocessors.py rename to fast_llm/layers/ssm/preprocessing.py index d7e3a797..343f0bb2 100644 --- a/fast_llm/layers/ssm/preprocessors.py +++ b/fast_llm/layers/ssm/preprocessing.py @@ -24,8 +24,11 @@ def preprocess(self, batch, kwargs: dict[str, typing.Any]) -> None: """ Simplified preprocessor that does not take into account micro-sequences. """ + if TransformerKwargs.sequence_lengths not in kwargs: + return sequence_lengths = kwargs[TransformerKwargs.sequence_lengths] - if "cu_seqlens" in kwargs: + if TransformerKwargs.cu_seqlens_k in kwargs: + # already set this in the transformer preprocessor, so we can use it here cu_seqlens_k = kwargs[TransformerKwargs.cu_seqlens_k] cu_seqlens_q = kwargs[TransformerKwargs.cu_seqlens_q] Assert.eq( diff --git a/fast_llm/models/ssm/model.py b/fast_llm/models/ssm/model.py index 04dcbc29..fafe4409 100644 --- a/fast_llm/models/ssm/model.py +++ b/fast_llm/models/ssm/model.py @@ -6,7 +6,7 @@ from fast_llm.engine.inference.runner import InferenceRunner from fast_llm.layers.language_model.head import LanguageModelHead from fast_llm.layers.ssm.llamba_block import SSMBlock -from fast_llm.layers.ssm.preprocessors import Mamba2Preprocessor +from fast_llm.layers.ssm.preprocessing import Mamba2Preprocessor from fast_llm.layers.transformer.transformer import TransformerBlock from fast_llm.models.gpt.config import GPTBatchConfig from fast_llm.models.gpt.model import GPTBaseModel, GPTModel From 35c6f2041ae7f68c99e03e852773f0f92e745e2f Mon Sep 17 00:00:00 2001 From: oleksost Date: Wed, 20 Aug 2025 13:40:36 +0000 Subject: [PATCH 07/18] wip --- fast_llm/layers/ssm/config.py | 6 + fast_llm/layers/ssm/mamba2.py | 618 +++++++++--------- fast_llm/models/ssm/external/15B_hybrid.ipynb | 135 +++- 3 files changed, 431 insertions(+), 328 deletions(-) diff --git a/fast_llm/layers/ssm/config.py b/fast_llm/layers/ssm/config.py index 194063a2..7a357bde 100644 --- a/fast_llm/layers/ssm/config.py +++ b/fast_llm/layers/ssm/config.py @@ -38,6 +38,7 @@ class SSMDimNames: head_dim = "ssm_head_dim" head_groups = "ssm_head_groups" group_heads = "ssm_group_heads" + conv1d_dim = "ssm_conv1d_dim" # Mamba 2 x_proj_dim_2 = "x_proj_dim_2" # d_xb @@ -295,6 +296,11 @@ def setup_tensor_space(self, tensor_space: TensorSpace, block_type: SSMBlockType (heads_and_head_dim, head_groups_and_state, head_groups_and_state, heads_and_head_dim), ) ) + tensor_space.add_tensor_dim( + ConcatenatedTensorDim( + SSMDimNames.conv1d_dim, (heads_and_head_dim, head_groups_and_state, head_groups_and_state) + ) + ) elif block_type == SSMBlockType.mamba2_discrete: tensor_space.add_tensor_dim( ConcatenatedTensorDim( diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index bef0b2bc..caa6e214 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -2,11 +2,13 @@ import logging import typing +import einops import torch from fast_llm.engine.config_utils.tensor_space import DefaultDimNames, TensorDim, TensorSpace from fast_llm.functional.config import ActivationType from fast_llm.layers.common.linear import InputParallelLinear, Linear, OutputParallelLinear +from fast_llm.layers.common.normalization import RMSNorm from fast_llm.layers.ssm.config import SSMConfig, SSMDimNames, SSMKwargs from 
fast_llm.layers.ssm.mamba_layer import init_A, init_dtprojbias from fast_llm.layers.transformer.config import TransformerConfig, TransformerDimNames, TransformerKwargs @@ -17,6 +19,7 @@ _mamba_varlen = False try: from mamba_ssm.ops.selective_scan_interface import selective_scan_fn # noqa + from mamba_ssm.ops.triton.ssd_combined import mamba_chunk_scan_combined _mamba_available = True sig = inspect.signature(selective_scan_fn) @@ -42,9 +45,247 @@ logger = logging.getLogger(__name__) +# class Mamba2(Mixer): +# """ +# This code is adapted from https://github.com/jxiw/M1/blob/537a1ca5407a786a99dc6c721873493cf8750d5e/mamba/hybrid_mamba_layer.py +# """ + +# _mixer_name: typing.ClassVar[str] = "mamba_2" + +# _XZ_DIMS = ( +# TransformerDimNames.batch, +# SSMDimNames.composite_heads_and_head_dim, +# TransformerDimNames.sequence_q, +# ) +# _BC_DIMS = ( +# TransformerDimNames.batch, +# SSMDimNames.composite_heads, +# SSMDimNames.state, +# TransformerDimNames.sequence_q, +# ) + +# def __init__( +# self, +# config: SSMConfig, +# tensor_space: TensorSpace, +# block_index: int, +# transformer_config: TransformerConfig, +# ): +# super().__init__(tensor_space, block_index, debug_level=transformer_config.debug_transformer) +# self._config: SSMConfig = config +# Assert.eq(self._config.activation_type, ActivationType.silu) +# layer_lr_scale: float | None = config.per_layer_lr_scale[block_index] if config.per_layer_lr_scale else None +# lr_scale: float | tuple[float | None, ...] | None = get_lr_scale(self._config.mamba_lr_scale, layer_lr_scale) + +# inner_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim] +# xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_state] +# hidden_dim: TensorDim = tensor_space[TransformerDimNames.hidden] +# dt_rank_dim = tensor_space[SSMDimNames.dt_rank] + +# self._local_heads = tensor_space[SSMDimNames.composite_heads].size +# self._local_head_groups = tensor_space[SSMDimNames.head_groups].size +# self._group_heads = div(self._local_heads, self._local_head_groups) +# self._local_inner_size = inner_dim.size +# self._local_xb_size = xb_dim.size + +# state_size = tensor_space[SSMDimNames.state].size +# div(self._local_inner_size, state_size) + +# conv1d_dim = inner_dim if self._config.repeat_kv_before_conv else xb_dim +# self.conv1d_weight = ParameterMeta.from_dims( +# ( +# conv1d_dim, +# tensor_space[DefaultDimNames.scalar], +# tensor_space[SSMDimNames.convolution_kernel], +# ), +# init_method=init_uniform_centered_((conv1d_dim.global_size * self._config.conv_kernel_dimension) ** -0.5), +# lr_scale=lr_scale, +# ) +# self.conv1d_bias = ParameterMeta.from_dims( +# (conv1d_dim,), +# init_method=init_uniform_centered_(self._config.conv_kernel_dimension**-0.5), +# lr_scale=lr_scale, +# ) +# self.in_proj = OutputParallelLinear( +# hidden_dim, +# tensor_space[SSMDimNames.concatenated_inner_projection], +# bias=config.add_bias_linear, +# weight_init_method=init_kaiming_(transformer_config.hidden_size), +# sequence_parallel=self._sequence_parallel, +# lr_scale=lr_scale, +# ) + +# self.dt_in_proj = Linear( +# hidden_dim, +# dt_rank_dim, +# bias=config.add_bias_linear, +# weight_init_method=init_kaiming_(transformer_config.hidden_size), +# lr_scale=lr_scale, +# ) +# self.dt_proj = OutputParallelLinear( +# dt_rank_dim, +# inner_dim, +# bias=False, +# # Initialize special dt projection to preserve variance at initialization +# weight_init_method=self._config.dt_init.get_init_method( +# self._config.dt_rank**-0.5 * self._config.dt_scale +# ), +# 
sequence_parallel=self._sequence_parallel, +# lr_scale=lr_scale, +# ) +# # define bias outside the linear layer since it's also used in the selective_scan_fn +# self.dt_proj_bias = ParameterMeta.from_dims( +# (inner_dim,), +# init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), +# lr_scale=lr_scale, +# ) +# self.A_log = ParameterMeta.from_dims( +# (inner_dim, tensor_space[SSMDimNames.state]), +# init_method=init_A(self._config.state_size, self._config.d_inner), +# lr_scale=lr_scale, +# weight_decay=False, +# ) +# self.D = ParameterMeta.from_dims( +# (inner_dim,), +# weight_decay=False, +# init_method=init_ones_, +# lr_scale=lr_scale, +# ) +# self.out_proj = InputParallelLinear( +# inner_dim, +# hidden_dim, +# bias=config.add_bias_linear, +# weight_init_method=init_kaiming_(self._config.d_inner), +# sequence_parallel=self._sequence_parallel, +# # TODO: lr_scale? +# ) + +# def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[torch.Tensor, torch.Tensor | None]: +# """ +# Note, we are nto doing "read" sequence-tensor parallel trainign here, since inner_projection is gathered over all GPUS. +# This is also desired, since the currently used mamba kernel does not support STP. +# TODO: use correct kernel from Mamba2! +# """ +# assert _mamba_available +# assert _causal_conv1d_available +# cu_seqlens = kwargs[SSMKwargs.cu_seqlens] +# seq_idx = kwargs[SSMKwargs.seq_idx] +# position_indices = kwargs[SSMKwargs.ssm_position_ids] + +# # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) +# # -> (batch/sequence, sequence/batch, inner_projection) +# inner_projection = self.in_proj(input_) +# dt = self.dt_proj(self.dt_in_proj(input_)) + self.dt_proj_bias +# # Standardize to (batch, sequence, inner_projection) +# if kwargs[TransformerKwargs.sequence_first]: +# inner_projection = inner_projection.transpose(0, 1) +# dt = dt.transpose(0, 1) + +# sequence_length = inner_projection.size(1) + +# z, x, b, c = torch.split( +# inner_projection, +# [self._local_inner_size, self._local_xb_size, self._local_xb_size, self._local_inner_size], +# dim=2, +# ) + +# # z: (batch, sequence, local_heads * state) -> (batch, local_heads * state, sequence) +# z = z.transpose(1, 2) + +# # x: (batch, sequence, local_head_groups * state) -> (batch, local_heads * state, sequence) +# x = x.transpose(1, 2) +# # x: (batch, local_heads * state, sequence) -> (batch, local_head_per_groups, state, sequence) +# if self._config.repeat_kv_before_conv: +# x = ( +# x.unflatten(1, (self._local_head_groups, self._config.state_size)) +# .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) +# .flatten(1, 2) +# ) + +# if cu_seqlens is not None: +# # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 +# x = _causal_conv1d_fn( +# x=x.transpose(1, 2).contiguous().transpose(1, 2), +# weight=self.conv1d_weight.squeeze(1), +# bias=self.conv1d_bias, +# seq_idx=seq_idx, +# activation="silu", +# ) +# else: +# x = _causal_conv1d_fn(x=x, weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu") + +# if not self._config.repeat_kv_before_conv: +# x = ( +# x.unflatten(1, (self._local_head_groups, self._config.state_size)) +# .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) +# .flatten(1, 2) +# ) + +# # b: (batch, sequence, local_head_groups * state) -> (batch, local_heads, state, sequence) +# b = ( +# b.transpose(1, 2) +# .unflatten(1, 
(self._local_head_groups, self._config.state_size)) +# .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) +# ) + +# # c: (batch, sequence, heads * state) -> (batch, heads, state, sequence) +# c = c.transpose(1, 2).unflatten(1, (self._local_heads, self._config.state_size)) + +# # dt: (batch, sequence, heads * state) -> (batch, heads * state, sequence) +# dt = dt.transpose(1, 2) + +# if self._debug_level: +# self._debug_log(z, "z", self._XZ_DIMS, kwargs) +# self._debug_log(x, "x", self._XZ_DIMS, kwargs) +# self._debug_log(b, "b", self._BC_DIMS, kwargs) +# self._debug_log(c, "c", self._BC_DIMS, kwargs) +# self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) + +# if not _mamba_varlen: +# Assert.eq(cu_seqlens, None, msg="This version of Mamba2 does not support cu_seqlens, install verlen mamba") +# y = selective_scan_fn( +# x, +# dt, +# -torch.exp(self.A_log.float()), +# b, +# c, +# self.D.float(), +# z, +# delta_bias=self.dt_proj_bias.float(), +# delta_softplus=True, +# ) +# else: +# position_indices = position_indices if cu_seqlens is not None else None + +# y = selective_scan_fn( +# x, +# dt, +# -torch.exp(self.A_log.float()), +# b, +# c, +# self.D.float(), +# z, +# delta_bias=self.dt_proj_bias.float(), +# delta_softplus=True, +# position_indices=position_indices, +# ) + +# if self._debug_level: +# self._debug_log(y, "y", self._XZ_DIMS, kwargs) + +# # y: (batch, local_heads * state, sequence) -> (batch, sequence, local_heads * state) +# y = y.transpose(1, 2)[:, :sequence_length] +# if kwargs[TransformerKwargs.sequence_first]: +# # TODO: Is contiguous needed? +# y = y.transpose(0, 1).contiguous() +# # (batch/sequence, sequence/batch, local_heads * state) +# # -> (batch/local_sequence, local_sequence/batch, hidden) +# return self.out_proj(y) + + class Mamba2(Mixer): """ - This code is adapted from https://github.com/jxiw/M1/blob/537a1ca5407a786a99dc6c721873493cf8750d5e/mamba/hybrid_mamba_layer.py + This code is adapted from https://github.com/jxiw/M1/blob/main/mamba2/hybrid_mamba_layer.py """ _mixer_name: typing.ClassVar[str] = "mamba_2" @@ -77,7 +318,7 @@ def __init__( inner_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim] xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_state] hidden_dim: TensorDim = tensor_space[TransformerDimNames.hidden] - dt_rank_dim = tensor_space[SSMDimNames.dt_rank] + tensor_space[SSMDimNames.dt_rank] self._local_heads = tensor_space[SSMDimNames.composite_heads].size self._local_head_groups = tensor_space[SSMDimNames.head_groups].size @@ -85,10 +326,7 @@ def __init__( self._local_inner_size = inner_dim.size self._local_xb_size = xb_dim.size - state_size = tensor_space[SSMDimNames.state].size - div(self._local_inner_size, state_size) - - conv1d_dim = inner_dim if self._config.repeat_kv_before_conv else xb_dim + conv1d_dim = tensor_space[SSMDimNames.conv1d_dim] self.conv1d_weight = ParameterMeta.from_dims( ( conv1d_dim, @@ -114,30 +352,19 @@ def __init__( self.dt_in_proj = Linear( hidden_dim, - dt_rank_dim, + tensor_space[SSMDimNames.composite_heads], bias=config.add_bias_linear, weight_init_method=init_kaiming_(transformer_config.hidden_size), lr_scale=lr_scale, ) - self.dt_proj = OutputParallelLinear( - dt_rank_dim, - inner_dim, - bias=False, - # Initialize special dt projection to preserve variance at initialization - weight_init_method=self._config.dt_init.get_init_method( - self._config.dt_rank**-0.5 * self._config.dt_scale - ), - sequence_parallel=self._sequence_parallel, - lr_scale=lr_scale, - ) - # define bias 
outside the linear layer since it's also used in the selective_scan_fn + self.dt_proj_bias = ParameterMeta.from_dims( - (inner_dim,), + (tensor_space[SSMDimNames.composite_heads],), init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), lr_scale=lr_scale, ) self.A_log = ParameterMeta.from_dims( - (inner_dim, tensor_space[SSMDimNames.state]), + (tensor_space[SSMDimNames.composite_heads],), init_method=init_A(self._config.state_size, self._config.d_inner), lr_scale=lr_scale, weight_decay=False, @@ -154,261 +381,26 @@ def __init__( bias=config.add_bias_linear, weight_init_method=init_kaiming_(self._config.d_inner), sequence_parallel=self._sequence_parallel, - # TODO: lr_scale? - ) - - def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[torch.Tensor, torch.Tensor | None]: - """ - Note, we are nto doing "read" sequence-tensor parallel trainign here, since inner_projection is gathered over all GPUS. - This is also desired, since the currently used mamba kernel does not support STP. - TODO: use correct kernel from Mamba2! - """ - assert _mamba_available - assert _causal_conv1d_available - cu_seqlens = kwargs[SSMKwargs.cu_seqlens] - seq_idx = kwargs[SSMKwargs.seq_idx] - position_indices = kwargs[SSMKwargs.ssm_position_ids] - - # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) - # -> (batch/sequence, sequence/batch, inner_projection) - inner_projection = self.in_proj(input_) - dt = self.dt_proj(self.dt_in_proj(input_)) + self.dt_proj_bias - # Standardize to (batch, sequence, inner_projection) - if kwargs[TransformerKwargs.sequence_first]: - inner_projection = inner_projection.transpose(0, 1) - dt = dt.transpose(0, 1) - - sequence_length = inner_projection.size(1) - - z, x, b, c = torch.split( - inner_projection, - [self._local_inner_size, self._local_xb_size, self._local_xb_size, self._local_inner_size], - dim=2, - ) - - # z: (batch, sequence, local_heads * state) -> (batch, local_heads * state, sequence) - z = z.transpose(1, 2) - - # x: (batch, sequence, local_head_groups * state) -> (batch, local_heads * state, sequence) - x = x.transpose(1, 2) - if self._config.repeat_kv_before_conv: - x = ( - x.unflatten(1, (self._local_head_groups, self._config.state_size)) - .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) - .flatten(1, 2) - ) - - if cu_seqlens is not None: - # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 - x = _causal_conv1d_fn( - x=x.transpose(1, 2).contiguous().transpose(1, 2), - weight=self.conv1d_weight.squeeze(1), - bias=self.conv1d_bias, - seq_idx=seq_idx, - activation="silu", - ) - else: - x = _causal_conv1d_fn(x=x, weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu") - - if not self._config.repeat_kv_before_conv: - x = ( - x.unflatten(1, (self._local_head_groups, self._config.state_size)) - .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) - .flatten(1, 2) - ) - - # b: (batch, sequence, local_head_groups * state) -> (batch, local_heads, state, sequence) - b = ( - b.transpose(1, 2) - .unflatten(1, (self._local_head_groups, self._config.state_size)) - .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) - ) - - # c: (batch, sequence, heads * state) -> (batch, heads, state, sequence) - c = c.transpose(1, 2).unflatten(1, (self._local_heads, self._config.state_size)) - - # dt: (batch, sequence, heads * state) -> (batch, 
heads * state, sequence) - dt = dt.transpose(1, 2) - - if self._debug_level: - self._debug_log(z, "z", self._XZ_DIMS, kwargs) - self._debug_log(x, "x", self._XZ_DIMS, kwargs) - self._debug_log(b, "b", self._BC_DIMS, kwargs) - self._debug_log(c, "c", self._BC_DIMS, kwargs) - self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) - - if not _mamba_varlen: - Assert.eq(cu_seqlens, None, msg="This version of Mamba2 does not support cu_seqlens, install verlen mamba") - y = selective_scan_fn( - x, - dt, - -torch.exp(self.A_log.float()), - b, - c, - self.D.float(), - z, - delta_bias=self.dt_proj_bias.float(), - delta_softplus=True, - ) - else: - position_indices = position_indices if cu_seqlens is not None else None - - y = selective_scan_fn( - x, - dt, - -torch.exp(self.A_log.float()), - b, - c, - self.D.float(), - z, - delta_bias=self.dt_proj_bias.float(), - delta_softplus=True, - position_indices=position_indices, - ) - - if self._debug_level: - self._debug_log(y, "y", self._XZ_DIMS, kwargs) - - # y: (batch, local_heads * state, sequence) -> (batch, sequence, local_heads * state) - y = y.transpose(1, 2)[:, :sequence_length] - if kwargs[TransformerKwargs.sequence_first]: - # TODO: Is contiguous needed? - y = y.transpose(0, 1).contiguous() - # (batch/sequence, sequence/batch, local_heads * state) - # -> (batch/local_sequence, local_sequence/batch, hidden) - return self.out_proj(y) - - -class Mamba2Chunked(Mamba2): - """ - This code is adapted from https://github.com/jxiw/M1/blob/main/mamba2/hybrid_mamba_layer.py - """ - - _mixer_name: typing.ClassVar[str] = "mamba_2" - - _XZ_DIMS = ( - TransformerDimNames.batch, - SSMDimNames.composite_heads_and_head_dim, - TransformerDimNames.sequence_q, - ) - _BC_DIMS = ( - TransformerDimNames.batch, - SSMDimNames.composite_heads, - SSMDimNames.state, - TransformerDimNames.sequence_q, - ) - - def __init__( - self, - config: SSMConfig, - tensor_space: TensorSpace, - block_index: int, - transformer_config: TransformerConfig, - ): - super().__init__(tensor_space, block_index, debug_level=transformer_config.debug_transformer) - self._config: SSMConfig = config - Assert.eq(self._config.activation_type, ActivationType.silu) - layer_lr_scale: float | None = config.per_layer_lr_scale[block_index] if config.per_layer_lr_scale else None - lr_scale: float | tuple[float | None, ...] 
| None = get_lr_scale(self._config.mamba_lr_scale, layer_lr_scale) - - inner_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim] - xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_state] - hidden_dim: TensorDim = tensor_space[TransformerDimNames.hidden] - dt_rank_dim = tensor_space[SSMDimNames.dt_rank] - - self._local_heads = tensor_space[SSMDimNames.composite_heads].size - self._local_head_groups = tensor_space[SSMDimNames.head_groups].size - self._group_heads = div(self._local_heads, self._local_head_groups) - self._local_inner_size = inner_dim.size - self._local_xb_size = xb_dim.size - - self.nheads = inner_dim // self.config.n_heads - - conv1d_dim = inner_dim if self._config.repeat_kv_before_conv else xb_dim - self.conv1d_weight = ParameterMeta.from_dims( - ( - conv1d_dim, - tensor_space[DefaultDimNames.scalar], - tensor_space[SSMDimNames.convolution_kernel], - ), - init_method=init_uniform_centered_((conv1d_dim.global_size * self._config.conv_kernel_dimension) ** -0.5), - lr_scale=lr_scale, - ) - self.conv1d_bias = ParameterMeta.from_dims( - (conv1d_dim,), - init_method=init_uniform_centered_(self._config.conv_kernel_dimension**-0.5), - lr_scale=lr_scale, - ) - self.in_proj = OutputParallelLinear( - hidden_dim, - tensor_space[SSMDimNames.concatenated_inner_projection], - bias=config.add_bias_linear, - weight_init_method=init_kaiming_(transformer_config.hidden_size), - sequence_parallel=self._sequence_parallel, lr_scale=lr_scale, ) - - self.dt_in_proj = Linear( - hidden_dim, - self.nheads, - bias=config.add_bias_linear, - weight_init_method=init_kaiming_(transformer_config.hidden_size), - lr_scale=lr_scale, - ) - self.dt_proj = OutputParallelLinear( - dt_rank_dim, + self.norm = RMSNorm( inner_dim, - bias=False, - # Initialize special dt projection to preserve variance at initialization - weight_init_method=self._config.dt_init.get_init_method( - self._config.dt_rank**-0.5 * self._config.dt_scale - ), - sequence_parallel=self._sequence_parallel, - lr_scale=lr_scale, - ) - # define bias outside the linear layer since it's also used in the selective_scan_fn - self.dt_proj_bias = ParameterMeta.from_dims( - (inner_dim,), - init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), - lr_scale=lr_scale, - ) - self.A_log = ParameterMeta.from_dims( - (inner_dim, tensor_space[SSMDimNames.state]), - init_method=init_A(self._config.state_size, self._config.d_inner), + eps=1e-5, lr_scale=lr_scale, - weight_decay=False, - ) - self.D = ParameterMeta.from_dims( - (inner_dim,), - weight_decay=False, - init_method=init_ones_, - lr_scale=lr_scale, - ) - self.out_proj = InputParallelLinear( - inner_dim, - hidden_dim, - bias=config.add_bias_linear, - weight_init_method=init_kaiming_(self._config.d_inner), - sequence_parallel=self._sequence_parallel, - # TODO: lr_scale? ) def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[torch.Tensor, torch.Tensor | None]: - """ - Note, we are nto doing "read" sequence-tensor parallel trainign here, since inner_projection is gathered over all GPUS. - This is also desired, since the currently used mamba kernel does not support STP. - TODO: use correct kernel from Mamba2! 
- """ + """ """ assert _mamba_available assert _causal_conv1d_available cu_seqlens = kwargs[SSMKwargs.cu_seqlens] seq_idx = kwargs[SSMKwargs.seq_idx] - position_indices = kwargs[SSMKwargs.ssm_position_ids] + kwargs[SSMKwargs.ssm_position_ids] # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) # -> (batch/sequence, sequence/batch, inner_projection) inner_projection = self.in_proj(input_) - dt = self.dt_proj(self.dt_in_proj(input_)) + self.dt_proj_bias + dt = self.dt_in_proj(input_) # bs, seq, heads #+ self.dt_proj_bias # Standardize to (batch, sequence, inner_projection) if kwargs[TransformerKwargs.sequence_first]: inner_projection = inner_projection.transpose(0, 1) @@ -416,55 +408,34 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ sequence_length = inner_projection.size(1) - z, x, b, c = torch.split( + z, xBC = torch.split( inner_projection, - [self._local_inner_size, self._local_xb_size, self._local_xb_size, self._local_inner_size], + [self._local_inner_size, self._local_xb_size + self._local_xb_size + self._local_inner_size], dim=2, ) - # z: (batch, sequence, local_heads * state) -> (batch, local_heads * state, sequence) - z = z.transpose(1, 2) - - # x: (batch, sequence, local_head_groups * state) -> (batch, local_heads * state, sequence) - x = x.transpose(1, 2) - if self._config.repeat_kv_before_conv: - x = ( - x.unflatten(1, (self._local_head_groups, self._config.state_size)) - .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) - .flatten(1, 2) - ) - if cu_seqlens is not None: # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 - x = _causal_conv1d_fn( - x=x.transpose(1, 2).contiguous().transpose(1, 2), + xBC = _causal_conv1d_fn( + xBC.transpose(1, 2), weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, seq_idx=seq_idx, activation="silu", - ) + ).transpose(1, 2) else: - x = _causal_conv1d_fn(x=x, weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu") - - if not self._config.repeat_kv_before_conv: - x = ( - x.unflatten(1, (self._local_head_groups, self._config.state_size)) - .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) - .flatten(1, 2) - ) - - # b: (batch, sequence, local_head_groups * state) -> (batch, local_heads, state, sequence) - b = ( - b.transpose(1, 2) - .unflatten(1, (self._local_head_groups, self._config.state_size)) - .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) - ) - - # c: (batch, sequence, heads * state) -> (batch, heads, state, sequence) - c = c.transpose(1, 2).unflatten(1, (self._local_heads, self._config.state_size)) - - # dt: (batch, sequence, heads * state) -> (batch, heads * state, sequence) - dt = dt.transpose(1, 2) + xBC = _causal_conv1d_fn( + x=xBC.transpose(1, 2), weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu" + ).transpose(1, 2) + + x, b, c = torch.split(xBC, [self._local_xb_size, self._local_xb_size, self._local_inner_size], dim=-1) + x = einops.rearrange(x, "b l (xb_group dstate) -> b xb_group l dstate", dstate=self._config.state_size) + b = einops.rearrange(b, "b l (xb_group dstate) -> b xb_group l dstate", dstate=self._config.state_size) + batch, num_key_value_heads, slen, head_dim = x.shape + x = x[:, :, None, :, :].expand(batch, num_key_value_heads, self._group_heads, slen, head_dim) + x = x.reshape(batch, num_key_value_heads * self._group_heads, slen, head_dim) + b = b[:, :, None, :, 
:].expand(batch, num_key_value_heads, self._group_heads, slen, head_dim) + b = b.reshape(batch, num_key_value_heads * self._group_heads, slen, head_dim) if self._debug_level: self._debug_log(z, "z", self._XZ_DIMS, kwargs) @@ -473,34 +444,27 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ self._debug_log(c, "c", self._BC_DIMS, kwargs) self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) - if not _mamba_varlen: - - y = selective_scan_fn( - x, - dt, - -torch.exp(self.A_log.float()), - b, - c, - self.D.float(), - z, - delta_bias=self.dt_proj_bias.float(), - delta_softplus=True, - ) - else: - position_indices = position_indices if cu_seqlens is not None else None - - y = selective_scan_fn( - x, - dt, - -torch.exp(self.A_log.float()), - b, - c, - self.D.float(), - z, - delta_bias=self.dt_proj_bias.float(), - delta_softplus=True, - position_indices=position_indices, - ) + dt_limit_kwargs = {} + # c is b x seq x heads * state + y = mamba_chunk_scan_combined( + # rearrange(x, "b l (h p) -> b l h p", p=self.headdim), + einops.rearrange(x, "b g l p -> b l g p"), + dt, + -torch.exp(self.A_log.float()), + # rearrange(B, "b l (g n) -> b l g n", g=self.ngroups), + einops.rearrange(b, "b g l n -> b l g n"), + einops.rearrange(c, "b l (g n) -> b l g n", g=self._local_heads), + chunk_size=self._config.chunk_size, + D=self.D, + z=None, + dt_bias=self.dt_proj_bias, + dt_softplus=True, + seq_idx=seq_idx, + cu_seqlens=cu_seqlens, + **dt_limit_kwargs, + return_final_states=False, + return_varlen_states=False, + ) if self._debug_level: self._debug_log(y, "y", self._XZ_DIMS, kwargs) diff --git a/fast_llm/models/ssm/external/15B_hybrid.ipynb b/fast_llm/models/ssm/external/15B_hybrid.ipynb index a8f0c33b..8d433e5b 100644 --- a/fast_llm/models/ssm/external/15B_hybrid.ipynb +++ b/fast_llm/models/ssm/external/15B_hybrid.ipynb @@ -1534,7 +1534,140 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "============================================================\n", + "WRONG APPROACH: Naive Discretization\n", + "============================================================\n", + "Step 1: x = 7.000\n", + "Step 2: x = 5.500\n", + "Step 3: x = 4.750\n", + "Step 4: x = 4.375\n", + "Step 5: x = 4.188\n", + "\n", + "============================================================\n", + "CORRECT APPROACH: Solving the Differential Equation\n", + "============================================================\n", + "\n", + "We need to solve: dx/dt = Ax + Bu\n", + "This is a first-order linear ODE with constant coefficients.\n", + "\n", + "Step 1: Homogeneous solution (u=0)\n", + " dx/dt = Ax\n", + " Solution: x_h(t) = e^(At) * x(0)\n", + "\n", + "Step 2: Particular solution (variation of parameters)\n", + " Full solution: x(t) = e^(At)*x(0) + ∫[0,t] e^(A(t-τ))*B*u(τ) dτ\n", + "\n", + "Step 3: Apply ZOH (u is constant over [0,Δ])\n", + " x(Δ) = e^(AΔ)*x(0) + (∫[0,Δ] e^(As) ds)*B*u\n", + " x(Δ) = e^(AΔ)*x(0) + A^(-1)*(e^(AΔ) - 1)*B*u\n", + "\n", + "Discretized system:\n", + "A_d = e^(AΔ) = e^(-0.5*1.0) = 0.607\n", + "B_d = (e^(AΔ)-1)/A * B = 1.574\n", + "Step 1: x = 7.639\n", + "Step 2: x = 6.207\n", + "Step 3: x = 5.339\n", + "Step 4: x = 4.812\n", + "Step 5: x = 4.493\n" + ] + }, + { + "ename": "TypeError", + "evalue": "unsupported format string passed to numpy.ndarray.__format__", + "output_type": "error", + "traceback": [ + 
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[9], line 111\u001b[0m\n\u001b[1;32m 109\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(\u001b[38;5;241m1\u001b[39m, \u001b[38;5;28mlen\u001b[39m(t_discrete)):\n\u001b[1;32m 110\u001b[0m error_naive \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mabs\u001b[39m(x_naive_history[i] \u001b[38;5;241m-\u001b[39m x_continuous[\u001b[38;5;28mint\u001b[39m(i\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m1000\u001b[39m\u001b[38;5;241m/\u001b[39m\u001b[38;5;241m5\u001b[39m)])\n\u001b[0;32m--> 111\u001b[0m ax1\u001b[38;5;241m.\u001b[39mannotate(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mError: \u001b[39m\u001b[38;5;132;43;01m{\u001b[39;49;00m\u001b[43merror_naive\u001b[49m\u001b[38;5;132;43;01m:\u001b[39;49;00m\u001b[38;5;124;43m.2f\u001b[39;49m\u001b[38;5;132;43;01m}\u001b[39;49;00m\u001b[38;5;124m'\u001b[39m, \n\u001b[1;32m 112\u001b[0m xy\u001b[38;5;241m=\u001b[39m(i, x_naive_history[i]), \n\u001b[1;32m 113\u001b[0m xytext\u001b[38;5;241m=\u001b[39m(i\u001b[38;5;241m+\u001b[39m\u001b[38;5;241m0.1\u001b[39m, x_naive_history[i]\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m),\n\u001b[1;32m 114\u001b[0m fontsize\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m8\u001b[39m, color\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mred\u001b[39m\u001b[38;5;124m'\u001b[39m, alpha\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0.7\u001b[39m)\n\u001b[1;32m 116\u001b[0m \u001b[38;5;66;03m# Plot 2: The continuous evolution between samples\u001b[39;00m\n\u001b[1;32m 117\u001b[0m ax2 \u001b[38;5;241m=\u001b[39m axes[\u001b[38;5;241m0\u001b[39m, \u001b[38;5;241m1\u001b[39m]\n", + "\u001b[0;31mTypeError\u001b[0m: unsupported format string passed to numpy.ndarray.__format__" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABIoAAANECAYAAADfVMS/AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAA6TpJREFUeJzs3Xd0FFUbx/HvphN6C0UivXcQqdIMBOlVehcbVZogvTdBioCK9A7SAkjvIFWkSO8gAqEEAgmpO+8fa/Y1JoEEkmwgv885czJz587MM3c3ZPfh3jsmwzAMREREREREREQk0bOzdQAiIiIiIiIiIpIwKFEkIiIiIiIiIiKAEkUiIiIiIiIiIvIPJYpERERERERERARQokhERERERERERP6hRJGIiIiIiIiIiABKFImIiIiIiIiIyD+UKBIREREREREREUCJIhERERERERER+YcSRSJRMJlMdOnSxdZhxFi7du3Ili1bjI+7fv06JpOJefPmxXpMryNbtmzUrl3b1mGIiIiIiIgkCkoUyVtnxYoVmEwm1qxZE2Ff0aJFMZlM7Nq1K8K+d999l3LlysVHiOFcv36d9u3bkzNnTlxcXMiYMSMVK1ZkyJAh8R6LrZw9e5ahQ4dy/fp1m8ZhGAYLFy6kYsWKpEqVCldXVwoXLszw4cPx8/OLUL9y5cqYTCZMJhN2dnakSJGCvHnz0rp1a7Zt2xbpNbJly2Y95r9LjRo1Xhjf7t27rXUXLVoUaZ3y5ctjMpkoVKhQzBsAmDFjRqTJwoTyGomIiIiISNxysHUAIrGtQoUKAOzfv58GDRpYy319ffnzzz9xcHDgwIEDVKlSxbrv1q1b3Lp1i2bNmsVrrJcvX6ZUqVIkSZKEDh06kC1bNu7cucPx48cZN24cw4YNi7dYsmbNyvPnz3F0dIy3a4Y5e/Ysw4YNo3Llyq/UGyo2hIaG0qJFC1asWMEHH3zA0KFDcXV1Zd++fQwbNoyVK1eyfft2MmTIEO64LFmyMGbMGAD8/Py4fPkyq1evZtGiRXz88ccsWrQoQpsWK1aMXr16RYghc+bM0YrVxcWFJUuW0KpVq3Dl169f57fffsPFxSUmtx7OjBkzSJcuHe3atQtXnhBeIxERERERiXtKFMlbJ3PmzGTPnp39+/eHKz948CCGYdCkSZMI+8K2w5JM8eW7777j2bNnnDhxgqxZs4bb5+3tHa+xmEym10owvOnGjx/PihUr6N27NxMmTLCWf/rpp3z88cfUr1+fdu3asWnTpnDHpUyZMkLCZuzYsXTr1o0ZM2aQLVs2xo0bF27/O++8E+GYmKhZsyZeXl48ePCAdOnSWcuXLFlChgwZyJ07Nz4+Pq98/vjk5+dH0qRJbR2GiIiIiIj8Q0PP5K1UoUIF/vjjD54/f24tO3DgAAULFuSjjz7i0KFDmM3mcPtMJhPly5ePcK61a9dSqFAhnJ2dKViwIJs3b7bu27VrV5TD3JYsWYLJZOLgwYNRxnnlyhWyZMkSIUkE4ObmFqFsxowZFCxYEGdnZzJnzkznzp15/PhxlOcPDg4mTZo0tG/fPsI+X19fXFxc6N27NxD5HEXt2rUjWbJk3L59m/r165MsWTLSp09P7969CQ0NDXe+hw8f0rp1a1KkSEGqVKlo27YtJ0+efOm8R/PmzaNJkyYAVKlSxTq0avfu3eHq7d+/n/fffx8XFxdy5MjBggULIpzr8ePH9OjRA3d3d5ydncmVKxfjxo0L91pH5vnz50yYMIE8efJYewf9W506dWjbti2bN2/m0KFDLzwXgL29PVOnTqVAgQJ8//33PHny5KXHxES9evVwdnZm5cqV4cqXLFnCxx9/jL29fYRj5s6dS9WqVXFzc8PZ2ZkCBQowc+bMcHWyZcvGmTNn2LNnj/V1qFy5crReo02bNvHBBx+QNGlSkidPTq1atThz5ky484e9n65cuULNmjVJnjw5LVu2BODSpUs0atSIjBkz4uLiQpYsWWjWrFmst52IiIiIiLyYEkXyVqpQoQLBwcEcPnzYWnbgwAHKlStHuXLlePLkCX/++We4ffny5SNt2rThzrN//36+/PJLmjVrxvjx4wkICKBRo0Y8fPgQsMxR4+7uzuLFiyPEsHjxYnLmzEnZsmWjjDNr1qzcunWLnTt3vvSehg4dSufOncmcOTMTJ06kUaNG/Pjjj1SvXp3g4OBIj3F0dKRBgwasXbuWoKCgcPvWrl1LYGDgS4fbhYaG4unpSdq0afn222+pVKkSEydO5KeffrLWMZvN1KlTh6VLl9K2bVtGjRrFnTt3aNu27Uvvq2LFinTr1g2Ab775hoULF7Jw4ULy589vrXP58mUaN25MtWrVmDhxIqlTp6Zdu3bhEhH+/v5UqlSJRYsW0aZNG6ZOnUr58uXp378/PXv2fGEM+/fvx8fHhxYtWuDgEHlHyzZt2gCwYcOGl94TWJJFzZs3x9/fP0IPtuDgYB48eBBh+Xdi80VcXV2pV68eS5cutZadPHmSM2fO0KJFi0iPmTlzJlmzZuWbb75h4sSJuLu78+WXXzJ9+nRrncmTJ5MlSxby5ctnfR0GDBjw0tdo4cKF1KpVi2TJkjFu3DgGDRrE2bNnqVChQoQ5jUJCQvD09MTNzY1vv/2WRo0aERQUhKenJ4cOHaJr165Mnz6dTz/9lKtXr74wESoiIiIiInHAEHkLnTlzxgCMESNGGIZhGMHBwUbSpEmN+fPnG4ZhGBkyZDCmT59uGIZh+Pr6Gvb29kanTp3CnQMwnJycjMuXL1vLTp48aQDGtGnTrGX9+/c3nJ2djcePH1vLvL29DQcHB2PIkCEvjPPPP/80kiRJYgBGsWLFjO7duxtr1641/Pz8wtXz9vY2nJycjOrVqxuhoaHW8u+//94AjDlz5ljL2rZta2TNmtW6vWXLFgMw1q9fH+6cNWvWNHLkyGHdvnbtmgEYc+fODXcuwBg+fHi4Y4sXL26ULFnSur1q1SoDMCZPnmwtCw0NNapWrRrhnJFZuXKlARi7du2KsC9r1qwGYOzduzdcezg7Oxu9evWylo0YMcJImjSpcfHixXDH9+vXz7C3tzdu3rwZ5fUnT55sAMaaNWuirPPo0SMDMBo2bGgtq1SpklGwYMEoj1mzZo0BGFOmTIlwP5EtY8aMifJchmEYu3btMgBj5cqVxoYNGwyTyWS9rz59+lhfz8ji8vf3j3A+T0/PcO8BwzCMggULGpUqVYpQN6rX6OnTp0aqVKki/P7cvXvXSJkyZbjysPdTv379wtX9448/rPclIiIiIiK2pR5F8lbKnz8/adOmtfbkOHnyJH5+ftanmpUrV44DBw4AlrmLQkNDI52fyMPDg5w5c1q3ixQpQooUKbh69aq1rE2bNgQGBvLLL79Yy5YvX05ISMhL56EpWLAgJ06coFWrVly/fp0pU6ZQv359MmTIwKxZs6z1tm/fTlBQED169MDO7v+/tp06dSJFihRs3LgxymtUrVqVdOnSsXz5cmuZj48P27Zto2nTpi+ML8znn38ebvuDDz4I1wabN2/G0dGRTp06Wc
vs7Ozo3LlztM7/MgUKFOCDDz6wbqdPn568efOGi2HlypV88MEHpE6dOlwvHQ8PD0JDQ9m7d2+U53/69CkAyZMnj7JO2D5fX99ox50sWbJw5w9TunRptm3bFmFp3rx5tM9dvXp10qRJw7JlyzAMg2XLlr3w+CRJkljXnzx5woMHD6hUqRJXr159reFd27Zt4/HjxzRv3jxcu9vb21O6dOlInzD4xRdfhNtOmTIlAFu2bMHf3/+VYxERERERkdenyazlrWQymShXrhx79+7FbDZz4MAB3NzcyJUrF2BJFH3//fcA1oRRZImid999N0JZ6tSpw00UnC9fPkqVKsXixYvp2LEjYBl2VqZMGev1XiRPnjwsXLiQ0NBQzp49y4YNGxg/fjyffvop2bNnx8PDgxs3bgCQN2/ecMc6OTmRI0cO6/7IODg40KhRI5YsWUJgYCDOzs6sXr2a4ODgaCWKXFxcSJ8+/Qvb4MaNG2TKlAlXV9dw9aJz/9ERndfh0qVLnDp1KkKsYV40OXhYEui/CZ1/i04y6b+ePXsW6THp0qXDw8Mj2ueJjKOjI02aNGHJkiW8//773Lp1K8phZ2B5nw8ZMoSDBw9GSMY8efLEmqyJqUuXLgGWhGRkUqRIEW7bwcGBLFmyhCvLnj07PXv2ZNKkSSxevJgPPviAunXr0qpVq1eOS0REREREXo0SRfLWqlChAuvXr+f06dPW+YnClCtXjj59+nD79m32799P5syZyZEjR4RzRDYpMIBhGOG227RpQ/fu3fnrr78IDAzk0KFD1kRUdNnb21O4cGEKFy5M2bJlqVKlCosXL37thAJAs2bN+PHHH9m0aRP169dnxYoV5MuXj6JFi0YrLluLzutgNpupVq0affv2jbRunjx5ojx/2Fw7p06don79+pHWOXXqFGDp3RRdYfNgxVbC7L9atGjBDz/8wNChQylatGiUsV25coUPP/yQfPnyMWnSJNzd3XFycuLXX3/lu+++e+lk3y8SduzChQvJmDFjhP3/nfPJ2dk5XK+4MBMnTqRdu3asW7eOrVu30q1bN8aMGcOhQ4ciJJZERERERCTuKFEkb62wHkL79+/nwIED9OjRw7qvZMmSODs7s3v3bg4fPkzNmjVf61rNmjWjZ8+eLF26lOfPn+Po6BjtYV2Ree+99wC4c+cOgPWpaBcuXAiX0AoKCuLatWsvTSZVrFiRTJkysXz5cipUqMDOnTsZMGDAK8f3X1mzZmXXrl34+/uH61V0+fLlaB1vMpleO4acOXPy7NmzV0qsVahQgVSpUrFkyRIGDBgQaWIq7ClrtWvXjtY5Q0NDWbJkCa6urpH2VosNFSpU4N1332X37t2MGzcuynrr168nMDAQLy+vcL2zIhsWFtVrEVV52NBMNze3105qhiVKBw4cyG+//Ub58uX54YcfGDly5GudV0REREREok9zFMlb67333sPFxYXFixdz+/btcD2KnJ2dKVGiBNOnT8fPz++1v8inS5eOjz76iEWLFrF48WJq1KhBunTpXnrcvn37In1i2a+//gr8f6iZh4cHTk5OTJ06NVwvmtmzZ/PkyRNq1ar1wuvY2dnRuHFj1q9fz8KFCwkJCXmtRNZ/eXp6EhwcHG5eJbPZHO6JWi+SNGlSgNd6wtXHH3/MwYMH2bJlS4R9jx8/JiQkJMpjXV1d6d27NxcuXIg0gbZx40bmzZuHp6cnZcqUeWksoaGhdOvWjXPnztGtW7cIw69ii8lkYurUqQwZMoTWrVtHWS8s8fXv986TJ0+YO3duhLpJkyaN9HWI6jXy9PQkRYoUjB49OtL38v379196H76+vhFen8KFC2NnZ0dgYOBLjxcRERERkdijHkXy1nJycqJUqVLs27cPZ2dnSpYsGW5/uXLlmDhxIhD5/EQx1aZNGxo3bgzAiBEjonXMuHHj+P3332nYsCFFihQB4Pjx4yxYsIA0adJYe0GlT5+e/v37M2zYMGrUqEHdunW5cOECM2bMoFSpUi+dNBugadOmTJs2jSFDhlC4cOFwj59/XfXr1+f999+nV69eXL58mXz58uHl5cWjR4+Al/cYKlasGPb29owbN44nT57g7OxM1apVcXNzi3YMffr0wcvLi9q1a9OuXTtKliyJn58fp0+f5pdffuH69esvTN7169ePP/74g3HjxnHw4EEaNWpEkiRJ2L9/P4sWLSJ//vzMnz8/wnFPnjxh0aJFAPj7+3P58mVWr17NlStXaNasWaTvhdu3b1uP+bdkyZJFOfQtKvXq1aNevXovrFO9enWcnJyoU6cOn332Gc+ePWPWrFm4ublZe62FKVmyJDNnzmTkyJHkypULNzc3qlat+sLXaObMmbRu3ZoSJUrQrFkz0qdPz82bN9m4cSPly5d/6TDMnTt30qVLF5o0aUKePHkICQlh4cKF2Nvb06hRoxi1h4iIiIiIvCabPnNNJI7179/fAIxy5cpF2Ld69WoDMJInT26EhIRE2A8YnTt3jlCeNWtWo23bthHKAwMDjdSpUxspU6Y0nj9/Hq34Dhw4YHTu3NkoVKiQkTJlSsPR0dF49913jXbt2hlXrlyJUP/777838uXLZzg6OhoZMmQwvvjiC8PHxydcnbZt2xpZs2aNcKzZbDbc3d0NwBg5cmSE/deuXYvwKPu2bdsaSZMmjVB3yJAhxn//+bh//77RokULI3ny5EbKlCmNdu3aGQcOHDAAY9myZS9ti1mzZhk5cuQw7O3twz2GPWvWrEatWrUi1K9UqVKEx7g/ffrU6N+/v5ErVy7DycnJSJcunVGuXDnj22+/NYKCgl4aQ2hoqDF37lyjfPnyRooUKQwXFxejYMGCxrBhw4xnz55FGgP/erx9smTJjNy5cxutWrUytm7dGuk1smbNGu6Yfy+RvW7/tmvXrmg9Rr5SpUpGwYIFw5V5eXkZRYoUMVxcXIxs2bIZ48aNM+bMmWMAxrVr16z17t69a9SqVctInjy5AYRr46heo7DYPD09jZQpUxouLi5Gzpw5jXbt2hnHjh2z1onq/XT16lWjQ4cORs6cOQ0XFxcjTZo0RpUqVYzt27e/8D5FRERERCT2mQzjP7PyisgrCQkJIXPmzNSpU4fZs2fbOpwEYe3atTRo0ID9+/dTvnx5W4cjIiIiIiIiL6E5ikRiydq1a7l//z5t2rSxdSg28fz583DboaGhTJs2jRQpUlCiRAkbRSUiIiIiIiIxoTmKRF7T4cOHOXXqFCNGjKB48eJUqlTJ1iHZRNeuXXn+/Dlly5YlMDCQ1atX89tvvzF69GiSJEli6/BEREREREQkGpQoEnlNM2fOZNGiRRQrVox58+bZOhybqVq1KhMnTmTDhg0EBASQK1cupk2bRpcuXWwdmoiIiIiIiEST5igSERERScD27t3LhAkT+P3337lz5w5r1qx56RMSd+/eTc+ePTlz5gzu7u4MHDiQdu3axUu8IiIi8mbTHEUiIiIiCZifnx9FixZl+vTp0ap/7do1atWqRZUqVThx4gQ9evTgk08+YcuWLXEcqYiIiLwN1KNIRERE5A1hM
ple2qPo66+/ZuPGjfz555/WsmbNmvH48WM2b94cD1GKiIjIm+ytn6PIbDbz999/kzx5ckwmk63DERERkSgYhsHTp0/JnDkzdnbq9PyqDh48iIeHR7gyT09PevToEeUxgYGBBAYGWrfNZjOPHj0ibdq0+vwkIiKSgMXF56e3PlH0999/4+7ubuswREREJJpu3bpFlixZbB3GG+vu3btkyJAhXFmGDBnw9fXl+fPnkT6JcsyYMQwbNiy+QhQREZFYFpufn976RFHy5MkBS6OlSJEi1s5rNpu5f/8+6dOn1/96xiG1c/xQO8c9tXH8UDvHj7hqZ19fX9zd3a1/uyX+9O/fn549e1q3nzx5wrvvvhvrn59EREQkdsXF56e3PlEU1l06RYoUsZ4oCggIIEWKFPoyEofUzvFD7Rz31MbxQ+0cP+K6nTXU6fVkzJiRe/fuhSu7d+8eKVKkiLQ3EYCzszPOzs4RymP785OIiIjEjdj8/KRP0SIiIiJvkbJly7Jjx45wZdu2baNs2bI2ikhERETeJEoUiYiIiCRgz54948SJE5w4cQKAa9euceLECW7evAlYho21adPGWv/zzz/n6tWr9O3bl/PnzzNjxgxWrFjBV199ZYvwRURE5A2jRJGIiIhIAnbs2DGKFy9O8eLFAejZsyfFixdn8ODBANy5c8eaNALInj07GzduZNu2bRQtWpSJEyfy888/4+npaZP4RURE5M3y1s9RJCLyMqGhoQQHB8fpNcxmM8HBwQQEBGjunDikdo4fr9rOjo6O2Nvbx2Fkb6fKlStjGEaU++fNmxfpMX/88UccRiUiIiJvKyWKRCTRMgyDu3fv8vjx43i5ltls5unTp5qoNw6pnePH67RzqlSpyJgxo14fERERkQRKiSIRSbTCkkRubm64urrG6RdXwzAICQnBwcFBX5DjkNo5frxKOxuGgb+/P97e3gBkypQpLkMUERERkVekRJGIJEqhoaHWJFHatGnj/HpKYMQPtXP8eNV2Dns0u7e3N25ubhqGJiIiIpIAKVEUQwEhAaw8s5I1e3/k4bnjpM1fggYVP6NJwSa4OLjYOjwRiaawOYlcXV1tHIlI4hL2OxccHKxEkYiIiEgCpERRDHhd8KLd2nb4BPhgZ4A5A9g9PMCatQfovrk78+vPp07eOrYOU0RiQL1OROKXfudEREREEjabPhJm79691KlTh8yZM2MymVi7dm24/YZhMHjwYDJlykSSJEnw8PDg0qVLNonV64IX9ZfV53HAYwDM/3zODfv5OOAx9ZbVw+uCl03iExERERERERF5XTZNFPn5+VG0aFGmT58e6f7x48czdepUfvjhBw4fPkzSpEnx9PQkICAgXuMMCAmg3dp2ABhE/njasPJ2a9sREBK/8YmIJEbz5s0jVapUtg4jwYjsP1xeRbZs2Zg8efJrn0dERERE3kw2TRR99NFHjBw5kgYNGkTYZxgGkydPZuDAgdSrV48iRYqwYMEC/v7771j5IBwTK8+sxCfAJ8okURgDA58AH345+0s8RSYiiY3JZHrhMnTo0HiN5/Lly7Rv354sWbLg7OxM9uzZad68OceOHYvV60SWvGjatCkXL16M1evYypo1ayhTpgwpU6YkefLkFCxYkB49esTpNaNKtB09epRPP/00Tq8tIiIiIglXgp2j6Nq1a9y9excPDw9rWcqUKSldujQHDx6kWbNmkR4XGBhIYGCgddvX1xcAs9mM2Wx+pVjWnF+DnckOs/Hy4+1Mdqw+t5oWhVq80rUkPLPZjGEYr/zaSfQkxnYOu+ewJT6EXed1rvf3339b15cvX86QIUM4f/68tSxZsmThrhMaGoqDQ9z8U3/s2DE8PDwoVKgQP/zwA/ny5ePp06esW7eOXr16sXv37li93n9fKxcXF1xcXCK0Z2y0c3zasWMHTZs2ZeTIkdStWxeTycTZs2fZtm1bjO8hJu/nqNopXbp0kZZH9/joxhjZ3+XE9G+QiIiISEKVYBNFd+/eBSBDhgzhyjNkyGDdF5kxY8YwbNiwCOX3799/5SFrd5/cjVaSCMBsmLn75C7e3t6vdC0Jz2w28+TJEwzDwM7Oph3g3mqJsZ2Dg4Mxm82EhIQQEhIS59cLS9rA603mG/YlHiB58uSYTCZr2Z49e6hWrRpeXl4MGTKEP//8k19//ZUFCxbw+PFjVq1aZT22V69enDx5ku3btwOW98CECROYPXs2d+/eJXfu3HzzzTc0atQoyvtp164duXLlYufOneHeN4UKFaJz587Wdj19+jS9evXi0KFDuLq60qBBAyZMmECyZMkA6NixI48fP6Z8+fJMnjyZoKAgPv74YyZOnIijoyMeHh7cuHGDnj170rNnTwCCgoJYsGABvXr14v79+wAMHz4cLy8vunXrxogRI/Dx8cHT05MffviB5MmTA5A7d266du1Kt27drPG+99571K1bl8GDBwNw8+ZNevTowa5du7Czs6N69epMnjzZ+vcoLN4XteeqVasYOXIkV65cwdXVlWLFirFq1SqSJk0aoS29vLwoV64cX331lbUsR44c1K5dO9x788cff+S7777j1q1bZMuWjf79+9OqVatw5woNDSUkJMT6XvD29rb2Gjpx4gTvv/8+Fy9e5MaNG3To0AHA+toNHDiQwYMHR2ijyNrju+++s77vRowYgZeXFz169GDYsGGRtvu/hYSEYDabefjwIY6OjuH2PX36NEJ9EREREYlfCTZR9Kr69+9v/SIBlh5F7u7upE+fnhQpUrzSOTOmzIjd3ej3KMqYMiNubm6vdC0Jz2w2YzKZSJ8+faJJYNhCYmzngIAAnj59ioODQ5z1uInMf78Yv46w1yos/rBHjQ8cOJAJEyaQI0cOUqdOzaJFi7Czswt3n2FD1cLKRo0axeLFi5k5cya5c+dm7969tGvXjowZM1KpUqUI1/7jjz84e/YsixcvxsnJKcL+sCSCn58ftWvXpmzZshw5cgRvb286derEV199xdy5c633sWfPHjJnzszOnTu5fPkyzZo1o3jx4nTq1InVq1dTrFgxOnXqRKdOnaz3/N/7t7Oz4+rVq2zYsIH169fj4+ND06ZN+fbbbxk1alS4dvtvW4SVmc1mGjduTLJkydi9ezchISF06dKFVq1asWvXLuvxL2rPO3fu0Lp1a8aNG0eDBg14+vQp+/btw97ePtL3WqZMmVi2bBnnz5+nUKFCkb7Wa9asoWfPnnz33Xd4eHiwYcMGOnXqRNasWalSpYq1Xtg1wt4L/35///vnBx98wHfffReuR1qyZMnCteWL2qN169Zs3boVR0fHaLd7mLDXLm3atLi4uITb999tEREREYl/CTZRlDFjRgDu3btHpkyZrOX37t2jWLFiUR7n7OyMs7NzhPKwD/avokG+Bqw5vyZadc2GmYb5GyaaL9vxIexLnNo0biW2drazsws3t0+Y996DF3RafA0GEJYkCt+jKGNGeJUpfcLi/u/P4cOHU7169Sjr//eYwMBAxowZw/bt2ylbtiwAOXPm5MCBA/z0009Urlw5wrkuX74MQP78
+V/YQ2rp0qUEBASwYMECa2+a77//njp16jBu3DhrL53UqVMzffp07O3tyZ8/P7Vq1WLnzp18+umnpE2bFnt7e1KkSBHu70Fk9282m5k9ezapU6fGZDLRunVrdu7cGeHe/xtzWNnOnTs5ffo0165dw93dHYAFCxZQsGBBjh07RqlSpV7annfv3iUkJIRGjRqRNWtWAIoUKRJlG3Xr1o39+/dTpEgRsmbNSpkyZahevTotW7a0/j2bOHEi7dq1o3PnzgDkzZuXw4cPM3HiRKpWrRrhPv4dT2Tt5OzsTKpUqTCZTOHaNCbt8fvvv1OmTBlru8+bN8/agyiydv/vuSP79yax/PsjIiIikpAl2ERR9uzZyZgxIzt27LAmhnx9fTl8+DBffPFFvMbSpEBjuq9oz2PHUIwXjBgxGZAq2J7G+SMfqiEiCd/du3D7dlyc+dWHm8XUe++9F6P6ly9fxt/fn2rVqoUrDwoKonjx4pEeE915ac6dO0fRokXDDbkqX748ZrOZCxcuWBNFBQsWtPaCAUsvm9OnT8foPsAy6fW/hztlypQpRkOBz507h7u7uzUpAlCgQAFSpUrFuXPnwiWKolK0aFE+/PBDChcujKenJ9WrV6dx48akTp060vpJkyZl48aNXLlyhV27dnHo0CF69erFlClTOHjwIK6urpw7dy7CBNPly5dnypQp0b63V/Gy9ihTpgzw+u0uIiIiIgmHTRNFz549s/6vNFgmsD5x4gRp0qTh3XffpUePHowcOZLcuXOTPXt2Bg0aRObMmalfv368xumycy/zfwmlXnNLMiiqZJEBzP8lFJeKe8HTM15jFJHY8U9nxjjw78RKxB5Fsem/8+DY2dlFSOwEBwdb1589ewbAxo0beeedd8LVi6yHJkCePHkAOH/+fJTJpJj475C8sF4qsX2el7VFdLzsHPb29mzbto3ffvuNrVu3Mm3aNAYMGMDhw4fJnj17lOfNmTMnOXPm5JNPPmHAgAHkyZOH5cuX0759+xjFFxYjhE/oxfQ+YyK2Xj8RERERsT2bJoqOHTsWbm6FsLmF2rZty7x58+jbty9+fn58+umnPH78mAoVKrB58+b4ncPAMGDQIOpctmPtMjPt6oNPErAzg9nu/z8BMIGjYYJBg6B6dXiNCWtFxDZi+anuVoZhmcTXwcEh3v9pSJ8+PX/++We4shMnTli/3BcoUABnZ2du3rwZ6XxEkSlWrBgFChRg4sSJNG3aNMKQocePH5MqVSry58/PvHnz8PPzsyawDhw4gJ2dHXnz5o32PTg5OVknA38d6dOn586dO9ZtX19frl27Zt3Onz8/t27d4tatW9ZeNGfPnuXx48cUKFDAeo4XtSdYEiXly5enfPnyDB48mKxZs1rnGYqObNmy4erqip+fnzWuAwcO0LZtW2udAwcOWGOK7D4B7ty5Y+3JdOLEiXB1otOm0WkPEREREXm72HQygMqVK4d7PHXYMm/ePMDyQXv48OHcvXuXgIAAtm/fbv1f7HgTFAQ3b4LZTN0L8PdEWLga6p+HytcsPxeuhi+OWKp3qGvw0PuG5TgRkQSgatWqHDt2jAULFnDp0iXrE9HCJE+enN69e/PVV18xf/58rly5wvHjx5k2bRrz58+P9Jwmk4m5c+dy8eJFPvjgA3799VeuXr3KqVOnGDVqFPXq1QOgZcuWuLi40LZtW/7880927dpF165dad26dYSnWr5ItmzZ2Lt3L7dv3+bBgwev1RYLFy5k3759nD59mrZt24Yb8ubh4UHhwoVp2bIlx48f58iRI7Rp04ZKlSpZh/S9rD0PHz7M6NGjOXbsGDdv3mT16tXcv3+f/PnzRxrT0KFD6du3L7t37+batWv88ccfdOjQgeDgYOtwwD59+jBv3jxmzpzJpUuXmDRpEqtXr6Z3796RnjNXrly4u7szdOhQLl26xMaNG5k4cWKENn327Bk7duzgwYMH+Pv7RzjPi9qjZMmSMWt8EREREXkjaNbIl3F2hqNH4fff4fffcTn8O63m/s7KPkdZ2WILK/scpXj3Q/TYlpl89+FOcvhk0PsYkTwFSETEFjw9PRk0aBB9+/alVKlSPH36lDZt2oSrM2LECAYNGsSYMWPInz8/NWrUYOPGjS8cKvX+++9z7NgxcuXKRadOncifPz9169blzJkzTJ48GQBXV1e2bNnCo0ePKFWqFI0bN+bDDz/k+++/j9E9DB8+nOvXr5MzZ05rb5lX0b9/fypVqkTt2rWpVasW9evXJ2fOnNb9JpOJdevWkTp1aipWrIiHhwc5cuRg+fLl1jova88UKVKwd+9eatasSZ48eRg4cCATJ07ko48+ijSmSpUqcfXqVdq0aUO+fPn46KOPuHv3Llu3brX2uqpfvz5Tpkzh22+/pWDBgvz444/MnTs30onGwTIUbOnSpZw/f54iRYowbtw4Ro4cGa5OuXLl+Pzzz2natCnp06dn/PjxEc4TVXssW7Ys2m0uIiIiIm8WkxHdGUnfUL6+vqRMmZInT56QIkWKWDuv2WzG29sbNzc37OzsmFJhJeWvfUzZjhBiD4saLKJlkZaxdr3E6r/tLHEjMbZzQEAA165dI3v27PEynNUwjH8NPdOw1Liido4fr9POL/rdi6u/2RJzei1ERETeDHHxNztxfCOMB/UWNsZ8530G77Fsf7GhM7ee3LJtUCIiIiIiIiIiMaBEUSzJlt3E8WYT6L8f3v8LngY/od26dpgNPfVFRERERERERN4MShTFouYzK7LNvi6LVoNzkD07r+1k6uGptg5LRERERERERCRalCiKRSlTwqM+Y8jxyI7JWy2PHO63vR9n75+1cWQiIiIiIiIiIi+nRFEs+3hoAVal6shnx6DMpVQEhgbSanUrgkKDbB2aiIiIiIiIiMgLKVEUyxwdwW36UPxxZfW6xzg/T8Yfd/9g2O5htg5NREREREREROSFlCiKA5VbZGZ97l5kegYTvJIBMPbAWH679ZuNIxMRERERERERiZoSRXGk5LI+eJOerufukvfU+5gNM63XtOZZ0DNbhyYiIiIiIiIiEikliuJI7hLJOfDhEADWb7xK0iB3rvpcpdeWXjaOTEREREREREQkckoUxaGqyz7lin1ucgc+oPGSKpgw8dPxn9hwcYOtQxOR2LZ9OxQoYPn5BqlcuTI9evSI8+s8fPgQNzc3rl+/Hqvnja34+/XrR9euXV8/IBERERGRN5wSRXEoZTpHrnUaA8D067/gfuFTADp6deS+331bhiYisckw4Jtv4Nw5y0/DiNPLtWvXDpPJxNixY8OVr127FpPJFKNzrV69mhEjRsRmeJEaNWoU9erVI1u2bABcv34dk8kU6XLo0KE4j+e/evfuzfz587l69eprnWfz5s2YTCbu3r0brjxTpkzWew8T1gY7duwALEmvsDZwcXEhT548jBkzBiOS99P8+fMpVaoUrq6uJE+enEqVKrFhQ/j/hNi9ezcmk4mCBQsSGhoabl+qVKmYN29euLI//viDpk2bkilTJpydncmaNSu1a9dm/fr1kcYgIiIiIm8nJYr
iWJVpDTnlWoak+NNnZTCZHQri7efNpxs+1QdvkbfF1q1w9Khl/ehRy3Ycc3FxYdy4cfj4+LzWedKkSUPy5MljKarI+fv7M3v2bDp27Bhh3/bt27lz5064pWTJknEaz7+FhoZiNptJly4dnp6ezJw587XOV6FCBRwcHNi9e7e17Ny5czx//hwfH59wPap27dqFs7Mz5cuXt5Z16tSJO3fucOHCBfr378/gwYP54Ycfwl2jd+/efPbZZzRt2pRTp05x5MgRKlSoQL169fj+++8jxHT16lUWLFjwwrjXrVtHmTJlePbsGfPnz+fcuXNs3ryZBg0aMHDgQJ48efJqDSIiIiIibxwliuKYvYMJY/wEAD4PmU+6+cNxtHNk7fm1zD8538bRichrMwwYNAjs7S3b9vaW7ThOBHt4eJAxY0bGjBkTZZ2HDx/SvHlz3nnnHVxdXSlcuDBLly4NV+ffQ7e++eYbSpcuHeE8RYsWZfjw4dbtn3/+mfz58+Pi4kK+fPmYMWPGC2P99ddfcXZ2pkyZMhH2pU2blowZM4ZbHB0dAUvPqfr164er36NHDypXrhzltQIDA/n666/JkiULSZMmpXTp0uGSNvPmzSNVqlR4eXlRoEABnJ2duXnzJgB16tRh2bJlL7yXl0mWLBmlSpUKd83du3dToUIFypcvH6G8TJkyuLi4WMtcXV3JmDEjWbNmpX379hQpUoRt27ZZ9x86dIiJEycyYcIEevfuTa5cucifPz+jRo2iR48e9OzZk1u3boWLqWvXrgwZMoTAwMBIY/bz86Njx47UqlWLjRs3Ur16dXLkyEH+/Pnp2LEjJ0+eJGXKlK/VLiIiIiLy5lCiKB4U7VyBo+/Uw4FQhl1aQGl/yxeubpu6cf3xddsGJyL/Zxjg5xezxcvL0osobGhPaKhl28sr+ud4haSSvb09o0ePZtq0afz111+R1gkICKBkyZJs3LiRP//8k08//ZTWrVtz5MiRSOu3bNmSI0eOcOXKFWvZmTNnOHXqFC1atABg8eLFDB48mFGjRnHu3DlGjx7NoEGDmD8/6sT3vn374q2XUJcuXTh06BBLly7l1KlTNGnShBo1anDp0iVrHX9/f8aNG8fPP//MmTNncHNzA+D999/nr7/+eu15lKpUqcKuXbus27t27aJy5cpUqlQpXPnu3bupUqVKpOcwDIN9+/Zx/vx5nJycrOVLly4lWbJkfPbZZxGO6dWrF8HBwaxatSpceY8ePQgJCWHatGmRXmvr1q08fPiQvn37RnlPMR3SKCIiIiJvLiWK4sk7C8YSgj31WYfdt2Uoka48T4Oe0mZNG0LNoS8/gYjEPX9/SJYsZst/erxY1a8frp4peXIcU6fGlDx5xHP4+79SuA0aNKBYsWIMGTIk0v3vvPMOvXv3plixYuTIkYOuXbtSo0YNVqxYEWn9ggULUrRoUZYsWWItW7x4MaVLlyZXrlwADBkyhIkTJ9KwYUOyZ89Ow4YN+eqrr/jxxx+jjPPGjRtkzpw50n3lypUjWbJk4ZZXdfPmTebNm8fSpUv54IMPyJkzJ71796ZChQrMnTvXWi84OJgZM2ZQrlw58ubNi6urK4A1xhs3brxyDGBJFF28eJE7d+4AsGfPHipVqkTFihXZs2cPYBkOdvPmzQiJohkzZpAsWTKcnZ2pWLEiZrOZbt26WfdfvHiRnDlzhksehcmcOTMpUqTg4sWL4cpdXV0ZMmQIY8aMiXQIWVj9vHnzWsuOHj0a7jX57/xHIiIiIvL2UqIonmSumo8TJT8BYFxIP1x+nU8yp2Tsu7mPSQcn2Tg6EXlTjRs3zjqnzH+FhoYyYsQIChcuTJo0aUiWLBlbtmyxDrWKTMuWLa2JIsMwWLp0KS1btgQsQ5SuXLlCx44dwyURRo4cGa4X0n89f/483PCqf1u+fDknTpwIt7yq06dPExoaSsGCBUmePLk1vj179oSLz8nJiSJFikQ4PkmSJIClx1FkRo8eHe6+o2rHcuXK4eTkxO7duzl79izPnz+nRIkSvPfee9y/f59r166xe/dukiRJEmE4XsuWLTlx4gQHDhzgo48+YsCAAZQrVy5cnVeZ365jx46kTZuWcePGRat+kSJFrK+Hn58fISEhMb6miIiIiLyZHGwdQGJScMUQ/HMtpIxxmEy//kHhepP58c4nDNw1EM9cnhTJEPGLi4jEI1dXePYsenUNAypVgpMn/z/s7N/s7aFoUdizB0wmDMMgJCQEBweHiMN4/unR8ioqVqyIp6cn/fv3p127duH2TZgwgSlTpjB58mQKFy5M0qRJ6dGjB0FBQVGer3nz5nz99dccP36c58+fc+vWLZo2bQrAs3/aZtasWRHmMrIPm6MpEunSpYty0m13d3drb6X/srOzi5AUCQ4OjvI6z549w97enkOHDuHs7Byunf/dUylJkiSRDqV69OgRAOnTp4/0/J9//jkff/yxdTuqXlKurq68//777Nq1i0ePHlGhQgXs7e2xt7enXLly7Nq1i127dlG+fPkIPYNSpkxpbY8VK1aQK1cuypQpg4eHBwB58uRh//79BAUFRTj277//xtfXlzx58kSIycHBgVGjRtGuXTu6dOkSbl/u3LkBuHDhgjVx5ezsHOXrIiIiIiJvN/UoikdJcmTi1se9ARhDfzYNa03NnHUJCg2i1epWBIZEPtGoiMQTkwmSJo3ecuAAHD8eeZIILOXHj1vqvexcrzn/y9ixY1m/fj0HDx4MV37gwAHq1atHq1atKFq0KDly5IgwLOm/smTJQqVKlVi8eDGLFy+mWrVq1jl8MmTIQObMmbl69Sq5cuUKt2TPnj3KcxYvXpyzZ8/G+L7Sp09vHb4V5kU9jooXL05oaCj379+PEF/GjBlfer0///wTR0dHChYsGOn+NGnShDung0PU/9dSpUoVdu/eze7du8NNvl2xYkV2797Nnj17opyfKEyyZMno3r07vXv3tibMmjVrxrNnzyId6vftt9/i6OhIo0aNIj1fkyZNKFiwIMOGDQtXXr16ddKkSRPt3kYiIiIi8nZToiie5fmpNz5ObuTmMrX/nkX207NI75qe096nGbRrkK3DE5Ho+O+TzqIST09AK1y4MC1btmTq1KnhynPnzs22bdv47bffOHfuHJ999hn37t176flatmzJsmXLWLlypXXYWZhhw4YxZswYpk6dysWLFzl9+jRz585l0qSoh9B6enpy5syZSHsVPXz4kLt374ZbAgICAKhatSrHjh1jwYIFXLp0iSFDhvDnn39GeZ08efLQsmVLOnTowOrVq7l27RpHjhxhzJgxbNy48aX3vW/fPj744APrELTXUaVKFS5dusSWLVuoVKmStbxSpUqsXbuWW7duvTRRBPDZZ59x8eJF6wTVZcuWpXv37vTp04eJEydy5coVzp8/z8CBA5kyZQoTJ07E3d09yvONHTuWOXPm4OfnZy1LliwZP//8Mxs3bqRWrVps2bKFq1evcurUKcaPHw+8uMeYiIiIiLxdlCiKZ6YUyQkZMBSAIQxjyUQXBhefBcC3v33L3ht7bR
idiETL1q3hn3QWlbAnoG3dGuchDR8+HLPZHK5s4MCBlChRAk9PTypXrkzGjBkjPG4+Mo0bN+bhw4f4+/tHqP/JJ5/w888/M3fuXAoXLkylSpWYN2/eC3sUFS5cmBIlSkQ6ibaHhweZMmUKt6xduxawJJgGDRpE3759KVWqFE+fPqVNmzYvjH3OnDm0bNmS3r17kzdvXurXr8/Ro0d59913X3rfy5Yto1OnTi+tFx1ly5bF2dkZwzDCPfGtdOnSBAcHkyxZMkqVKvXS86RJk4Y2bdowdOhQ6+s7efJkZsyYwdKlSylUqBDvvfcee/fuZe3atXTt2vWF56tatSpVq1aNMOdQgwYN+O2333B1daVNmzbkzZuXqlWrsnPnTpYtW0bt2rVfoRVERERE5E1kMl5lVsw3iK+vLylTpuTJkyekSJEi1s5rNpvx9vbGzc0NO7sY5tuCg3mQqRDpHl5kBAPZ8+EI3u3akbkn5pA1ZVZOfXGKFM6xF+ub7LXaWaItMbZzQEAA165dI3v27FFOtBwpw4DSpeH33+E/iZlI2dlByZIYhw4REhoa+RxFicDGjRvp06cPf/75Z5y+x144F9QLbNq0iV69enHq1KkXDikTi1dtZ3jx715c/c2WmNNrISIi8maIi7/ZieMbYULj6EjyaWMA6Mkkzu74m0r+k8meKjs3ntyg++buNg5QRKIUFAQ3b0YvSQSWerduWY5LxGrVqsWnn37K7du3bR1KpPz8/Jg7d66SRCIiIiKS6OkTsY04N2vAo2FlSXPhIEMZyoA+PzFn+wJqLK/IvBPzqJunLg3yN7B1mCLyX87OluFk9+9H/xg3N8txifwR4z169LB1CFFq3LixrUMQEREREUkQlCiyFZOJ1D9PgA8q0JHZTL7dgx1zK9C3Wl/GHRjHpxs+pax7WTIme/mTekQknrm7W5aYeLtH+YqIiIiIyFtCQ89syFShPM+qN8AeM2Ppx6RJ0DzjMIpmKMoD/wd84vUJb/kUUiIiIiIiIiKSgChRZGPJpo4h1GRPXdZTNmQvPbs7s7DBIpzsndh4aSM/H//Z1iGKiIiIiIiISCKhRJGt5c2L0dHyOOYJ9GHnToOT2woxuupoAL7a8hVXHl2xZYQiIiIiIiIikkgoUZQAOIwYQohLUkpzhMb8Qs+e0DbvV1TOVhm/YD9ar2lNiDlxT4IrIiIiIiIiInFPiaKEIGNGHPr1AWAM/Xl8P4hv+tsxr948Ujin4OBfBxl/YLyNgxQRERERERGRt50SRQlFr16Eps9ALq7wGT8yaxbc+jMr0z6aBsCQ3UM4fue4jYMUERERERERkbeZEkUJRbJk2A8fCsBghpMcXz7/HJrma02j/I0IMYfQek1rngc/t2mYIhJeQEgAC08upNGKRlSeV5lGKxqx8ORCAkICbB3aG61ixYosWbLE1mFw9uxZsmTJgp+fn61DERERERGJF0oUJSQdO2LkzUt6HtCX8Zw5A5Mmmfih9g9kSJqBs/fP8s2Ob2wdpYj8w+uCF5knZqbN2jasPb+WPTf2sPb8WtqsbUPmiZlZf2F9nF377t27dO3alRw5cuDs7Iy7uzt16tRhx44dcXbN1zFv3jxSpUoVrbpeXl7cu3ePZs2aATB06FBMJlOUy7Bhw6zHPn/+nGHDhpE3b16cnZ1Jly4dTZo04cyZM+GuMXToUIoVKxbh2tevX8dkMnHixAkAChQoQJkyZZg0adIr3beIiIiIyJtGiaKExNER09ixAPRkEpm5zfDh4Hs3HXPqzQFg8uHJ7LiaML8IiiQmXhe8qL+sPo8DHgNgNszhfj4OeEy9ZfXwuuAV69e+fv06JUuWZOfOnUyYMIHTp0+zefNmqlSpQufOnV/5vEFBQZGWBwcHv/I5X8XUqVNp3749dnaWP1G9e/fmzp07EZZ27dqRKlUqWrRoAUBgYCDVqlVj/vz5jBgxgosXL/Lrr78SEhJC6dKlOXTo0CvF0759e2bOnElIiB4qICIiIiJvPyWKEpp69aB8eVx5zjCGEBAAX34JH+WqyWclPwOg3bp21i+nIhL/AkICaLe2HQAGRqR1wsrbrW0X68PQvvzyS0wmE0eOHKFRo0bkyZOHggUL0rNnz3DJkJs3b1KvXj2SJUtGihQp+Pjjj7l37551f1ivmp9//pns2bPj4uICgMlkYubMmdStW5ekSZMyatQoANatW0eJEiVwcXEhR44cDBs2LFzy5PHjx3z22WdkyJABFxcXChUqxIYNG9i9ezft27fnyZMn1l5AQ4cOjfTe7t+/z86dO6lTp461LFmyZGTMmDHcsmPHDhYuXMiyZcvInTs3AJMnT+bgwYOsWbOGjz/+mKxZs/L++++zatUq8ufPT8eOHTGMyF+vF6lWrRqPHj1iz549MT5WRERERORNo0RRQmMywYQJALRnLgU4w5YtsGIFfFv9W3KlycVfvn/R5dcuNg5U5O1jGAZ+QX4vXRadXIRPgE+USSLr+TDwCfBh8anFLzxfTJIXjx49YvPmzXTu3JmkSZNG2B82vMtsNlOvXj1rgmPbtm1cvXqVpk2bhqt/+fJlVq1axerVq63DrcCSRGrQoAGnT5+mQ4cO7Nu3jzZt2tC9e3fOnj3Ljz/+yLx586xJJLPZzEcffcSBAwdYtGgRZ8+eZezYsdjb21OuXDkmT55MihQprL2BevfuHen97d+/H1dXV/Lnzx9lG/z+++906tSJsWPH4unpaS1fsmQJ1apVo2jRouHq29nZ8dVXX3H27FlOnjz5wvaNjJOTE8WKFWPfvn0xPlZERERE5E3jYOsAJBJly0LDhtivXs1Y+lGX9XTvDp6eyVjYYCHl55Rn8enF1M1bl48LfmzraEXeGv7B/iQbkyzWz/vJ+k/4ZP0nUe5/1v8ZSZ0iJn0ic/nyZQzDIF++fC+st2PHDk6fPs21a9dwd3cHYMGCBRQsWJCjR49SqlQpwDLcbMGCBaRPnz7c8S1atKB9+/bW7Q4dOtCvXz/atm0LQI4cORgxYgR9+/ZlyJAhbN++nSNHjnDu3Dny5MljrRMmZcqUmEwmMmbM+MK4b9y4QYYMGazDzv7L29ubBg0a0KhRowjJposXL1K5cuVIjwtLPF28eNE6N9Hp06dJliz86x1V0i5z5szcuHHjhbGLiIiIiLwN1KMooRozBuztqcMGKrGbe/egXz8ok6UM31SwTGj9+YbPue1728aBikh8im7vo3PnzuHu7m5NEoFlYuZUqVJx7tw5a1nWrFkjJIkA3nvvvXDbJ0+eZPjw4SRLlsy6dOrUiTt37uDv78+JEyfIkiWLNUn0qp4/f24dAvdfwcHBNG7cmAwZMjBr1qxI68Skd1bevHk5ceJEuOXXX3+NtG6SJEnw9/eP9rlFRERERN5U6lGUUOXJA599BjNmMNGuD6XMh/nxRzuaN4fBlQaz6fImfr/zOx29OrKp5SZMJpOtIxZ547k6uvKs/7OX1muxugUbLm6wTlz9InYmO2rnqc3iBosJCQnBwcEhwu+rq6NrtGPMnTs3JpOJ8+fPR/uYF4ls+Fpk5c+eP
WPYsGE0bNgwQl0XFxeSJEkSK/GkS5cOHx+fSPd169aNS5cucfTo0UiTSXny5ImyXcKSY/9OZDk5OZErV65w9RwcIv+z+OjRI3LmzBmtexAREREReZOpR1FCNngwJEtGSfMxmrASgE8+gZAgRxY2WIiLgwtbrmxh5rGZNg5U5O1gMplI6pT0pUvj/I2jlSQCy1PQmhRo8sLzxSTRmyZNGjw9PZk+fTp+fn4R9j9+/BiwDLW6desWt27dsu47e/Ysjx8/pkCBAtG+XpgSJUpw4cIFcuXKFWGxs7OjSJEi/PXXX1y8eDHS452cnAgNDX3pdYoXL87du3cjJIt++ukn5syZw6pVq8iSJUukxzZr1ozt27dHmIfIbDbz3XffUaBAgQjzF0XXn3/+SfHixV/pWBERERGRN4kSRQlZhgzQpw8Ak5y/wZEgLl+GYcMgf/r8jPMYB0Dvrb258OCCLSMVSVSaFGxCapfUmHhxgseEidQuqWlcoHGsXn/69OmEhoZan+h16dIlzp07x9SpUylbtiwAHh4eFC5cmJYtW3L8+HGOHDlCmzZtqFSpUoRhZdExePBgFixYwLBhwzhz5gznzp1j2bJlDBw4EIBKlSpRsWJFGjVqxLZt27h27RqbNm1i8+bNAGTLlo1nz56xY8cOHjx4EOUwruLFi5MuXToOHDhgLTtw4ABdu3Zl8ODB5MiRg7t374Zbnjx5AsBXX33F+++/T8OGDVm5ciU3b97k6NGjNGrUiHPnzjF79uxX6n15/fp1bt++jYeHR4yPFRERERF50yhRlND17AkZM/JO4FW62P8AwLffwvHj0OX9Lnjk8OB5yHNar2lNcGiwjYMVSRxcHFyYX38+QJTJorDy+fXn4+IQ+Zw7rypHjhwcP36cKlWq0KtXLwoVKkS1atXYsWMHM2daehiaTCbWrVtH6tSpqVixIh4eHuTIkYPly5e/0jU9PT3ZsGEDW7dupVSpUpQpU4bvvvuOrFmzWuusWrWKUqVK0bx5cwoUKEDfvn2tvYjKlSvH559/TtOmTUmfPj3jx4+P9Dr29va0b9+exYsXW8t+/vlngoKCGDhwIJkyZYqwdO/eHbAMgduxYwctW7ZkwIAB5MqVixo1amBvb8+hQ4coU6bMK9370qVLqV69erh7FRERERF5W5mMmMz8aQNPnz5l0KBBrFmzBm9vb4oXL86UKVOsT+x5GV9fX1KmTMmTJ09IkSJFrMVlNpvx9vbGzc0tyqfzxJqffoLPPsPfNS2Z/K/gS0qKFYMjR+De878oPLMwjwMeM7TSUIZUHhK3scSzeG3nRCwxtnNAQADXrl0je/bsUU6e/DJeF7xot7YdPgE+2JnsMBtm68/ULqmZX38+dfLWASyTLEc1R5GEd/fuXQoWLMjx48djnJyJ7XYOCgoid+7cLFmyhPLly7/2+d4Wr9POL/rdi6u/2W+D6dOnM2HCBO7evUvRokWZNm0a77//fpT1J0+ezMyZM7l58ybp0qWjcePGjBkzJtr/3um1EBEReTPExd/sBP+N8JNPPmHbtm0sXLiQ06dPU716dTw8PLh9OxE97atDB8iXD1f/h0xMbxluduKEpWdRlhRZmFFzBgAj9o7gyO0jNgxUJHGpm7cuf/f6m4UNFlI/X30qZ61M/Xz1WdhgIX/3+tuaJJKYyZgxI7Nnz+bmzZu2DoWbN2/yzTffKEkkNrV8+XJ69uzJkCFDOH78OEWLFsXT0xNvb+9I6y9ZsoR+/foxZMgQ67DL5cuX880338Rz5CIiIvImStA9ip4/f07y5MlZt24dtWrVspaXLFmSjz76iJEjR770HG9FjyIALy+oVw+zswvZgi5xy8iCszOcPAl580LzVc1Z9ucy8qTNwx+f/RGjpyglZImxp4stJMZ2jo0eRTGhHkXxQ+0cP9SjKH6VLl2aUqVK8f333wOWf7Pd3d3p2rUr/fr1i1C/S5cunDt3jh07dljLevXqxeHDh9m/f3+0rqnXQkRE5M2Q6HoUhYSEEBoaGuGDZJIkSaL9QeetUacOVKiAXWAAKwtahpcFBkKnTmA2w/Sa08mcPDMXH16k77a+Ng5WREREYkNQUBC///57uMnU7ezs8PDw4ODBg5EeU65cOX7//XeOHLH0Mr569Sq//vorNWvWjPI6gYGB+Pr6hltEREQkcXKwdQAvkjx5csqWLcuIESPInz8/GTJkYOnSpRw8eJBcuXJFekxgYCCBgYHW7bAPOmazGbM5eo+zjg6z2YxhGLF6zpcaNw678uV5/+w8amTpzua/irBvH8ycaeaLL1Ixp+4caiyuwfSj06mZqyY1ctWIv9jiiE3aORFKjO0cds9hS3wIu04C7sj5VlA7x49Xbeew37nI/i4npn+DouvBgweEhoaSIUOGcOUZMmTg/PnzkR7TokULHjx4QIUKFay9vz7//PMXDj0bM2YMw4YNi9XYRURE5M2UoBNFAAsXLqRDhw6888472NvbU6JECZo3b87vv/8eaf2oPujcv3+fgICAWIvLbDbz5MkTDMOIv6E6OXKQqnZtXDZsYE763mT+aysA/fpBmTIPKPpOUToW6sjsP2fTYV0HdjbZSRqXNPETWxyxSTsnQomxnYODgzGbzQQHB+PgEPf/FBqGYX0CmIZExR21c/x4nXYO+917+PAhjo6O4fY9ffo01mJMzHbv3s3o0aOZMWMGpUuX5vLly3Tv3p0RI0YwaNCgSI/p378/PXv2tG77+vri7u4eXyGLiIhIApKg5yj6Nz8/P3x9fcmUKRNNmzbl2bNnbNy4MUK9yHoUubu74+PjE+tzFN2/f5/06dPH7xfrS5cwFSqEKSSEbz/aTp9NHwJQrZrBpk0Gz0P8eW/We1x4eIHG+RuzrNGyN/rLks3aOZFJjO0cGhrKpUuXcHNzI23atPFyzeDg4AhfjCX2qZ3jx6u288OHD/H29iZ37tzY29uH2+fr60vq1Kk1L86/BAUF4erqyi+//EL9+vWt5W3btuXx48esW7cuwjEffPABZcqUYcKECdayRYsW8emnn/Ls2bNo/TuvOYpERETeDHHxNzvB9ygKkzRpUpImTYqPjw9btmxh/PjxkdZzdnbG2dk5QrmdnV2sfwE2mUxxct4XypsXPvsMpk+nx52vmZL5CH/9bce2bSbmzDHRqVMyFjVcRNnZZfnl3C8sO7OMlkVaxl98ccAm7ZwIJbZ2trOzI3Xq1Ny/fx+TyYSrq2ucJlXDhn+Ehoa+0cnbhE7tHD9epZ0Nw8Df35/79++TOnXqSJNMieXfn5hwcnKiZMmS7Nixw5ooMpvN7Nixgy5dukR6jL+/f4S2DEvKvSH/PygiIiI2lOATRVu2bMEwDPLmzcvly5fp06cP+fLlo3379rYOzXYGD4b583E48Tvrv15B8XHNAOjZE6pXh/eyvsegioMYsnsInX/tTMWsFXFPqe7jIv+VMWNGgCgfMR2bwuZksbOzUwIjDqmd48frtHOqVKmsv3sSPT179qRt27a89957vP/++0yePBk/Pz/rZ6E2bdrwzjvvMGbMGADq1KnDpEmTKF68
uHXo2aBBg6hTp06EXlwiIiIi/5XgE0VPnjyhf//+/PXXX6RJk4ZGjRoxatSoxD2swM0Nvv4aBg2i2Ipv+Lx9A36Y68yzZ9ChA2zbBt988A0bL23kyO0jtFvXjm2tt2Fn0v/UivybyWQiU6ZMuLm5ERwcHKfXCpuTJW3atOo1EYfUzvHjVdvZ0dFRiYpX0LRpU+7fv8/gwYO5e/cuxYoVY/PmzdYJrm/evBnudRg4cCAmk4mBAwdy+/Zt0qdPT506dRg1apStbkFERETeIG/MHEWvKq7G2JvNZry9vXFzc7PNlxE/P8idG+7cIWDMd+SZ0YNbtyy7ZsyAL76Aiw8vUvzH4vgH+/Od53f0KNMj/uN8TTZv50RC7Rz31MbxQ+0cP+KqnTUvTsKh10JEROTNEBd/s/Up+k2VNCkMHw6Ay4QRLJj62LqrTx+4ehXypM3Dt9W+BaDf9n6cvX/WFpGKiIiIiIiIyBtCiaI3Wbt2kD8/PHpE5cPj+PxzS7Gfn2UImtkMn7/3OTVy1SAwNJBWq1sRFBpk05BFREREREREJOFSouhN5uAA48ZZ1idPZkK3W2TLZtncswemT7fMwTKn7hzSJEnDH3f/YNjuYTYLV0REREREREQSNiWK3nS1a0PFihAQQLLxg5kz5/+7vv4aLl+GTMkz8WPtHwEYe2Asv936zUbBioiIiIiIiEhCpkTRm85kggkTLOvz51Ml7Sm6dLFsPn9uGZ0WGgqNCzSmdZHWmA0zrde05lnQM5uFLCIiIiIiIiIJkxJFb4P334ePPwbDgK+/ZuxYyJHDsuvAAfjuO8v6tI+m4Z7Cnas+V+m1pZft4hURERERERGRBEmJorfFqFGWOYs2bybpoR3Mm2fpbAQwYACcOgUpXVIyv/58AH46/hMbLm6wXbwiIiIiIiIikuAoUfS2yJULvvjCst63Lx+UN9Prn05DQUHQqhUEBECV7FXoWaYnAB29OnLf776NAhYRERERERGRhEaJorfJoEGQPDkcPw7LljFyJBQpYtl1+jQMHGhZH/XhKAqmL4i3nzefbvgUwzBsF7OIiIiIiIiIJBhKFL1N0qeHfv0s6998gzOBLFoETk6WookTYedOcHFwYWGDhTjaObL2/Frmn5xvu5hFREREREREJMFQouht06MHZM4MN27A9OkULgxjxvx/d9u24OMDxTMVZ3iV4QB029SN64+v2yRcEREREREREUk4lCh627i6wnBLAoiRI8HHhx49oGpVS9Fff0Hnzpb1PuX6UN69PE+DntJmTRtCzaE2CVlEREREREREEgYlit5GbdtCwYKWrkNjx2JnB/PmQapUlt1Ll1oWezt7FjRYQDKnZOy7uY9JByfZMmoRERERERERsTElit5GDg4wbpxlfcoUuHkTd3eYMeP/Vb74Am7dghypc/Cd53cADNw1kFP3TtkgYBERERERERFJCJQoelvVrAmVK0NgoOVpaEDz5pYF4MkTS8cjsxk6Fu9InTx1CAoNotXqVgSGBNoubhERERERERGxGSWK3lYmE4wfb1lfuBBOngRg+nTIksVSvGsXTJoEJpOJWXVmkd41Pae9TzNo1yAbBS0iIiIiIiIitqRE0dusVClo2hQMA77+GoDUqWH+/P9X+eYb+P13yJAsA7PqzALg29++Ze+NvbaIWERERERERERsSImit92oUeDoCFu2wLZtgOUJaH36WHYHB0OzZvD0KdTLV48OxTpgYNBmTRt8A31tGLiIiIiIiIiIxDclit52OXPCl19a1vv2tUxKBIwcaelwBHD5MnTpYlmfXGMy2VNl58aTG3Tf3N0GAYuIiIiIiIiIrShRlBgMHAgpUsCJE7BkCQBOTrB0KSRPbqmyYAEsWgTJnZMzv/58TJiYd2Iea86tsV3cIiIiIiIiIhKvlChKDNKlg379LOsDB0JAAGDpbDRz5v+rffGFpXfRB1k/oG/5vgB8uuFT7j67G98Ri4iIiIiIiIgNKFGUWHTvDu+8AzduWB599o+WLaFtW8v6s2fQvDkEBcGwysMokqEID/wf8InXJxiGYaPARURERERERCS+KFGUWLi6wogRlvWRI+HRI+uuadMgd27L+rFjlk5Hzg7OLGqwCCd7JzZe2sjPx3+2QdAiIiIiIiIiEp+UKEpM2rSBQoXg8WMYM8ZanDy5Zb4iR0fL9oQJloekFc5QmNFVRwPw1ZavuPzosg2CFhEREREREZH4okRRYmJvD+PGWdanTrUMQ/tHyZL/3wWWnNK9e/BV2a+olLUSfsF+tFnThhBzSDwHLSIiIiIiIiLxRYmixOajj6BKFctERIMGhdvVvbtlN4C3tyVZhGHH/PrzSe6UnIN/HWT8gfHxH7OIiIiIiIiIxAslihIbkwnG/5PsWbQI/vjDusvODubNg4wZLdtbt8Lo0ZA1VVa+r/k9AEN2D+H4nePxHLSIiIiIiIiIxAclihKj996zPN7MMODrr8PtcnODJUssSSOAIUNg505oXaQ1DfM3JMQcQus1rXke/NwGgYuIiIiIiIhIXFKiKLEaNcoye/W2bZauQ/9SpQoMG2ZZN5uhRQu4e9fEj7V/JEPSDJy9f5Zvdnxjg6BFREREREREJC4pUZRYZc8OnTtb1vv2tWSE/uWbb8DT07J+756lA1Iqp3TMrjsbgMmHJ7Pj6o74jFhERERERERE4pgSRYnZwIGQMiWcPAmLF4fbZWcHCxfCO+9YtvfsgaFDoVaeWnxW8jMA2q1rx+OAx/Ebs4iIiIiIiIjEGSWKErO0aaF/f8v6gAEQEBBud/r0sHw52NtbtkeNgk2b4Nvq35IrTS7+8v2LLr92ieegRURERERERCSuKFGU2HXrBlmywK1bMG1ahN3ly8PYsf/fbt0afO4lY2GDhdiZ7Fh8ejErzqyIx4BFREREREREJK4oUZTYJUkCI0ZY1kePhkePIlTp1Qvq1LGsP3wITZtCyQxl+KaCZULrzzd8zm3f2/EVsYiIiIiIiIjEESWKxNJNqHBhePzYkiz6D5MJ5s+HbNks2wcPQr9+MLjSYEpmKolPgA8dvDpgGEa8hi0iIiIiIiIisUuJIrFMQjR+vGV92jS4fj1CldSpYcUKcHS0bE+aBGtXO7KwwUJcHFzYemUrM47OiL+YRURERERERCTWKVEkFp6e8OGHEBRkeRpaJEqVgu+++/92+/Zg3M/POI9xAPTZ1ocLDy7ER7QiIiIiIiIiEgeUKBILk+n/vYoWL4bjxyOt9uWXlpFqAH5+0KABtM7bhQ+zf8jzkOe0XtOa4NDgeApaRERERERERGKTEkXyfyVKQIsWlvW+fSGSOYdMJvjhByhWzLJ98SK0b2fHnLrzSOWSiqN/H2X0vojzHImIiIiIiIhIwqdEkYQ3ciQ4OcGOHbB1a6RVXF1h9WrLvEUA69bBwulZmFHTMkfRiL0jOHL7SHxFLCIiIiIiIiKxRIkiCS97dujSxbLety+EhkZZbelSSw8jgEGDIPXt5jQt2JRQI5TWa1rjH+wfT0GLiIiIiIiISGxQokgiGjAAUqWCU6dg0aIoq3l6wog
RlnXDsIxa61twBpmTZ+biw4v03dY3fuIVERERERERkVihRJFElCYN9O9vWR84EJ4/j7Jq//5Qr55l3ccHOjRPww815gEw/eh0Nl/eHMfBioiIiIiIiEhsUaJIIte1K7i7w19/wbRpUVazs4P58yFPHsv2yZOwfHQ1OpeyDF/rsK4DD/0fxkfEIiIiIiIiIvKaEnSiKDQ0lEGDBpE9e3aSJElCzpw5GTFiBEYkT+OSWJYkiWVia4DRo+Fh1MmelClhzRpImtSyvXgxZDk3jrxp83Ln2R2+2PiFXjMRERERERGRN0CCThSNGzeOmTNn8v3333Pu3DnGjRvH+PHjmfaCHi4Si1q2hKJF4ckTGDXqhVULFIB58/6/PaCvK19kXIi9yZ6VZ1ey5PSSuI1VRERERERERF5bgk4U/fbbb9SrV49atWqRLVs2GjduTPXq1TlyRI9ejxf29jB+vGX9++/h2rUXVm/c2PL0MwCzGYZ8UoovCw4GoPOvnbn15FZcRisiIiIiIiIirylBJ4rKlSvHjh07uHjxIgAnT55k//79fPTRRzaOLBGpXh08PCA42PI0tJcYOhTq17esP3kCmwd8Q8kM7/Mk8Ant1rXDbJjjNFwREREREREReXUOtg7gRfr164evry/58uXD3t6e0NBQRo0aRcuWLaM8JjAwkMDAQOu2r68vAGazGbM59pIUZrMZwzBi9ZwJ1tix2L33Hixdivmrr6BkyRdWnz8fKlQwcfq0iUsXHCi/aQFJ3i/Ozms7mXJoCt1Ld4/2pRNVO9uQ2jnuqY3jh9o5fsRVO+t1ExEREbG9BJ0oWrFiBYsXL2bJkiUULFiQEydO0KNHDzJnzkzbtm0jPWbMmDEMGzYsQvn9+/cJCAiItdjMZjNPnjzBMAzs7BJ0x6zX9847pGzUiCSrVhH81Vf4rFwJJtMLD/n5Z3tq1EiLj48dB7zyUiHnaPan/Ir+O/pTIlUJ8qbOG61LJ6p2tiG1c9xTG8cPtXP8iKt2fvr0aaydS0RERERejclIwI+jcnd3p1+/fnTu3NlaNnLkSBYtWsT58+cjPSayHkXu7u74+PiQIkWKWIvNbDZz//590qdPnzi+jNy4gSlfPkxBQZg3bIBoDP/bsweqVzcREmICDAqN/Yg/A7ZQPGNxfuvwG072Ti89R6JrZxtRO8c9tXH8UDvHj7hqZ19fX1KnTs2TJ09i9W+2xJyvry8pU6bUayEiIpLAxcXf7ATdo8jf3z/CB1B7e/sXdk13dnbG2dk5QrmdnV2sf2kwmUxxct4EKXt26NoVJk7Erl8/qFHDMtn1C1SpYpkD+/PPAUxcGDeXFL0L8cfdPxixdwSjPnzxk9TCJKp2tiG1c9xTG8cPtXP8iIt21msmIiIiYnsJ+hNZnTp1GDVqFBs3buT69eusWbOGSZMm0aBBA1uHljh98w2kSgV//gkLF0brkM8+gy+/tKwH+2TCtOFHAMYeGMtvt36Lo0BFRERERERE5FUk6ETRtGnTaNy4MV9++SX58+end+/efPbZZ4wYMcLWoSVOadL8/8lnAwfC8+fROmzyZKhc2bL+5GBjUt1shdkw03pNa54FPYuTUEVEREREREQk5hJ0oih58uRMnjyZGzdu8Pz5c65cucLIkSNxcnr53DYSR7p0gXffhdu3YcqUaB3i6AgrV0KOHJbtx0um4RLozlWfq/Tc0jMOgxURERERERGRmEjQiSJJgFxcYNQ/cwuNGQMPHkTrsHTpYONGy8g1AlIRsHQ+ALOOz2LDxQ1xE6uIiIiIiIiIxIgSRRJzLVpAsWLg6wsjR0b7sHz5YPVqcHAArleBg18B0NGrI/f97sdNrCIiIiIiIiISbUoUSczZ2cH48Zb1GTPg6tVoH1qlCsya9c/GjtHgXQBvP28+3fAphmHEfqwiIiIiIiIiEm1KFMmrqVYNqleH4OD/T3AdTe3aWR6gRogLrF4EoY6sPb+W+Sfnx0moIiIiIiIiIhI9ShTJqxs3DkwmWLYMjh6N0aEjRkCTJsDd4rBrGABdf+3G9cfXYz9OERGRN9z06dPJli0bLi4ulC5dmiNHjryw/uPHj+ncuTOZMmXC2dmZPHny8Ouvv8ZTtCIiIvImU6JIXl2xYtCqlWW9b1+IwdAxOzuYPx/KlAEO9IWb5XgW/JSWv7Qh1BwaJ+GKiIi8iZYvX07Pnj0ZMmQIx48fp2jRonh6euLt7R1p/aCgIKpVq8b169f55ZdfuHDhArNmzeKdd96J58hFRETkTaREkbyeESPA2Rl274ZNm2J0aJIksG4dZMtqD2sWQFBSfru9j28PTIqbWEVERN5AkyZNolOnTrRv354CBQrwww8/4Orqypw5cyKtP2fOHB49esTatWspX7482bJlo1KlShQtWjSeIxcREZE3kRJF8nqyZoVu3SzrfftCaMx6A7m5wcaNkNKcEzZPBuCbHQM58tdRFp5cSOOVjWno1ZDGKxuz8ORCAkICYvkGREREEq6goCB+//13PDw8rGV2dnZ4eHhw8ODBSI/x8vKibNmydO7cmQwZMlCoUCFGjx5N6Av+RgcGBuLr6xtuERERkcRJiSJ5ff37Q+rUcOaMZTxZDBUoAGvWgOOfHeFCHcymIMrMLkObtW1Yd2EdB+8cZN2FdbRZ24bMEzOz/sL6OLgJERGRhOfBgweEhoaSIUOGcOUZMmTg7t27kR5z9epVfvnlF0JDQ/n1118ZNGgQEydOZOTIkVFeZ8yYMaRMmdK6uLu7x+p9iIiIyJtDiSJ5falTw8CBlvVBg8DfP8anqFIFFi4wwZnGYICBGQCzEf7n44DH1FtWD68LXrETu4iIyFvGbDbj5ubGTz/9RMmSJWnatCkDBgzghx9+iPKY/v378+TJE+ty69ateIxYREREEhIliiR2dO5sGYb2998wefIrnaJeowCSNOwBmKKsY2CZMLvd2nYahiYiIm+9dOnSYW9vz71798KV37t3j4wZM0Z6TKZMmciTJw/29vbWsvz583P37l2CgoIiPcbZ2ZkUKVKEW0RERCRxUqJIYoezM4waZVkfOxbu34/xKVaeWclzfMD04qenGRj4BPjwy9lfXiVSERGRN4aTkxMlS5Zkx44d1jKz2cyOHTsoW7ZspMeUL1+ey5cvYzabrWUXL14kU6ZMODk5xXnMIiIi8maLUaLIbDaza9cuhg8fTseOHWnevDndunVj7ty56qIs0Lw5FC8OT5/CC+ZBiMraC2uxM0XvLWlnsmPN+TUxvoaIiMibpmfPnsyaNYv58+dz7tw5vvjiC/z8/Gjfvj0Abdq0oX///tb6X3zxBY8ePaJ79+5cvHiRjRs3Mnr0aDp37myrWxAREZE3SLS+lT9//pyRI0fi7u5OzZo12bRpE48fP8be3p7Lly8zZMgQsmfPTs2aNTl06FBcxywJlZ0dTJhgWZ8xAy5fjtHhD/0fWuciehmzYeaR/6OYRigiIvLGadq0Kd9++y2DBw+mWLFinDhxgs2bN1snuL558yZ37tyx1nd3d2fLli0cPXqUIkWK0K1bN7p3706/fv1sdQsiIi
LyBnGITqU8efJQtmxZZs2aRbVq1XB0dIxQ58aNGyxZsoRmzZoxYMAAOnXqFOvByhvgww/B0xO2bIEBA2D58mgfmtY1LXYmu2gli+xMdqRxTfM6kYqIiLwxunTpQpcuXSLdt3v37ghlZcuW1X/eiYiIyCuJVo+irVu3smLFCmrWrBlpkggga9as9O/fn0uXLlG1atVYDVLeMOPGgckEK1bAkSPRPqx+3vox6lHUIF+DV41QRERERERERCIRrURR/vz5res3b97EMCJONmwYBjdv3sTR0ZGcOXPGXoTy5ilaFNq0saz37QuRvF8i06RgE1K7pMb0gqeeAWBAcqfkNC7Q+DUDFREREREREZF/i/FTz7Jnz879SJ5o9ejRI7Jnzx4rQclbYMQIy5PQ9uyBjRujdYiLgwvz688HiDpZZAAm8AsIYvuVnbEUrIiIiIiIiIjAKySKDMPAZIr4Jf7Zs2e4uLjESlDyFnB3hx49LOtffw0hIdE6rE7eOqxttpZULqkArE9Bsz4NLTAV3CmK2S6Qukvr8MOxH2M3bhEREREREZFELFqTWYPl0awAJpOJQYMG4erqat0XGhrK4cOHKVasWKwHKG+wfv1g1iw4exbmzYNPPonWYXXz1uXvXn/zy9lfWH1uNXef3CVjyow0zN8Q+wuNadnCHqPWpxjF5/HFxs+58fg6oz4c9f9kkoiIiIiIiIi8kmgniv744w/A0qPo9OnTODk5Wfc5OTlRtGhRevfuHfsRypsrVSoYOBB69oQhQ6B5c0iaNFqHuji40KpIK1oUaoG3tzdubm7Y2dlBEfB7Ap06zYHH2aHKEMYeGMv1J9eZV28ezg7OcXtPIiIiIiIiIm+xaCeKdu3aBUD79u2ZMmUKKVKkiLOg5C3y5ZcwdSpcvw6TJ8OAAa99yk8+gUePTHz99WB4nBXqfsKyP5fx99O/Wdt0LamTpH7ta4iIiIiIiIgkRjEeqzN37lwliST6nJ1h9GjL+rhx4O0dK6ft29eycLItLN4EASnYe2Mv5eeU5/rj67FyDREREREREZHEJlqJos8//5y//vorWidcvnw5ixcvfq2g5C3TtCmULAlPn1qehhZLxo6FTp2Aqx4wZz/4vsO5B+co83MZjv19LNauIyIiIiIiIpJYRCtRlD59egoWLEjNmjWZOXMmR48e5fbt2zx8+JDLly/j5eVF3759effdd/nuu+8oXLhwXMctbxI7Oxg/3rL+ww9w6VKsnNZkgpkzoUULwLsw/HwIk3cR7vndo9K8Smy4uCFWriMiIiIiIiKSWEQrUTRixAguXrxI+fLlmTFjBmXKlOHdd9/Fzc2NvHnz0qZNG65evcpPP/3EoUOHKFKkSFzHLW+aqlXho48gJCRW5ikKY28P8+dDo0aAbxaM2fuwv1Yd/2B/6i2rx8yjM2PtWiIiIiIiIiJvu2jPUZQhQwYGDBjA6dOnefDgAcePH+fAgQNcuHABHx8ffvnlF2rUqBGXscqbbtw4SzeglSvh8OFYO62DAyxZArVrA4EpCF24Acc/O2A2zHz565d8ve1rzIY51q4nIiIiIiIi8raK8WTWAKlTp6Zo0aKUKVOGXLlyYTKZYjsueRsVLgzt2lnW+/QBw4i1Uzs5WfJP1aoBZkeCf/kZl4PDARj/23harGpBQEhArF1PRERERERE5G0U40TR0KFDMZsj9s548uQJzZs3j5Wg5C02fDi4uMC+fbB+faye2sUF1q6FihUBTARsGUTy7fNxMDmw/Mxyqi+szqPnj2L1miIiIiIiIiJvkxgnimbPnk2FChW4evWqtWz37t0ULlyYK1euxGpw8hbKkgV69LCs9+tnmbMoFrm6woYNULq0Zfvp/jakWL+ZZI4p2HdzH+Vml+Oaz7VYvaaIiIiIiIjI2yLGiaJTp06RJUsWihUrxqxZs+jTpw/Vq1endevW/Pbbb3ERo7xt+vWDtGnh3DmYOzfWT588OWzeDMWLW7YfHfsQ16UHyOiahQsPL1BmdhmO3j4a69cVERERERERedPFOFGUOnVqVqxYQZcuXfjss8+YMmUKmzZtYtSoUTg4OMRFjPK2SZkSBg2yrA8eDH5+sX6JVKlg61YoVMiy7f1nIYyfDpEvVVG8/bypPL8yXhe8Yv26IiIiIiIiIm+yV5rMetq0aUyZMoXmzZuTI0cOunXrxsmTJ2M7NnmbffEF5MgBd+/CpElxcol06WDnTssc2gD3Lr+Dz6R9VMjoiX+wPw2WN2D6kelxcm0RERERERGRN1GME0U1atRg2LBhzJ8/n8WLF/PHH39QsWJFypQpw/jx4+MiRnkbOTnBqFGW9fHjwds7Ti6TPr0lWVSkiGX73s3kXB6xniY5PsFsmOmyqQt9tvbBbEScoF1EREREREQksYlxoig0NJRTp07RuHFjAJIkScLMmTP55Zdf+O6772I9QHmLffwxvPcePHtmeRpaHAnrWVS0qGX77m1H9vb9ie6FRwLw7cFvab6qOQEhAXEWg4iIiIiIiMibIMaJom3btpE5c+YI5bVq1eL06dOxEpQkEnZ2lt5EAD/+CBcvxtml0qaFHTugWDHL9r27JpZ9OYCxpRfiaOfIijMrqLawGg/9H8ZZDCIiIiIiIiIJ3SvNURSVdOnSxebpJDGoUgVq1YKQEPjmmzi9VFiyKOxpaPfuwaS2rfjxgy2kdE7J/pv7KTenHFd9rsZpHCIiIiIiIiIJVawmikReydixlt5Fq1bBwYNxeqk0aWD7dihRwrLt7Q39mlZhToUDuKdw5+LDi5T5uQxHbh+J0zhEREREREREEiIlisT2ChWCdu0s6336gGHE6eXCkkUlS1q2vb3hk7oFmVnyEMUzFue+/30qz6vMuvPr4jQOERERERERkYRGiSJJGIYNgyRJ4MAB8PKK88ulTm1JFpUpY9n28YFmtTIzOvceauSqwfOQ5zRY3oDvj3wf57GIiIiIiIiIJBSvnCgKCgriwoULhISExGY8klhlyQJffWVZ//pry5xFcSxVKti6FSpXtmw/ewYNayena9r1dCrRCQODrpu60ntrb8yGOc7jEREREREREbG1GCeK/P396dixI66urhQsWJCbN28C0LVrV8aOHRvrAUoi0rev5Vn2Fy7A7NnxcsnkyeHXX6FmTcv28+dQv64DNYJ/ZHTV0QBMPDiRZr80IyAkIF5iEhEREREREbGVGCeK+vfvz8mTJ9m9ezcuLi7Wcg8PD5YvXx6rwUkikzIlDBpkWR8yxNLFZ/t20lWsaBknFkeSJIE1a6BxY8t2cDB8/LEJ9xv9WdRgEY52jqw8uxKPBR489H8YZ3GIiIiIiIiI2FqME0Vr167l+++/p0KFCphMJmt5wYIFuXLlSqwGJ4nQ559DjhyWZ9dPnIhpwAAcLl3CNGBAnE5y7eQES5dC27aW7dBQaNMG/A61ZEurLaR0TsmBWwcoO7ssVx7pfS4iIiIiIiJvpxgniu7fv4+bm1uEcj8/v3CJo9iSLVs2TCZThKVz586xfi1JAJycYMwYy/qYMZiOHQOw/Ny6NU4v7eAAc+bAl19at
g0DPvsMfl9VhQMdDvBuyne59OgSZWeX5fBfh+M0FhERERERERFbiHGi6L333mPjxo3W7bDk0M8//0zZsmVjL7J/HD16lDt37liXbdu2AdCkSZNYv5YkEE2awHvvQWAgxj/vL8Pe3jIsLQ57FQHY2cH331umSwrTpw8smFiQgx0OUSJTCe7736fK/CqsPb82TmMRERERERERiW8OMT1g9OjRfPTRR5w9e5aQkBCmTJnC2bNn+e2339izZ0+sB5g+ffpw22PHjiVnzpxUqlQp1q8lCYTJBB9/DMeOYfonMWQKDYWjRy29ijw94/zyY8daJroOmzJp/Hi4fz8TO77fQ8u1Tfn10q80XN6QyTUm0610tziNR0RERERERCS+xDhRVKFCBU6cOMHYsWMpXLgwW7dupUSJEhw8eJDChQvHRYxWQUFBLFq0iJ49e0Y5zC0wMJDAwEDrtq+vLwBmsxmzOfYecW42mzEMI1bPKf8wDEwrVgDw71fZsLeHgQMxPDws2Zw49s03kCYNdOliwjBMzJ0L9+8nZfHiNXy9pys/Hf+J7pu7c83nGhOqTcDOFOMOegmG3s9xT20cP9TO8SOu2lmvm4iIiIjtxThRBJAzZ05mzZoV27G81Nq1a3n8+DHt2rWLss6YMWMYNmxYhPL79+8TEBB7jzc3m808efIEwzCws3tzEwQJkdOuXaT5Z26ifzOFhsKxY/isWEFQlSrxEkvDhuDk5EznzqkICjKxYYMJz2owb94w0jumZ9ThUUw+PJlL3peYVnUaSRySxEtcsU3v57inNo4fauf4EVft/PTp01g7l4iIiIi8GpNhxGzSF3t7e+7cuRNhQuuHDx/i5uZGaGhorAb4b56enjg5ObF+/foo60TWo8jd3R0fHx9SpEgRa7GYzWbu379P+vTp9WUkNhkGpjJl4I8/LImh/+62t4fixTEOHYqXXkVhdu6Ehg1NPH1quWahQgabNhnsebSUDl4dCAoNomyWsqxtupZ0runiLa7Yovdz3FMbxw+1c/yIq3b29fUlderUPHnyJFb/ZkvM+fr6kjJlSr0WIiIiCVxc/M2OcY+iqPJKgYGBODk5vXZAUblx4wbbt29n9erVL6zn7OyMs7NzhHI7O7tY/9JgMpni5LyJ2pYtEElvojBhvYpM27fH+VxF/+bhAbt3w0cfgbc3/PmniQoVTGzd2pKtrbJQf3l9Dv51kPJzy7Op5SZypckVb7HFFr2f457aOH6oneNHXLSzXjMRERER24t2omjq1KmA5YPhzz//TLJkyaz7QkND2bt3L/ny5Yv9CP8xd+5c3NzcqFWrVpxdQ2zMMCyzR9vbw8t6pn39NVSvHq+9ikqUgAMHLJe9dg1u3IDy5WHjxkr81uE3Plr8EZcfXabs7LJ4NfOirHvsPwVQREREREREJC5FO1H03XffAZYeRT/88AP29vbWfU5OTmTLlo0ffvgh9iPE0sV97ty5tG3bFgeHV5pWSd4EW7danmwWHSdPwrx50L59nIb0X7lyWZJFNWrAqVPw4AFUrgzLluXn0CeHqL2kNr/f+Z2qC6qyuOFiGuZvGK/xiYiIiIiIiLyOaPfxvnbtGteuXaNSpUqcPHnSun3t2jUuXLjAli1bKF26dJwEuX37dm7evEmHDh3i5PySAIT1JorJsINPP4U//oi7mKKQKRPs2QOVKlm2nz+HBg1g1fyM7G63m9p5ahMQEkDjFY2ZcmhKvMcnIiIiIiIi8qpiPBnArl27SJ06dVzEEqXq1atjGAZ58uSJ1+tKPAoKgps3ISaPRg4JsWRr9uyJu7iikCqVZTql5s0t22YzdOkCQ79Jxqoma/jivS8wMOixpQdfbf6KUHPcTfIuIiIiIiIiElteaRzXX3/9hZeXFzdv3iQoKCjcvkmTJsVKYJLIODtbhp3dvx+u2Gw28+jRI9KkSRN+ktOnT6FvXzhyxDKp9fLlUK9evIe8aBFkywZjxljKJk6EmzcdmD9/OtlSZePr7V8z+fBkbvreZFGDRSRxTBKvMYqIiIiIiIjERIwTRTt27KBu3brkyJGD8+fPU6hQIa5fv45hGJQoUSIuYpTEwt3dsvyb2UyItze4uUUclrZ7NzRrBl5e0LAhzJoF8Tw80c4ORo+GrFnhyy8tPYtWroS//zaxbl1f3k35Lm3XtmX1udVUfVoVr2ZepE+aPl5jFBEREREREYmuGA8969+/P7179+b06dO4uLiwatUqbt26RaVKlWjSpElcxCgSuSRJYNUqy4TWZjN07Ajjx1vmO4pnn30G69dD0qSW7QMHoFw5KJWkGdtabyO1S2oO/XWIsrPLcunhpXiPT0RERERERCQ6YpwoOnfuHG3atAHAwcGB58+fkyxZMoYPH864ceNiPUCRF3JwgNmzLcPQAL7+Gvr0idlcR7GkZk3YuxcyZrRsX7wIZcqA3a2K/NbxN7KlysYVnyuUnV2W3279Fu/xiYiIiIiIiLxMjBNFSZMmtc5LlClTJq5cuWLd9+DBg9iLTCS6TCYYNw4mTLBsT5xoGYIWHBzvoZQoAYcOQf78lu0HD+DDD+Hwxnwc6niI9zK/x8PnD6k6vyqrzq6K9/hEREREREREXiTGiaIyZcqwf/9+AGrWrEmvXr0YNWoUHTp0oEyZMrEeoEi09e4N8+aBvT3Mn2+Zt8jfP97DyJrVMvTMw8OyHRQE7drBdyMzsLP1burkqUNgaCBNVjbhu4PfxXt8IiIiIiIiIlGJcaJo0qRJlC5dGoBhw4bx4Ycfsnz5crJly8bs2bNjPUCRGGnbFtasARcX2LABqlcHH594DyN1avj1V/jii/+XjRsHrZslZWGtNXz53pcYGPTc2pPum7oTag6N9xhFRERERERE/ivGTz3LkSOHdT1p0qT88MMPsRqQyGurUwe2bYPatS1deypVgs2bIXPmeA3D0RFmzIACBaB7d8u0SevWQaWK9qxb9z3ZUmWj7/a+TD0ylZu+N1nccDGujq7xGqOIiIiIiIjIv8W4R1GOHDl4+PBhhPLHjx+HSyKJ2FSFCv+fWfr0aShfHi7Z5mljXbrApk2QMqVl++RJKF3axAf2fVjWaBlO9k6sPb+WqvOr4u3nbZMYRUREREREROAVEkXXr18nNDTiMJnAwEBu374dK0GJxIoiReC33yBXLrh+3ZI8On7cJqFUrw4HD0LOnJbte/egcmUIPdWU7a23k9olNYdvH6bs7LJcfHjRJjGKiIiIiIiIRHvomZeXl3V9y5YtpAzrHgGEhoayY8cOsmXLFqvBiby27Nlh/3746CP44w9LdmbdOqhSJd5DyZ8fDh+GRo1gzx4IDISWLaFPnw/Y3/sgtZd+xFWfq5SdXRavZl6Uf7d8vMcoIiIiIiIiiVu0E0X169cHwGQy0bZt23D7HB0dyZYtGxMnTozV4ERiRYYMsGsX1K8Pu3dDjRqwdKnlqWjxLG1a2LrVMhxt1ixL2YQJcOJEXjbOPkjbzXU4+vdRPlzwIYsaLqJxgcbxHqOIiIiIiIgkXtEeemY2mzGbzbz77rt4e3tbt81mM4GBgVy4cIHa
tWvHZawiry5lSstEQQ0aWJ5X36TJ/zM18czJCX78Eb7/Hhz+SdVu2wa1KmVgaold1M1bl8DQQD5e+TGTDk7CMAybxCkiIiIiIiKJT4znKLp27Rrp0qWLi1hE4paLC6xYAZ98YnkE2aefwujRYINEjMkEnTvDjh3g5mYpu3YNqn6QlKbGajqX6oyBQa+tvei2qRuh5ojzgomIiIiIiIjEtmgnig4ePMiGDRvClS1YsIDs2bPj5ubGp59+SmBgYKwHKBKrHBzgp5+gf3/L9oAB0LOnJXFkAxUrwrFj8N57lu3nz6FlC3uS7J7G+A+/BeD7o9/TaEUj/IP9bRKjiIiIiIiIJB7RThQNHz6cM2fOWLdPnz5Nx44d8fDwoF+/fqxfv54xY8bESZAiscpksvQkmjTJsj15MrRtC8HBNgnH3R327YN27f5f9u0EE1uH9WJ2jRU42zuz7sI6qsyvgreft01iFBERERERkcQh2omiEydO8OGHH1q3ly1bRunSpZk1axY9e/Zk6tSprFixIk6CFIkTX30FCxdaehktWgT16oGfn01CcXGBOXNg2rT/z1u0fTuMaNaE6aV3kCZJGo7cPkKZn8tw4cEFm8QoIiIiIiIib79oJ4p8fHzIkCGDdXvPnj189NFH1u1SpUpx69at2I1OJK61agXr1kGSJJbJrqtVg0ePbBKKyWR5GtqOHZA+vaXs+nX4snZ5uic9SI7UObj2+Brl5pRj/839NolRRERERERE3m7RThRlyJCBa9euARAUFMTx48cpU6aMdf/Tp09xdHSM/QhF4lrNmpbuO6lSwcGDlomDbt+2WTgVK8Lvv8P771u2g4JgSJc8lDh+kFKZSvPo+SM8Fniw8sxKm8UoIiIiIiIib6doJ4pq1qxJv3792LdvH/3798fV1ZUPPvjAuv/UqVPkzJkzToIUiXPlylkmCsqcGc6csWxfsN0Qr7B5i7p2/X/ZL/PdeDZ9Jx++U4/A0EA+/uVjvv3tWwwbPLVNRETi1/Tp08mWLRsuLi6ULl2aI0eOROu4ZcuWYTKZqF+/ftwGKCIiIm+NaCeKRowYgYODA5UqVWLWrFnMmjULJycn6/45c+ZQvXr1OAlSJF4UKgQHDkDu3HDzJlSoYHkkmY04OcHUqbBsGSRLZik7d8qVg1+twjOVJYPUZ1sfum7qSqg51GZxiohI3Fq+fDk9e/ZkyJAhHD9+nKJFi+Lp6Ym394sfcHD9+nV69+4d7j/2RERERF4m2omidOnSsXfvXnx8fPDx8aFBgwbh9q9cuZIhQ4bEeoAi8SpbNti/H0qUgAcPoEoVy6RBNtS0qSVfVbCgZdv/mT1bekyh/LNJmDAx/eh0GixvgF+QbSbiFhGRuDVp0iQ6depE+/btKVCgAD/88AOurq7MmTMnymNCQ0Np2bIlw4YNI0eOHPEYrYiIiLzpop0oCpMyZUrs7e0jlKdJkyZcDyORN5abG+zaBVWrwrNnljmMfvnFpiHlzQuHD0Pr1mElJg58+xU5fl+Bk50z6y+up/L8ytx7ds+GUYqISGwLCgri999/x8PDw1pmZ2eHh4cHBw8ejPK44cOH4+bmRseOHaN1ncDAQHx9fcMtIiIikjjFOFEkkiikSAG//gqNG1tmk/74Y/jhB5uGlDQpzJ8PP/0Ezs6WsivrG+OweCfJ7dNy7O9jlJldhvMPzts0ThERiT0PHjwgNDQ03JNnwfKQkbt370Z6zP79+5k9ezazZs2K9nXGjBlDypQprYu7u/trxS0iIiJvLiWKRKLi7GyZIOizz8Aw4IsvYMQIy7qNmEzQqZPl4Wxhc8f7XyjH0+8Okjw4J9cfX6fc7HLsu7HPZjGKiIjtPH36lNatWzNr1izSpUsX7eP69+/PkydPrMutW7fiMEoRERFJyJQoEnkRe3uYORMGDbJsDx4M3buD2WzTsIoXh+PHoUWLfwoe5ebpdwdxeVAanwAfPBZ6sPzP5TaNUUREXl+6dOmwt7fn3r3wQ4vv3btHxowZI9S/cuUK169fp06dOjg4OODg4MCCBQvw8vLCwcGBK1euRHodZ2dnUqRIEW4RERGRxEmJIpGXMZlg+HCYMsWyPW0atGplGZJmQylSwKJFMHcuuLoC/ukJ+HEndhcaEBQaRLNVzRh/YDyGDXtAiYjI63FycqJkyZLs+NeDFcxmMzt27KBs2bIR6ufLl4/Tp09z4sQJ61K3bl2qVKnCiRMnNKRMREREXkqJIpHo6tYNFi8GBwdYuhTq1gU/2z5pzGSCdu0svYuKFQOCXTEvWwmHugPw9fav6fxrZ0LMIbYMU0REXkPPnj2ZNWsW8+fP59y5c3zxxRf4+fnRvn17ANq0aUP//v0BcHFxoVChQuGWVKlSkTx5cgoVKqQHj4iIiMhLKVEkEhMtWsD69ZYuPFu2wIcfwsOHto6KvHnh0CFLLgvDHjZPhs3fgWFi5rGZNFjeAL8g2ya1RETk1TRt2pRvv/2WwYMHU6xYMU6cOMHmzZutE1zfvHmTO3fu2DhKEREReVuYjLd8XIqvry8pU6bkyZMnsTre3mw24+3tjZubG3Z2yrfFlQTbzocOQa1a8OgR5M9vSRolkO7869dD+/b/5K/yr4aGLcExgBIZS7Kx5QYyJos4p0WCbee3iNo4fqid40dctXNc/c2WmNNrISIi8maIi7/Z+hQt8irKlIF9+yBLFjh3DsqXh/MJ47H0derAyZNQpQpwriHM3wl+6Th+93dKzizDufvnbB2iiIiIiIiIJFBKFIm8qgIF4MABy7ivW7egQgU4csTWUQHwzjuwfTt8+y04eZeF2QfhYS7+9r9ByRnl2H1tLwABIQEsPLmQxisb09CrIY1XNmbhyYUEhATY+A5ERERERETEFpQoEnkd774L+/dDqVKWsV5Vq8K2bbaOCgA7O+jVC44dg8Lv5LIki26V5TmPqTqvGp/+0pvMEzPTZm0b1l1Yx8E7B1l3YR1t1rYh88TMrL+w3ta3ICIiIiIiIvFMiSKR15UuHezYAR4elqeg1aoFy5fbOiqrwoXh6FHo/WU6WLADzjbEsAti1pmJ+AT4AGA2zOF+Pg54TL1l9fC64GWzuEVERERERCT+KVEkEhuSJ4cNG+DjjyE4GJo3hxkzbB2VlbMzTJgAu7Ym4Z2jCyDE+YX1DSxz3Ldb207D0ERERERERBIRJYpEYouzMyxZAl9+CYYBnTvD0KGW9QSicmUYuHg1OAS+tK6BgU+AD7+c/SXuAxMREREREZEEQYkikdhkbw/ff29JEAEMGwZdukBoqE3D+rdtt9ZiZ4rer76dyY4159fEcUQiIiIiIiKSUChRJBLbTCYYMsSSMDKZLEPQWrSAwJf34okPD/0fWuciehmzYeaR/6M4jkhEREREREQSCiWKROJK586wdCk4OsKKFVC7Njx7ZuuoSOuaNto9ikyYSJMkTRxHJCIiIiIiIgmFEkUicalpU8sk10mTwvbtULUqPHhg05Dq560f7R5FBgaHrp7l4K1DcRyViIiIiIiIJARKFInEterVYedOSJvW8pz6ChXg5k2bhdO
kYBNSu6TGhOnFFQ3L8nfQecrNKYvn3Pqc8T4TLzGKiIiIiIiIbShRJBIf3n8f9u0Dd3e4cAHKlYOzZ20SiouDC/PrzweIMllkKTfButnw+ydgtmPrzXUUnlmYNqvbcv3x9fgLWEREREREROKNEkUi8SV/fjhwwPLz9m344AM4ZJshXXXy1mFts7WkckkFYJ2zKOxnKpdUrGu2jhXfdCDDkVkw4wycaYyBwcLTC8g9NQ/dN3XH28/bJvGLiIiIiIhI3FCiSCQ+ubtbehaVLg2PHsGHH8LmzTYJpW7euvzd628WNlhIvbz1KJupLPXy1mNhg4X83etv6uarQ5MmcO4cdGqQD1auhJ+OwpVqhBjBTD0ylRxTcjB412CeBDyxyT2IiIiIiIhI7ErwiaLbt2/TqlUr0qZNS5IkSShcuDDHjh2zdVgiry5tWsvE1p6e4O8PdepYno5mAy4OLrQq0opfmvzC6rqr+aXJL7Qq0goXBxdrndSp4aefYM8eyJv8PVi4FeZvh9ul8Av2Y8TeEeScmpNJBycREBJgk/sQERERERGR2JGgE0U+Pj6UL18eR0dHNm3axNmzZ5k4cSKpU6e2dWgirydZMvDygubNISQEWrSAadNsHdULVawIJ07A4MHg+NeHMOswLF8F9/Px8PlDem3tRe5puZl9fDYh5hBbhysiIiIiIiKvIEEnisaNG4e7uztz587l/fffJ3v27FSvXp2cOXPaOjSR1+fkBIsWQZculu1u3SxZGMOwbVwv4OICw4ZZEkblypngXEOYeRrWzsH01J2/fP/ik/WfUGhGIVadXYWRgO9FREREREREInKwdQAv4uXlhaenJ02aNGHPnj288847fPnll3Tq1CnKYwIDAwkMDLRu+/r6AmA2mzGbzbEWm9lsxjCMWD2nRJQo2nnyZEifHrshQ2DECIx79zC+/x7s7eMthJi2c758lqFoP/4IA/7X3p3HVVXnfxx/X0AumoC4sClq7lsuaBKaORVlZZY1lZWj5q9laqxU2jRTNEuszLHSsmzRZqY0HTUnDRfMFsVcMVfcl0xQU8EVhHt+f5wrSoABcldez8fjPJTDOfd87ver3K9vv+d7hvkqM7W/jE0PSR0+kN+NY5T2e5rum3mfOkR00Os3va64BnEOfgfur0L8WXYDtLNzOKqd6TcAAADXsxhu/F/+AQHmOinx8fG6//77tXr1ag0cOFCTJ09Wv379ijxn5MiRGjVqVKH927dvV2BgYLnVZrPZlJmZqeDgYPn4uPXELI9Wkdq58rRpCho6VBbD0Lk779SJiRMlq9Up176Sdj5yxEejRwdq5szK5g5rlhT7tvxuGK9cn1OSpC61u+jlji+rbWjbcq7cc1SkP8uuRDs7h6Pa+eTJk2rSpIkyMzMVFBRUbq+L0svKylJwcDB9AQCAm3PEZ7ZbB0X+/v7q0KGDVqxYkb/v2Wef1erVq5WSklLkOUXNKIqKitLx48fLdaBjs9l05MgR1apVi3+MOFCFa+dZs2Tp00eWnBwZN90kY/ZsqRwDzuKURzv/+KP09NMWbdpkMXdcdVgBt7yu3LaTlascSdI9ze7R6L+MVvNazcurdI9R4f4suwjt7ByOauesrCyFhIQQTrgBgiIAADyDIz6z3frWs4iICLVo0aLAvubNm+u///1vsedYrVZZi5iF4ePjU+7/aLBYLA55XRRUodr5gQfMp6L17CnL0qWy3HyztGCBFBrq8EtfaTt37SqtWydNnGgutXTqVKjOzX1H+i5ete4fqd/rfK452+bo67Sv1a9NP438y0jVDa5bzu/CvVWoP8suRDs7hyPamT4DAABwPbcekXXu3FlpaWkF9m3fvl316tVzUUWAE9x8s/Tdd1LNmtLatVKXLtLeva6uqkQqVZIGD5bS0qQHH7TvzKynIx9/Jtv7v6jemZ6yGTZ9lvqZGr/XWPEL43X0zFGX1gwAAAAAuMitg6LBgwdr5cqVGjNmjHbu3KkvvvhCH330kQYMGODq0gDH6tBBWr5cqltX2r5d6txZ2rTJ1VWVWGSk9OWX0pIl5sLXkqTDLbXvzTmq8kWKGvj8RTl5Ofrnyn+qwTsN9Or3r+pk9kmX1gwAAAAAcPOg6Nprr9WcOXP05ZdfqlWrVho9erQmTJig3r17u7o0wPGaNJFWrJBatpR++0264Qbzaw9y883Shg3SuHHShdtlz2y/TrtHLFXEkoVqUDlaJ3NOKmFZghq+21DvrHxH2bnZl39RAAAAAIDDuHVQJEl33nmnNm7cqHPnzmnr1q16/PHHXV0S4Dy1a0s//CDFxkrHj0txceaaRR7E31967jlpxw7piScki0WSLDr0063aPWS1Wm39SvWqNtaRM0c0aOEgNZnYRFNTpyrPlufq0gEAAACgwnH7oAio8KpXN+/huv126exZ6a67pH//29VVlVpoqPThh9L69dJf/mLfafho04z7tX/IZnXN+kjhV0Vqf+Z+9f+6v1pPbq252+bKjR/MCAAAAABeh6AI8ARVqkhffy397W9SXp7Up480YYKrqyqTNm2kpUulWbOk+vXNfUZuJX0//nGdTtyp233fUkhAiLYc2aJ7Ztyj2E9i9d2e71xaMwAAAABUFARFgKeoVEmaNk0aNMj8evBg6eWXJQ+ccWOxSH/9q7R1qzRmjHTVVeb+k8cq69vhz6vyR7t1Z9AwValURT8f/Fk3fX6Tuv27m9b+tta1hQMAAACAlyMoAjyJj480fryZrkhSYqK58E9urmvrKqOAAGnoUHP9osceM9+eJP22u5q+iX9NdefsUo+wp1XJp5IW7VqkDlM6qNesXtr++3bXFg4AAAAAXoqgCPA0FouZrnz0kZmsfPyx9MAD0rlzrq6szCIipClTpF9+ke688+L+bWvC9b+n3tO1K9PUvU4fWWTRV5u/UotJLfTE/57Qr1m/uq5oAAAAAPBCBEWAp3r8cWnmTPOxYnPmmItdZ2W5uqor0rKl9L//Sd99J3XocHH/igVXa/5jn+v2fRt0c50eyjPyNGXdFDV+r7FeXPyifj/zu+uKBgAAAAAvQlAEeLJ775WSkqTAQGnZMvNxYhkZrq7qiv3lL9LPP0vTp0tXX31x/4LPrtEPT83TPcd/Ukx4F53LPae3VrylBu820Os/vK5TOadcVjMAAAAAeAOCIsDT3XijGRLVqmU+e/7666U9e1xd1RXz8ZF69TIXvP7nP6Xq1c39589Lc97prNRB3+veswvUskYbZWVn6ZXvXlGjdxtp4qqJysnLcW3xAAAAAOChCIoAbxAdLS1fbj5vfudOqVMnc8EfL2C1mg9627VLGjbs4hPSss9ZNPuN27Xv5XW61/aFrg5uqIzTGXrm22fUbGIz/fuXfyvPlufS2gEAAADA0xAUAd6icWMzLGrVSkpPl264QfrpJ1dXVW6qVZNee03avVsaPNgMkCTp1EkfzX71IZ14fat6+n2gsKvCtefEHvWZ00ftPmynb7Z/I8MwXFo7AAAAAHgKgiLAm0RGSj/8IHXuLGVmSrfcYq4O7UVCQ6Xx482JU3//u+TnZ+
4/frSS5r7ypIwJu9Sj8lhVs1bTxsMb1ePLHrr+s+v1474fXVs4AAAAAHgAgiLA24SESIsWSd27S+fOSffcI02b5uqqyl2dOtLkydK2bVKfPpLFYu4/fLCK/vfSS7JO3q2brUNU2a+yVhxYoRum3qDuX3RXanqqS+sGAAAAAHdGUAR4oypVpDlzpL59pbw86ZFHpLffdnVVDtGwofT559KmTdJf/3pxf8a+ECUPTdRVn+xUrN+T8vPx04IdC9Tuw3Z6+L8Pa+exna4rGgAAAADcFEER4K0qVZI++0x67jnz6+efl156SfLS9XpatJBmzZLWrTMnUV1wdE+kUl75QIHTtqqNz0OSpC83fanmk5rrqW+e0m8nf3NRxQAAAADgfgiKAG/m4yONGye98Yb59ZtvSo8+KuXmurYuB2rXTpo9W9qwQbr//ou3pB3f1UgbRnyh4C/Xq4nlDuXacjV57WQ1ereRhi4ZquNnj7u2cAAAAABwAwRFQEXw4ovSJ5+YwdFnn5n3aJ09e/H7S5ao5g03SEuWuK7Gcta6tfTVV9LGjdJDD10MjDLT2mp7wnxVnfm96hiddDb3rMYuH6sG7zbQGz+9oTPnz7i2cAAAAABwIYIioKL4v/8zp9pYrdK8edJtt5lPRjMMWYYNk9+OHbIMG+Z1t6a1bCl98YW0ZYu56LWP/afeqc036NdRP8l/1jzVyG2lE+dOaEjyEDV6t5Emr5ms83nnXVs4AAAAALgAQRFQkdx9t7RwoRQUJP3wg9S1qzR9uixr1kiS+euiRS4u0jGaNTMXvd62zVzb289PkizK2dRDv7+eKsvcf+mq8/V16NQhPTX/KTWf1FxfbvxSNsPm4soBAAAAwHkIioCKpmtX6fvvpbAwcyGfRx6RYZ9mY/j6SsOHe92soks1bmzefbdrlzRwoPmAOBm+MlL/ptNj06QF78n/fKh2Hd+lh2c/rOgPo/Xtjm9leHGbAAAAAMAFBEVARdS2rbR8uRQeLuXkyGIzZ81Y8vKk1au9dlbRperWlSZMkPbvl0aOlKpXl5TnL616Wjlv7ZKSX5Pv+SBtyNigO764Q12ndtXy/ctdXDUAAAAAOBZBEVBRNWhgzir6Ix8f6ZVXvHpW0aVq1JASEszA6J13pKgoSTlVpR+HKW/8bmn587LkBujH/T/q+s+u111f3qWNGRtdXTYAAAAAOARBEVBRLVpk3nr2RzabtGaN9Mwz0pmK8wSwq66Snn3WvCVt2jRzEWydrSEtfkvGuzuktY9LNl/9b/v/1GZyG/WZ00e7j+92ddkAAAAAUK4IioCKyDDMtYh8fYs/ZtIk8/6sUaOko0edV5uLVaok9e0r/fKL+XC4rl0lZdWR/veRNGmLtOkBGTL071/+rabvNdMzC55RxqkMV5cNAAAAAOWCoAioiBYtMtciysu7/HG//24u4FO3rjRggDndpoLw8ZF69JCWLZPWrpX69JEqZTWRZs2QPlwj7bxVucZ5TVw9UfX/2VAvL3lFmecyC73Oudxz+teGf+m+mffp3nn36r6Z9+lfG/6lc7nnnP+mAAAAAOBPEBQBFU1JZhNJ5vcbNJDatZPOnpXef19q0kR64AEzZKpAoqOlzz+X9u6VXn5Zqp7dXvr3QmlasvRrR52znVbi8tcV+UYDvbpknM6ePytJmpc2T5FvR6rv3L76Ou1rpRxK0ddpX6vv3L6KfDtS/0v7n2vfGAAAAAD8AUERUNGUdDZRXp60e7c0ZoyUnCzddpu5ftHMmVLHjtJf/iLNn2/uqyAiI6XXX5cOHJAmT5aa+t8kfbxSmj5HOtJcZ3RMCctfUM1XG+uBqQPUc3pPnTh3QpJkM2wFfj1x7oTunn635qXNc9XbAQAAAIBCCIqAiuTCbCKfEv7V9/GRRoyQbrxR+vZbc/HrPn0kPz/p+++lO++UWreWpk6VcnIcWro7qVJF+vvfpS1bpPnzLYqr01N6f6M09zPpRF2d8Tuomfvel2EYMlT00+Mu7H9k7iPchgYAAADAbRAUARVJTo75HPiSzgKy2czpMxdCoNatzXuwdu+WnntOCgyUNm+W+veXrr5aevNNKbPwOj3eysdHuuMOafFiaesWXz3T5REFTtsubehtHmC5/PmGDB0/d1yztsxyfLEAAAAAUAIERUBFYrWat52tXVtgs61eraMLF8pWxPe0erV53qWioqRx48zQ6Y03pIgI6bffpJdeMr/3wgvSr7+65j26SLNm0rvvSr/tt6ptx7OSUbIfrz4WH83ZNsfB1QEAAABAyfi5ugAAThYVZW6XstmUe/iwFBpa8tvSJKlaNenFF6WBA6UvvjDDoy1bzF8nTJAeflh6/nnpmmvK8x24tapVpeDw36V9JZu1ZTNsOnbmmIOrAgAAAICSYUYRgCtntZq3n23cKH3zjdS1q5Sba96m1rq1eX/Wd9+ZayRVADWq1JCPpYQ/Xg1p+c6NevTj8dp5dK9D6wIAAACAP0NQBKD8+PhI3btLy5ZJP/8s3Xefue/bb6WbbpKuvVaaMcMMkbxYz6Y9859u9qcs0vlKv+vTg8+p8aSrFTGyvZ7/+nVtO7rNsUUCAAAAQBEIigA4RseO0syZ0vbt0j/+IVWubK559OCDUpMm0sSJ0unTrq7SIe5veb9CAkJk+dPVrC2yZAdLSW9Le7tKNh+lW9bp7dRX1HxSc9VJbKEhi17R+kPrZVSQ2VgAAAAAXIugCIBjNWwoTZok7dsnJSRINWpIe/ZIzzwj1a0rjRghHT7s6irLVYBfgKb1nCZJxYZFFllksUiz+/xL37wSr7uPL5PPPw9J86ZIO26T8irpYM5WvZHyuqI/ilbtNxsofuFzWnFgRclnKwEAAABAKREUAXCOWrWkkSPNJ6VNmiQ1aCAdOyaNHi3Vqyc9+aS0Y4erqyw3PZr20NwH56paQDVJyl+z6MKv1QKq6esHv1bP5j3Uvbs0d670a1qoxtz3mBqkfCu9dVj677+lrfdI5yvr0Lm9+ufK8er8aWdFvFVH/5j/DyXvTtb5vPMueocAnGnSpEmqX7++AgICFBMTo1WrVhV77JQpU9SlSxeFhIQoJCREcXFxlz0eAADgUhbDy+9nyMrKUnBwsDIzMxUUFFRur2uz2XT48GGFhobKpzRPiUKp0M7O4ZJ2zsuTZs+W3npLWr3a3GexSPfcI73wgnTddc6pw8HO5Z7TrC2zNHvrbKVnpis8OFz3Nr9X97W4TwF+AUWeY7OZyzxNmybNmiWdOX9aarRQav5fqck3UkBW/rHVA6rrrmZ36a/N/6q4BnHFvmZFwc8M53BUOzvqM9vTzZgxQ3379tXkyZMVExOjCRMmaObMmUpLS1NoaGih43v37q3OnTurU6dOCggI0BtvvKE5c+Zo8+bNql27domuSV8AAOAZHPGZTVBURvxjxDloZ+dwaTsbhvTDD2ZgNH/+xf3XXy+9+KK5OLYX9H1Z2/jkSem//zVDo2XLJPlmS1cvlZrPlprNla46mn9sVf+q6t64u/7a/K+6vfHtqupftdzfh7vjZ4ZzEBQ5V0xMjK699lpNn
DhRktn+UVFReuaZZzRkyJA/PT8vL08hISGaOHGi+vbtW6Jr0hcAAHgGR3xmM4oG4FoWi9S1q/TNN9KmTVL//lKlStJPP0l33SW1bCl98omUne3qSl0iMFB65BHpu+/MpZ1GjbCqoXG79L8p0tuHpKnfST8/I2XV1qmcU5qxeYYemPWAar5ZU3dPv1ufb/hcx88ed/XbAFBGOTk5Wrt2reLi4vL3+fj4KC4uTikpKSV6jTNnzuj8+fOqXr16scdkZ2crKyurwAYAAComgiIA7qNlS+nTT81E5MUXpaAgads26bHHpPr1pbFjpRMnXF2ly9Svb679vWOH9OOP0mP/56egY3+Rvn1X+ud+acpK6acXpWMNlZ2XrXlp89Rvbj+FjgvVrf+6VR+u+VDpp9Jd/TYAlMLRo0eVl5ensLCwAvvDwsKUnl6yv88vvfSSIiMjC4RNf5SYmKjg4OD8LSoq6orqBgAAnougCID7qV1beuMN6cABadw48+v0dGnoUCkqSoqPNxfFrqAsFvPOvClTpEOHpC+/lO7q4aNKh2OkJW9I7+6QPtggLUuQMlop15arxbsX68n5Tyry7Uh1+ayLJqycoH0n9rn6rQBwsLFjx2r69OmaM2eOAgKKX8Ns6NChyszMzN8OHDjgxCoBAIA7ISgC4L6CgqTnnpN27zYX6WnVSjp1SvrnP6WGDaU+faQNG1xdpUtVqSI9+KD09ddmljZlinTjjRZZDreWlo2UPtgovZcmLR4rHbxWhgz9tP8nDV44WPXfqa8OH3XQmB/HKO1omqvfCoAi1KxZU76+vsrIyCiwPyMjQ+Hh4Zc9d9y4cRo7dqwWLVqk1q1bX/ZYq9WqoKCgAhsAAKiYCIoAuD9/f6lvX+mXX6Rvv5VuuknKzZX+/W+pbVupWzdpyRJzYewKrHp18y69pUvNyVjjx0sdOkj6vYm0/CVpyirpn/ukb9+R9t4gGRatPbRWw5YOU7NJzdTy/ZYavnS4UtNT5eXPOQA8hr+/v9q3b6/k5OT8fTabTcnJyYqNjS32vDfffFOjR49WUlKSOnTo4IxSAQCAlyAoAuA5LBbpttuk5GRpzRqpVy/ziWiLFkm33CK1b2/eh5Wb6+pKXa52bWnwYGn1amn7dmnUKKlpU0mZdaWfn5Wmfi+NOyTN+0ja2U0Wm5+2HNmi1358Te0+bKeG7zbU84ueV8qBFNkMm6vfDlChxcfHa8qUKZo2bZq2bt2qp556SqdPn1b//v0lSX379tXQoUPzj3/jjTc0fPhwffrpp6pfv77S09OVnp6uU6dOueotAAAAD+LWQdHIkSNlsVgKbM2aNXN1WQDcQfv20vTp0s6d0jPPmPdgrV8vPfyw1KiR9M475m1qUOPG5iLYW7ead+q98oo9NDodJq17XPp3kow3j0iz/yVt7SlLXoD2nNijt1PeVqdPO6nO+DoaMH+Alu5ZqlwbIRzgbL169dK4ceM0YsQItW3bVqmpqUpKSspf4Hr//v06dOhQ/vEffPCBcnJydN999ykiIiJ/GzdunKveAgAA8CAWw43vLxg5cqRmzZqlJUuW5O/z8/NTzZo1S/waWVlZCg4OVmZmZrneb2+z2XT48GGFhobKx8et8zaPRjs7h1e08++/S++/L733nnTkiLkvJER66ikzSPqTtTwczd3a2DCkzZulWbOkmTOlLVsu+Wal01KjJKnFf+XT7BvZKp3M/1aNyjV0V9O79Nfmf1VcgzhZ/azOL/4y3K2dvZWj2tlRn9koPfoCAADP4IjPbLcfRfv5+Sk8PDx/K01IBKACqVFDGj5c2rdPmjzZnEZz/Lg0Zoz5XPknnpDSWLD5AovFXBt85EgzMNq8WXr1VemaaySdv0ra+lfpv1/INvaI9J/50rr/k8+5Gvr97O/6LPUz3fnlnar1Vi099N+HNGvLLJ3KYfYWAAAA4A3cPijasWOHIiMj1aBBA/Xu3Vv7K/AjsQGUQOXK0t//bt5nNXu2dN11Una2+Tiw5s2lnj2l5ctdXaXbadHCzNl++UXatk16/XUpOlpSnlXacYc07xPZ3kyXpi6VVg2Q5VSkTuac1PRN03X/zPtV661a6jm9p/614V86fva4q98OAAAAgDJy61vPvv32W506dUpNmzbVoUOHNGrUKB08eFCbNm1SYGBgkedkZ2crOzs7/+usrCxFRUXp+PHj5X7r2ZEjR1SrVi1ub3Ag2tk5vLqdDUNavlyWceNk+d//Lu7u1EnGc89Jd91lLojtYJ7axgcOSP/7n/T11xYtWybl5lrMb1hsUu1VUvPZUov/SiG788/x8/HTjfVv1L3N79XdTe5WWNUwp9Xrqe3saRzVzllZWQoJCeF2JzfArWcAAHgGR3xmu3VQ9EcnTpxQvXr1NH78eD366KNFHjNy5EiNGjWq0P7t27cXGy6Vhc1mU2ZmpoKDg/nHiAPRzs5RUdrZd/t2XfXhh6o8a5YsOTmSpNyGDXX673/X2fvvlwICHHZtb2jjzEyLli61auFCq5YuterkyQvvw5DCfrkYGoVuzj/HIos6hndU9wbddXv921UnsI5Da/SGdvYEjmrnkydPqkmTJoQTboCgCAAAz1DhgyJJuvbaaxUXF6fExMQiv8+MIu9COztHhWvnQ4dkmThRmjxZlhMnJElGWJiMp5+WnnxSql693C/pbW2ckyMtWybNm2fRvHnSwYOWi9+skSY1nyM1/69Ue02B8zpEdNC9ze/VPc3uUZMaTcq9Lm9rZ3fFjCLvR1AEAIBnqPBB0alTp1S3bl2NHDlSzz77bInO4alnno12do4K284nT0offyz985/mPVaSdNVV0qOPSoMHm4tglxNvbmPDkNatk775RlqwQFq92twnSQreLzWbY840qvuTZLn4kdOyVkv9tflfdW/ze9U6rLUsFkvRFygFb25nd8JTz7wffQEAgGeocE89e/755/X9999r7969WrFihe655x75+vrqoYcecnVpALxBYKAZCO3aJf3731KbNtLp09K770qNGkkPPyytX+/qKt2exSK1by8lJEg//yylp0uffy49+KAU4lNX+nmg9NkP0rhD0v8+lHbeKuX5afORzXr1h1fV9sO2uvqfjfXi4he18teVshk2V78lAAAAoMJy6xlFDz74oH744Qf9/vvvqlWrlq6//nq9/vrratiwYYlfgxlFno12dg7a2c4wpCVLpLfekhYvvrj/5pulF1+UbrnFTEXKoKK2cW6uGR4tWGBuqan2bwQcl5p8Y840arhQqnQu/5walWrrry3v0YOt71WXel3k5+P3p9c5l3tOMzfP1Jxtc5Sema7w4HDd0+we3d/yfgX4OW7tqYqKGUXej74AAMAzVPhbz8qCoMiz0c7OQTsXYf16adw4acYMKS/P3NemjfT881KvXlKlSqV6OdrYdPCglJQkzZ8vJSdLWVmS/E9JjZLMNY2afCNZT+UfX0U1dXPtu/X49ffq1kY3y+pnLfSa89Lm6ZG5j+j4uePysfjIZtjyfw0JCNG0ntPUo2kPJ75L70dQ5P3oCwAAPANBURkQFHk22tk5
aOfL2LdPmjBBmjLFvC1NkqKipEGDpMcfN29fKwHauLDz56VVq8zJW4sXmzOP8iznpKuTzZlGTb+WqhzLP94vL0htq9yp3tH36rGut6mq9SrNS5unntN7SpIMFf44s8icATb3wbm6q+ldTnlfFQFBkfejLwAA8AwERWVAUOTZaGfnoJ1L4Phx6YMPzPWLMjLMfcHB0lNPSc8+K0VEXPZ02vjPZWaaT1JbtMgMjnbsypXq/WDONGo+Rwo8dPHg3ABFnrlFR4OSdV5niwyJLrDIomoB1fTbc79xG1o5ISjyfvQFAACeocItZg0AbiMkRHr5ZWnvXnN2UdOmZrIxdqz5dLRHH5W2bi3+/CVLVPOGG8w1kFCk4GDp7rulSZOk7dulPbv89NHQm3T/VZMU8tmv0scrpBXPScevlvzO6beg/ylHZy4bEknmTKPj545r1pZZTnonAAAAgOciKAKA0ggIkB57TNqyRZo7V+rcWcrJkT79VGrRQurRQ/rxx0ueDy/JMGQZNkx+O3bIMmxYwe+hWPXrm3f3ffWVdOSwj9bMjdW4W8fpjp27VOXz9dKRZvqTjCifj8VHc7bNcWi9AAAAgDcgKAKAsvDxMae//PSTtGKFdM895hPRvvlGuuEGKTZW+u9/zYWwFy2SZc0aSTJ/XbTIxcV7Hl9fqX176bnnpPnfWJSZ1lbRTcOkEj6EzmbYNP+XFbo3cZI+TkrR8VOnHVswAAAA4KH+/JnDAIDLi42VZs8275d6+21p2jRzZeb77pMaNpRyc2X4+sqSl2f+Ony4dOutZrCEMvHzk+qH1VDqcfPpZiWRXSldc3Ke1pyfpcdTfFTlbDM1qBytzldH666O0ercoK2CA4IdXDkAAADg3giKAKC8NGkiffih9Oqr0sSJ5mI7u3ZJujjxxZKXJ61ebc4q6tbNdbV6gZ5Ne2r21tklP2Hr3ZJvrhSxTgo8pDNXbdEmbdGmff/Wh/vMQ4LzGql5tWh1bdJONzaLVvvIaNWsUtMxbwAAAABwQzz1rIx4gpFz0M7OQTs7yKlTUsuW0v79hb9Xtar05JPS9ddLnTpJtWo5vz4Pdy73nCLfjtSJcydK9NSzJbf/ptUpAfrxR2nZ2kM6mLfeDI0ubNX2FXl+sKLUqka0/tI0WtfVi1Z0RLQiqkbIUoFnhPHUM+9HXwAA4Bkc8ZnNjCIAcJTly4sOiSQzRBo3ztwkqXFjc2HsTp3MX5s1M9dBQrEC/AI0rec03T39bllkKTIsstjnck3rOU3RTQMU3Vr6+98lKUIHDkRo+fI79OOP0k9LpV92/i6F/yE8qrFDmTqg5b8f0PIVX0srzNcN8glTm7BodWlozjpqF95O9avVr9DhEQAAALwDM4rKiBkYzkE7Owft7ACGIcXESOvWmQta/5HFItWoYc4k2rq18PdDQsy1jy6ERx07SlWqOL5uDzQvbZ4emfuIjp87Lh+LuWbRhV9DAkI0rec09Wja409f58QJc13ylSvN7eefpazsLCk8tWCAVGuL5FN4XaSqviFqG95O19U1Zx1FR0SrcY3G8rF4398pZhR5P/oCAADP4IjPbIKiMuIf1s5BOzsH7ewACxdKt93258clJZkhUEqKmVIsX24mFGfPFjzOz09q29YMji6ER7VrO6R0T3Qu95xmbZml2VtnKz0zXeHB4bq3+b26r8V9CvALKNNr2mzStm1md1wIjzZtkmy+Z6TQjQVnHoVtlHzPF3qNyj5V1TqsrWKiLoZHzWs1l5+PZ0/oJSjyfvQFAACegaCoDAiKPBvt7By0czn7s9lEF/j6StHRZhJx6S1L589LGzaYodGF8OjgwcLn16tX8Ha1a64xX7MCc/Sf5ZMnpTVrLgZHK1dKhw9L8s2Ram0uGB6Fb5AqnS30Gv4+AWpVq7U61olWu4h2io6IVqvQVmUOtFyBoMj70RcAAHgGgqIyICjybLSzc9DO5ayks4kuSEq6/BPQDEM6cMAMjC6ERxs2mFNeLlW1qnTddRfDo+uukyrYP/Cc/WfZMKR9+8zAaO1aM0Rat07KypLkkyvVSLMHR5fcumY9Weh1fC1+alq9pTpGRau9feZRm7A2usr/Koe/h7IgKPJ+9AUAAJ6BoKgMCIo8G+3sHLRzObowm2jt2sJBTlF8fKT27QvPKvozJ0+a51yYcZSSYu67lMVizjK69Ha1+vVLdx0P4w5/lm02aedOMzS6sK1bJ50+Lclik0J2F5x5FLFOqvJ7odexyKKrA5sppl60OkSa4VHb8LaqFlDN6e/pjwiKvB99AQCAZyAoKgOCIs9GOzsH7VyOsrPNW8IyMkp+Tni4tHevZLWW/bp5edLmzQVvV9uzp/BxEREXb1Xr3Nlc98jfv+zXdTPu+mc5L0/avr1geLR+/YWlqAwp+EDh8CjwUJGvVbtyQ11bp506XrLuUa2rajn1/RAUeT/6AgAAz0BQVAYERZ6NdnYO2rmcHTggHTlSYJfNZtOxY8dUvXr1wm0cGirVqVP+dRw6dDE0WrHCnNZy/g8LLgcEmItpXwiPOnWSqlcv/1qcxJP+LOfmmotlb9ggpaZe3I4etR9Q9dDFW9YuPHUtZG+Rr1Xdr47a1IrW9Q2jdW0dMzyKDIyUxUGzxwiKvB99AQCAZyAoKgOCIs9GOzsH7ex4btHGZ8+aU1kuXevo2LHCxzVrVvB2tSZNPOZ2Nbdo5ytgGGa+d2lwtGGDtGOH+T1VPnYxNLqw1dxe5GtdpVA1CYzWdXWjdWOzaHWoHa361epfUXh0LvecZm6eqTnb5uQ/Xe6eZvfo/pb3l8ti3IQT7oO+AADAMxAUlQFBkWejnZ2DdnY8t2xjw5DS0grerpaWVvi4GjUK3q7Wvr1UubLz6y0Bt2zncnDqlLRxY8HwaONG6cwZSdYsKWxDwfCo1hbJp/AaWf551RRVqZ3ahEara5NoxbWMVtOajeXr8+dPy5uXNk+PzH1Ex88dl4/FRzbDlv9rSECIpvWcph5Ne1zR+ySccB/0BQAAnoGgqAwIijwb7ewctLPjeUwbHz1qLox9ITxavVo6d67gMZUqmWHRpberhYe7pt4/8Jh2Lgc2m7m01aZNBbdt26TzOiOFbrwkPFpvfu2XU+h1fHKvUvWctmpYJVrtI6N1U/No3dquuQKvqpR/zLy0eeo5vackyVDhYYNF5iyluQ/O1V1N7yrzeyKccB/0BQAAnoGgqAwIijwb7ewctLPjeWwb5+SYqy5fuF1t+fKiF+pu0OBiaNS5s9SypflENyfz2HYuR+fPm7eqXRoebd4s7didI6PmloIzj8JTpUpnC79IrlXWE60VZotWw6BWSqn8srKNU0WGRBdYZFG1gGr67bnfynwbGuGE+6AvAADwDARFZUBQ5NloZ+egnR3Pa9rYMMynqV24VW35cjOJ+ONHSXCwdN11F8OjmBipalWHl+c17ewAZ8+as40unXm0NS1Pu06kyRZ2aXi0XgrIKvN1/nXPv/S31n8r07mEE+6DvgAAwDM44jPbr1xeBQBQMVgs5uyhBg2kv9nDgMxMaeXKi+HRypXmvoULzU2
SfH2lNm0KrnUUFeW691EBVa4stWtnbhf5KienhXbubKFt2/6mbdukLVtt2rB/t3acXqfskHVS28+kqw5LJVgD28fioznb5pQ5KAIAAIDrERQBAK5McLDUrZu5SeZz3zduLPh0tf37pXXrzG3iRPO4OnUK3q7Wpo3kd4UfS0uWqObTT5vXuPXWK3utCsLfX2rRwtxMPpIayTAa6eDBB9R95kr9knW4RK9lM2w6dqaIJ+kBAADAYxAUAQDKl5/fxakrTz9t7vv114szjlasMNc9+vVXacYMc5OkKlXMW9QuhEexsVK1aiW/rmHIMmyY/HbskDFsmHTLLeYMKJSJxWJmeY1q19Cmk+bTzf6Mj8VH1atUd0J1AAAAcBSCIgCA49WpIz3wgLlJ0unT0qpVF8OjlBTpxAnpu+/MTTKTipYtCz5drWHD4sOfRYtkWbPGPHXNGmnRoouznFBmPZv21Oyts0t0rM2w6Z5m9zi4IgAAADgSQREAwPmuukq68UZzk8xnvW/denHG0fLl0s6dF1de/ugj87iwMDMwuhAeRUdLVqu5mPbw4TJ8fWXJyzN/HT7cvP2MWUVX5P6W92tg0kCdOHeiRE89u6/FfU6sDgAAAOWNoAgA4Ho+PubsoZYtpSeeMPdlZJgzjS6ER2vWmPvmzDE3yQyJOnSQIiOl1avz11u25OVJq1czq6gcBPgFaFrPabp7+t2yyFJkWGSxt/y0ntMU4Bfg7BIBAABQjgiKAADuKSxM6tnT3CTp3DlzMexLF8k+csT8fVEsFqlfP2noUKluXXOLipJq1WKWUSn1aNpDcx+cq0fmPqLj547Lx2KuWXTh12oB1TSt5zT1aNrD1aUCAADgChEUAQA8Q0DAxdvOXnjBvN1s505p8mRp/PjCxxuGOQNp0KCC+61WMzC6dLsQIl34fVCQU96SJ7mr6V367bnfNGvLLM3eOlvpmekKDw7Xvc3v1X0t7mMmEQAAgJcgKAIAeCaLRWrUSPrxR8nXV8rLK/qYatXM4379VUpPl7KzzYBp587iXzsoqHCAdOnXdeqYwVUFE+AXoL+1/psebvWwDh8+rNDQUPn4+Li6LAAAAJQjgiIAgOdatMhci6g4hiEdPy6NHm2uVZSTIx08KB04cHHbv7/g748fl7KypM2bza04oaGXn5UUEWEGWAAAAIAHISgCAHgm+5POip1NdIGvr3ncrbdK/v7S1VebW3FOny4cIP3x6zNnpMOHzW3t2uKvGxlZOEC6NFyqWZP1kgAAAOBWCIoAAJ7pz2YTXVDaJ6BddZXUrJm5FcUwpGPHip+RdOCAOWspN/fi/uIEBBQ/I+nC7wMD/7xmZ1uyRDWfflqaONEM4AAAAOA1CIoAAJ7nwmwiHx/JZvvz4318Ls4qutIZPBaLVKOGubVtW/QxeXnmQtpFhUgXfp+RYT7JbccOcytOcHDxIVLdulLt2uYC3c5iGLIMGya/HTtkDBsm3XILs6IAAAC8CEERAMDz5OSYYUtJQiLJPO7AAfM8Z4QqF247i4yUrruu6GOys82ZR5e7xe3ECSkzU9q40dyKExZ2+VlJ4eHlt17SokWyrFkjSeavJZ2pBQAAAI9AUAQA8DxWq3k72ZEjBXbbbDYdO3ZM1atXL/w0rtBQ5868+TNWq9SggbkV5+TJogOkS78+d86cnZSRIdkDnEL8/MyZR5e7za169T+fGWSfyWX4+sqSl2f+Wl4ztQAAAOAWCIoAAJ7pQtBxKZtNuYcPm6GQNzy2PTBQatHC3IpiGNLvv1/+FrfffjPXS9q3z9yKU7ly4cW2//j18uXS6tW6EAlZSrv+EwAAANweQREAAJ7KYjGfnFazphQdXfQxublSenrxC28fOGA+ve3sWSktzdyKU9Tta5c+VY5ZRQAAAB6PoAgAAG/m5yfVqWNusbFFH3PunPTrr5d/kltWlrlI9x8xqwgAAMCrEBQBAFDRBQRIjRqZW1EMQ2rfXtqwoegFxJlVBAAA4DW8YAEHAADgUIsWSevXF/+UuUtnFQEAAMCjERQBAIDi2Z90VuT6RJe6MKvIMJxTFwAAAByCoAgAABRv0SJztlBR6xNdillFAAAAXoGgCAAAFO3CbCKfEg4XfHyYVQQAAODhPCooGjt2rCwWiwYNGuTqUgAA8H45OeZTz4pbm+iPbDbzCWk5OY6tCwAAAA7jMU89W716tT788EO1bt3a1aUAAFAxWK3m7WRHjhTYbbPZdOzYMVWvXl0+f5xtFBpqngcAAACP5BFB0alTp9S7d29NmTJFr732mqvLAQCg4oiKMrdL2WzKPXzYDIVKelsaAAAAPIJHjO4GDBig7t27Ky4uztWlAAAAAAAAeC23n1E0ffp0rVu3TqtXry7R8dnZ2crOzs7/OisrS5I5Td5W0jUWSsBms8kwjHJ9TRRGOzsH7ex4tLFz0M7O4ah2pt8AAABcz62DogMHDmjgwIFavHixAgICSnROYmKiRo0aVWj/kSNHdO7cuXKrzWazKTMzU4ZhFF6fAeWGdnYO2tnxaGPnoJ2dw1HtfPLkyXJ7LQAAAJSNxTDc9xm2c+fO1T333CNfX9/8fXl5ebJYLPLx8VF2dnaB70lFzyiKiorS8ePHFRQUVG612Ww2HTlyRLVq1eIfIw5EOzsH7ex4tLFz0M7O4ah2zsrKUkhIiDIzM8v1M9sbTJo0SW+99ZbS09PVpk0bvffee+rYsWOxx8+cOVPDhw/X3r171bhxY73xxhu64447Sny9rKwsBQcH0xcAALg5R3xmu/WMoptvvlkbN24ssK9///5q1qyZXnrppUIhkSRZrVZZi3jaio+PT7n/o+FCYMU/RhyLdnYO2tnxaGPnoJ2dwxHtTJ8VbcaMGYqPj9fkyZMVExOjCRMmqFu3bkpLS1NoaGih41esWKGHHnpIiYmJuvPOO/XFF1+oZ8+eWrdunVq1auWCdwAAADyJW4/IAgMD1apVqwLbVVddpRo1ajDQAQAAFcL48eP1+OOPq3///mrRooUmT56sKlWq6NNPPy3y+HfeeUe33XabXnjhBTVv3lyjR49WdHS0Jk6c6OTKAQCAJ3LrGUXl4cKddRcWtS4vNptNJ0+eVEBAAP8D6kC0s3PQzo5HGzsH7ewcjmrnC5/VbnxXvNPl5ORo7dq1Gjp0aP4+Hx8fxcXFKSUlpchzUlJSFB8fX2Bft27dNHfu3GKv88db9zMzMyWV//gJAACUL0eMnzwuKFq2bFmpjr+wMGZUVJQDqgEAAOXt5MmTCg4OdnUZbuHo0aPKy8tTWFhYgf1hYWHatm1bkeekp6cXeXx6enqx1ynuYSCMnwAA8Ay///57uY2fPC4oKq3IyEgdOHBAgYGBslgs5fa6FxbJPnDgAIs8OhDt7By0s+PRxs5BOzuHo9rZMAydPHlSkZGR5faaKJmhQ4cWmIV04sQJ1atXT/v37ye0cyF+prkP+sJ90BfugX5wH5mZmapbt66qV69ebq/p9UGRj4+P6tSp47DXDwoK4i+GE9DOzkE7Ox5t7By0s3M4op0JJQqqWbOmfH19lZGRUWB/RkaGws
PDizwnPDy8VMdLxT8MJDg4mL9LboCfae6DvnAf9IV7oB/cR7k+YKTcXgkAAADlyt/fX+3bt1dycnL+PpvNpuTkZMXGxhZ5TmxsbIHjJWnx4sXFHg8AAHApr59RBAAA4Mni4+PVr18/dejQQR07dtSECRN0+vRp9e/fX5LUt29f1a5dW4mJiZKkgQMHqmvXrnr77bfVvXt3TZ8+XWvWrNFHH33kyrcBAAA8BEFRGVmtViUkJBQ5TRvlh3Z2DtrZ8Whj56CdnYN2dq5evXrpyJEjGjFihNLT09W2bVslJSXlL1i9f//+AtPNO3XqpC+++EKvvPKKXn75ZTVu3Fhz585Vq1atSnxN+tg90A/ug75wH/SFe6Af3Icj+sJi8AxaAAAAAAAAiDWKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgqIymzRpkurXr6+AgADFxMRo1apVri7Jq/zwww/q0aOHIiMjZbFYNHfuXFeX5HUSExN17bXXKjAwUKGhoerZs6fS0tJcXZbX+eCDD9S6dWsFBQUpKChIsbGx+vbbb11dllcbO3asLBaLBg0a5OpSvMrIkSNlsVgKbM2aNXN1WbgCpR3LzJw5U82aNVNAQICuueYaLViwwEmVerfS9MOUKVPUpUsXhYSEKCQkRHFxcYxBy1FZx/fTp0+XxWJRz549HVtgBVLavjhx4oQGDBigiIgIWa1WNWnShJ9R5aC0/TBhwgQ1bdpUlStXVlRUlAYPHqxz5845qVrvVJZ/Fy9btkzR0dGyWq1q1KiRpk6dWurrEhSVwYwZMxQfH6+EhAStW7dObdq0Ubdu3XT48GFXl+Y1Tp8+rTZt2mjSpEmuLsVrff/99xowYIBWrlypxYsX6/z587r11lt1+vRpV5fmVerUqaOxY8dq7dq1WrNmjW666Sbdfffd2rx5s6tL80qrV6/Whx9+qNatW7u6FK/UsmVLHTp0KH/76aefXF0Syqi0Y5kVK1booYce0qOPPqr169erZ8+e6tmzpzZt2uTkyr1Lafth2bJleuihh/Tdd98pJSVFUVFRuvXWW3Xw4EEnV+59yjq+37t3r55//nl16dLFSZV6v9L2RU5Ojm655Rbt3btXs2bNUlpamqZMmaLatWs7uXLvUtp++OKLLzRkyBAlJCRo69at+uSTTzRjxgy9/PLLTq7cu5T238V79uxR9+7ddeONNyo1NVWDBg3SY489poULF5buwgZKrWPHjsaAAQPyv87LyzMiIyONxMREF1blvSQZc+bMcXUZXu/w4cOGJOP77793dSleLyQkxPj4449dXYbXOXnypNG4cWNj8eLFRteuXY2BAwe6uiSvkpCQYLRp08bVZaCclHYs88ADDxjdu3cvsC8mJsb4+9//7tA6vd2Vjilzc3ONwMBAY9q0aY4qscIoS1/k5uYanTp1Mj7++GOjX79+xt133+2ESr1fafvigw8+MBo0aGDk5OQ4q8QKobT9MGDAAOOmm24qsC8+Pt7o3LmzQ+usSEry7+IXX3zRaNmyZYF9vXr1Mrp161aqazGjqJRycnK0du1axcXF5e/z8fFRXFycUlJSXFgZcGUyMzMlSdWrV3dxJd4rLy9P06dP1+nTpxUbG+vqcrzOgAED1L179wI/n1G+duzYocjISDVo0EC9e/fW/v37XV0SyqAsY5mUlJRCf7e6devG2OcKlMeY8syZMzp//jyf3VeorH3x6quvKjQ0VI8++qgzyqwQytIX8+bNU2xsrAYMGKCwsDC1atVKY8aMUV5enrPK9jpl6YdOnTpp7dq1+ben7d69WwsWLNAdd9zhlJphKq/Pa7/yLKoiOHr0qPLy8hQWFlZgf1hYmLZt2+aiqoArY7PZNGjQIHXu3FmtWrVydTleZ+PGjYqNjdW5c+dUtWpVzZkzRy1atHB1WV5l+vTpWrdunVavXu3qUrxWTEyMpk6dqqZNm+rQoUMaNWqUunTpok2bNikwMNDV5aEUyjKWSU9PL/L49PR0h9Xp7cpjTPnSSy8pMjKSgPwKlaUvfvrpJ33yySdKTU11QoUVR1n6Yvfu3Vq6dKl69+6tBQsWaOfOnfrHP/6h8+fPKyEhwRlle52y9MPDDz+so0eP6vrrr5dhGMrNzdWTTz7JrWdOVtzndVZWls6ePavKlSuX6HUIigBowIAB2rRpE+uNOEjTpk2VmpqqzMxMzZo1S/369dP3339PWFRODhw4oIEDB2rx4sUKCAhwdTle6/bbb8//fevWrRUTE6N69erpq6++4n/TARcYO3aspk+frmXLlvGzz8lOnjypPn36aMqUKapZs6ary6nwbDabQkND9dFHH8nX11ft27fXwYMH9dZbbxEUOdGyZcs0ZswYvf/++4qJidHOnTs1cOBAjR49WsOHD3d1eSglgqJSqlmzpnx9fZWRkVFgf0ZGhsLDw11UFVB2Tz/9tL755hv98MMPqlOnjqvL8Ur+/v5q1KiRJKl9+/ZavXq13nnnHX344Ycursw7rF27VocPH1Z0dHT+vry8PP3www+aOHGisrOz5evr68IKvVO1atXUpEkT7dy509WloJTKMpYJDw9n7FPOrmRMOW7cOI0dO1ZLlixh8f5yUNq+2LVrl/bu3asePXrk77PZbJIkPz8/paWlqWHDho4t2kuV5e9FRESEKlWqVOCzvnnz5kpPT1dOTo78/f0dWrM3Kks/DB8+XH369NFjjz0mSbrmmmt0+vRpPfHEExo2bJh8fFj1xhmK+7wOCgoq8WwiiaeelZq/v7/at2+v5OTk/H02m03JycmsOQKPYhiGnn76ac2ZM0dLly7V1Vdf7eqSKgybzabs7GxXl+E1br75Zm3cuFGpqan5W4cOHdS7d2+lpqYSEjnIqVOntGvXLkVERLi6FJRSWcYysbGxBY6XpMWLFzP2uQJlHVO++eabGj16tJKSktShQwdnlOr1StsXzZo1K/S5c9ddd+U/ZSgqKsqZ5XuVsvy96Ny5s3bu3Jkf1knS9u3bFRERQUhURmXphzNnzhQKgy6Mwcx1mOEM5fZ5Xaqlr2EYhmFMnz7dsFqtxtSpU40tW7YYTzzxhFGtWjUjPT3d1aV5jZMnTxrr16831q9fb0gyxo8fb6xfv97Yt2+fq0vzGk899ZQRHBxsLFu2zDh06FD+dubMGVeX5lWGDBlifP/998aePXuMX375xRgyZIhhsViMRYsWubo0r8ZTz8rfc889ZyxbtszYs2ePsXz5ciMuLs6oWbOmcfjwYVeXhjL4s7FMnz59jCFDhuQfv3z5csPPz88YN26csXXrViMhIcGoVKmSsXHjRle9Ba9Q2n4YO3as4e/vb8yaNavAZ/fJkydd9Ra8Rmn74o946ln5KW1f7N+/3wgMDDSefvppIy0tzfjmm2+M0NBQ47XXXnPVW/AKpe2HhIQEIzAw0Pjyyy+N3bt3G4sWLTIaNmxoPPDAA656C17hz/5dPGTIEKNPnz75x+/evduoUqWK8cILLxhbt241Jk2aZPj6+hpJSUmlui5BURm99957Rt26dQ1/f3+jY
8eOxsqVK11dklf57rvvDEmFtn79+rm6NK9RVPtKMj777DNXl+ZV/u///s+oV6+e4e/vb9SqVcu4+eabCYmcgKCo/PXq1cuIiIgw/P39jdq1axu9evUydu7c6eqycAUuN5bp2rVroc/cr776ymjSpInh7+9vtGzZ0pg/f76TK/ZOpemHevXqFfnZnZCQ4PzCvVBp/05ciqCofJW2L1asWGHExMQYVqvVaNCggfH6668bubm5Tq7a+5SmH86fP2+MHDnSaNiwoREQEGBERUUZ//jHP4zjx487v3Av8mf/Lu7Xr5/RtWvXQue0bdvW8Pf3Nxo0aFCmf99ZDIN5YAAAAAAAAGCNIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAJwiUceeUQ9e/Z02fX79OmjMWPGuOz65WHq1KmqVq1aiY5NSkpS27ZtZbPZHFsUAAAAAJQQQRFQQVgslstuI0eO1DvvvKOpU6e6pL4NGzZowYIFevbZZ11yfVe47bbbVKlSJf3nP/9xdSkAAAAAIEnyc3UBAJzj0KFD+b+fMWOGRowYobS0tPx9VatWVdWqVV1RmiTpvffe0/333+/SGlzhkUce0bvvvqs+ffq4uhQAAAAAYEYRUFGEh4fnb8HBwbJYLAX2Va1atdCtZ3/5y1/0zDPPaNCgQQoJCVFYWJimTJmi06dPq3///goMDFSjRo307bffFrjWpk2bdPvtt6tq1aoKCwtTnz59dPTo0WJry8vL06xZs9SjR48C+99//301btxYAQEBCgsL03333Zf/PZvNpsTERF199dWqXLmy2rRpo1mzZhU4f/PmzbrzzjsVFBSkwMBAdenSRbt27co//9VXX1WdOnVktVrVtm1bJSUl5Z+7d+9eWSwWzZ49WzfeeKOqVKmiNm3aKCUlpcA1pk6dqrp166pKlSq655579Pvvvxf4/oYNG3TjjTcqMDBQQUFBat++vdasWZP//R49emjNmjX5dQEAAACAKxEUAbisadOmqWbNmlq1apWeeeYZPfXUU7r//vvVqVMnrVu3Trfeeqv69OmjM2fOSJJOnDihm266Se3atdOaNWuUlJSkjIwMPfDAA8Ve45dfflFmZqY6dOiQv2/NmjV69tln9eqrryotLU1JSUm64YYb8r+fmJiozz//XJMnT9bmzZs1ePBg/e1vf9P3338vSTp48KBuuOEGWa1WLV26VGvXrtX//d//KTc3V5L0zjvv6O2339a4ceP0yy+/qFu3brrrrru0Y8eOArUNGzZMzz//vFJTU9WkSRM99NBD+a/x888/69FHH9XTTz+t1NRU3XjjjXrttdcKnN+7d2/VqVNHq1ev1tq1azVkyBBVqlQp//t169ZVWFiYfvzxx7J0DwAAAACUK4thGIariwDgXFOnTtWgQYN04sSJAvsfeeQRnThxQnPnzpVkzijKy8vLDzHy8vIUHByse++9V59//rkkKT09XREREUpJSdF1112n1157TT/++KMWLlyY/7q//vqroqKilJaWpiZNmhSqZ+7cubrvvvt0/vx5WSwWSdLs2bPVv39//frrrwoMDCxwfHZ2tqpXr64lS5YoNjY2f/9jjz2mM2fO6IsvvtDLL7+s6dOnKy0trUAwc0Ht2rU1YMAAvfzyy/n7OnbsqGuvvVaTJk3S3r17dfXVV+vjjz/Wo48+KknasmWLWrZsqa1bt6pZs2Z6+OGHlZmZqfnz5+e/xoMPPqikpKT8tg0KCtJ7772nfv36Fdsf0dHRuvvuu5WQkFDsMQAAAADgDMwoAnBZrVu3zv+9r6+vatSooWuuuSZ/X1hYmCTp8OHDksxbrb777rv8NY+qVq2qZs2aSVKxt1edPXtWVqs1PySSpFtuuUX16tVTgwYN1KdPH/3nP//Jn7W0c+dOnTlzRrfcckuB63z++ef510hNTVWXLl2KDImysrL022+/qXPnzgX2d+7cWVu3bi32/UdERBR4r1u3blVMTEyB4y8NriQpPj5ejz32mOLi4jR27Ngi26By5cr57w0AAAAAXInFrAFc1h+DFovFUmDfhXDnwiPeT506pR49euiNN94o9FoXgpY/qlmzps6cOaOcnBz5+/tLkgIDA7Vu3TotW7ZMixYt0ogRIzRy5EitXr1ap06dkiTNnz9ftWvXLvBaVqtVkhm+lIfLvdeSGDlypB5++GHNnz9f3377rRISEjR9+nTdc889+cccO3ZMtWrVKpd6AQAAAOBKMKMIQLmKjo7W5s2bVb9+fTVq1KjAdtVVVxV5Ttu2bSWZt3Zdys/PT3FxcXrzzTf1yy+/aO/evVq6dKlatGghq9Wq/fv3F7pGVFSUJHMm0I8//qjz588Xul5QUJAiIyO1fPnyAvuXL1+uFi1alPi9Nm/eXD///HOBfStXrix0XJMmTTR48GAtWrRI9957rz777LP87507d067du1Su3btSnxdAAAAAHAUgiIA5WrAgAE6duyYHnroIa1evVq7du3SwoUL1b9/f+Xl5RV5Tq1atRQdHa2ffvopf98333yjd999V6mpqdq3b58+//xz2Ww2NW3aVIGBgXr++ec1ePBgTZs2Tbt27dK6dev03nvvadq0aZKkp59+WllZWXrwwQe1Zs0a7dixQ//617+UlpYmSXrhhRf0xhtvaMaMGUpLS9OQIUOUmpqqgQMHlvi9Pvvss0pKStK4ceO0Y8cOTZw4scCT086ePaunn35ay5Yt0759+7R8+XKtXr1azZs3zz9m5cqVslqthW5ZAwAAAABXICgCUK4uzNTJy8vTrbfeqmuuuUaDBg1StWrV5ONT/I+cxx57TP/5z3/yv65WrZpmz56tm266Sc2bN9fkyZP15ZdfqmXLlpKk0aNHa/jw4UpMTFTz5s112223af78+br66qslSTVq1NDSpUt16tQpde3aVe3bt9eUKVPybyV79tlnFR8fr+eee07XXHONkpKSNG/ePDVu3LjE7/W6667TlClT9M4776hNmzZatGiRXnnllfzv+/r66vfff1ffvn3VpEkTPfDAA7r99ts1atSo/GO+/PJL9e7dW1WqVCnxdQEAAADAUXjqGQC3cPbsWTVt2lQzZsyoMLNrjh49qqZNm2rNmjX5ARcAAAAAuBIzigC4hcqVK+vzzz/X0aNHXV2K0+zdu1fvv/8+IREAAAAAt8GMIgAAAAAAAEhiRhEAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIcnJQ9MMPP6hHjx6KjIyUxWLR
3Llz//ScZcuWKTo6WlarVY0aNdLUqVMdXicAAIC7YPwEAACcyalB0enTp9WmTRtNmjSpRMfv2bNH3bt314033qjU1FQNGjRIjz32mBYuXOjgSgEAANwD4ycAAOBMFsMwDJdc2GLRnDlz1LNnz2KPeemllzR//nxt2rQpf9+DDz6oEydOKCkpyQlVAgAAuA/GTwAAwNH8XF3A5aSkpCguLq7Avm7dumnQoEHFnpOdna3s7Oz8r202m44dO6YaNWrIYrE4qlQAAHCFDMPQyZMnFRkZKR8fllEsK8ZPAABUHI4YP7l1UJSenq6wsLAC+8LCwpSVlaWzZ8+qcuXKhc5JTEzUqFGjnFUiAAAoZwcOHFCdOnVcXYbHYvwEAEDFU57jJ7cOispi6NChio+Pz/86MzNTdevW1YEDBxQUFOTCygAAwOVkZWUpKipKgYGBri6lwmH8BACAZ3LE+Mmtg6Lw8HBlZGQU2JeRkaGgoKAi/zdMkqxWq6xWa6H9QUFBDHQAAPAA3Op0ZRg/AQBQ8ZTn+MmtFwCIjY1VcnJygX2LFy9WbGysiyoCAABwb4yfAADAlXBqUHTq1CmlpqYqNTVVkvn41tTUVO3fv1+SOe25b9+++cc/+eST2r17t1588UVt27ZN77//vr766isNHjzYmWUDAAC4DOMnAADgTE4NitasWaN27dqpXbt2kqT4+Hi1a9dOI0aMkCQdOnQof9AjSVdffbXmz5+vxYsXq02bNnr77bf18ccfq1u3bs4sGwAAwGUYPwEAAGeyGIZhuLoIR8rKylJwcLAyMzO5xx4AADfGZ7b7oC8AAPAMjvjMdus1igAAAAAAAOA8BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsHN6UDRp0iTVr19fAQEBiomJ0apVqy57/IQJE9S0aVNVrlxZUVFRGjx4sM6dO+ekagEAAFyP8RMAAHAWpwZFM2bMUHx8vBISErRu3Tq1adNG3bp10+HDh4s8/osvvtCQIUOUkJCgrVu36pNPPtGMGTP08ssvO7NsAAAAl2H8BAAAnMmpQdH48eP1+OOPq3///mrRooUmT56sKlWq6NNPPy3y+BUrVqhz5856+OGHVb9+fd1666166KGH/vR/0QAAALwF4ycAAOBMTguKcnJytHbtWsXFxV28uI+P4uLilJKSUuQ5nTp10tq1a/MHNrt379aCBQt0xx13FHud7OxsZWVlFdgAAAA8EeMnAADgbH7OutDRo0eVl5ensLCwAvvDwsK0bdu2Is95+OGHdfToUV1//fUyDEO5ubl68sknLzt1OjExUaNGjSrX2gEAAFyB8RMAAHA2t37q2bJlyzRmzBi9//77WrdunWbPnq358+dr9OjRxZ4zdOhQZWZm5m8HDhxwYsUAAACuxfgJAABcCafNKKpZs6Z8fX2VkZFRYH9GRobCw8OLPGf48OHq06ePHnvsMUnSNddco9OnT+uJJ57QsGHD5ONTOOeyWq2yWq3l/wYAAACcjPETAABwNqfNKPL391f79u2VnJycv89msyk5OVmxsbFFnnPmzJlCgxlfX19JkmEYjisWAADADTB+AgAAzua0GUWSFB8fr379+qlDhw7q2LGjJkyYoNOnT6t///6SpL59+6p27dpKTEyUJPXo0UPjx49Xu3btFBMTo507d2r48OHq0aNH/oAHAADAmzF+AgAAzuTUoKhXr146cuSIRowYofT0dLVt21ZJSUn5CzTu37+/wP+AvfLKK7JYLHrllVd08OBB1apVSz169NDrr7/uzLIBAABchvETAABwJovh5XOQs7KyFBwcrMzMTAUFBbm6HAAAUAw+s90HfQEAgGdwxGe2Wz/1DAAAAAAAAM5DUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO6cHRZMmTVL9+vUVEBCgmJgYrVq16rLHnzhxQgMGDFBERISsVquaNGmiBQsWOKlaAAAA12P8BAAAnMXPmRebMWOG4uPjNXnyZMXExGjChAnq1q2b0tLSFBoaWuj4nJwc3XLLLQoNDdWsWbNUu3Zt7du3T9WqVXNm2QAAAC7D+AkAADiTxTAMw1kXi4mJ0bXXXquJEydKkmw2m6KiovTMM89oyJAhhY6fPHmy3nrrLW3btk2VKlUq0zWzsrIUHByszMxMBQUFXVH
9AADAcfjMLhrjJwAAUBxHfGY77daznJwcrV27VnFxcRcv7uOjuLg4paSkFHnOvHnzFBsbqwEDBigsLEytWrXSmDFjlJeXV+x1srOzlZWVVWADAADwRIyfAACAszktKDp69Kjy8vIUFhZWYH9YWJjS09OLPGf37t2aNWuW8vLytGDBAg0fPlxvv/22XnvttWKvk5iYqODg4PwtKiqqXN8HAACAszB+AgAAzubWTz2z2WwKDQ3VRx99pPbt26tXr14aNmyYJk+eXOw5Q4cOVWZmZv524MABJ1YMAADgWoyfAADAlXDaYtY1a9aUr6+vMjIyCuzPyMhQeHh4kedERESoUqVK8vX1zd/XvHlzpaenKycnR/7+/oXOsVqtslqt5Vs8AACACzB+AgAAzua0GUX+/v5q3769kpOT8/fZbDYlJycrNja2yHM6d+6snTt3ymaz5e/bvn27IiIiihzkAAAAeBPGTwAAwNmceutZfHy8pkyZomnTpmnr1q166qmndPr0afXv31+S1LdvXw0dOjT/+KeeekrHjh3TwIEDtX37ds2fP19jxozRgAEDnFk2AACAyzB+AgAAzuS0W88kqVevXjpy5IhGjBih9PR0tW3bVklJSfkLNO7fv18+Phezq6ioKC1cuFCDBw9W69atVbt2bQ0cOFAvvfSSM8sGAABwGcZPAADAmSyGYRiuLsKRsrKyFBwcrMzMTAUFBbm6HAAAUAw+s90HfQEAgGdwxGe2Wz/1DAAAAAAAAM5DUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO6cHRZMmTVL9+vUVEBCgmJgYrVq1qkTnTZ8+XRaLRT179nRsgQAAAG6G8RMAAHAWpwZFM2bMUHx8vBISErRu3Tq1adNG3bp10+HDhy973t69e/X888+rS5cuTqoUAADAPTB+AgAAzuTUoGj8+PF6/PHH1b9/f7Vo0UKTJ09WlSpV9OmnnxZ7Tl5ennr37q1Ro0apQYMGTqwWAADA9Rg/AQAAZ3JaUJSTk6O1a9cqLi7u4sV9fBQXF6eUlJRiz3v11VcVGhqqRx99tETXyc7OVlZWVoENAADAEzF+AgAAzua0oOjo0aPKy8tTWFhYgf1hYWFKT08v8pyffvpJn3zyiaZMmVLi6yQmJio4ODh/i4qKuqK6AQAAXIXxEwAAcDa3ferZyZMn1adPH02ZMkU1a9Ys8XlDhw5VZmZm/nbgwAEHVgkAAOA+GD8BAIAr5eesC9WsWVO+vr7KyMgosD8jI0Ph4eGFjt+1a5f27t2rHj165O+z2WySJD8/P6Wlpalhw4aFzrNarbJareVcPQAAgPMxfgIAAM7mtBlF/v7+at++vZKTk/P32Ww2JScnKzY2ttDxzZo108aNG5Wampq/3XXXXbrxxhuVmprKlGgAAOD1GD8BAABnc9qMIkmKj49Xv3791KFDB3Xs2FETJkzQ6dOn1b9/f0lS3759Vbt2bSUmJiogIECtWrUqcH61atUkqdB+AAAAb8X4CQAAOJNTg6JevXrpyJEjGjFihNLT09W2bVslJSXlL9C4f/9++fi47bJJAAAATsf4CQAAOJPFMAzD1UU4UlZWloKDg5WZmamgoCBXlwMAAIrBZ7b7oC8AAPAMjvjM5r+fAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJLgiKJk2apPr16ysgIEAxMTFatWpVscdOmTJFXbp0UUhIiEJCQhQXF3fZ4wEAALwR4ycAAOAsTg2KZsyYofj4eCUkJGjdunVq06aNunXrpsOHDxd5/LJly/TQQw/pu+++U0pKiqKionTrrbfq4MGDziwbAADAZRg/AQAAZ7IYhmE462IxMTG69tprNXHiREmSzWZTVFSUnnnmGQ0ZMuRPz8/Ly1NISIgmTpyovn37luiaWVlZCg4OVmZmpoKCgq6ofgAA4Dh8ZheN8RMAACiOIz6znTajKCcnR2vXrlVcXNzFi/v4KC4uTikpKSV6jTNnzuj8+fOqXr16scdkZ2crKyurwAYAAOCJGD8BAABnc1pQdPToUeXl5SksLKzA/rCwMKWnp5foNV566SVFRkYWGCz9UWJiooKDg/O3qKioK6obAADAVRg/AQAAZ/OYp56NHTtW06dP15w5cxQQEFDscUOHDlVmZmb+du
DAASdWCQAA4D4YPwEAgNLyc9aFatasKV9fX2VkZBTYn5GRofDw8MueO27cOI0dO1ZLlixR69atL3us1WqV1Wq94noBAABcjfETAABwNqfNKPL391f79u2VnJycv89msyk5OVmxsbHFnvfmm29q9OjRSkpKUocOHZxRKgAAgFtg/AQAAJzNaTOKJCk+Pl79+vVThw4d1LFjR02YMEGnT59W//79JUl9+/ZV7dq1lZiYKEl64403NGLECH3xxReqX79+/r34VatWVdWqVZ1ZOgAAgEswfgIAAM7k1KCoV69eOnLkiEaMGKH09HS1bdtWSUlJ+Qs07t+/Xz4+Fyc5ffDBB8rJydF9991X4HUSEhI0cuRIZ5YOAADgEoyfAACAM1kMwzBcXYQjZWVlKTg4WJmZmQoKCnJ1OQAAoBh8ZrsP+gIAAM/giM9sj3nqGQAAAAAAAByLoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdk4PiiZNmqT69esrICBAMTExWrVq1WWPnzlzppo1a6aAgABdc801WrBggZMqBQAAcA+MnwAAgLM4NSiaMWOG4uPjlZCQoHXr1qlNmzbq1q2bDh8+XOTxK1as0EMPPaRHH31U69evV8+ePdWzZ09t2rTJmWUDAAC4DOMnAADgTBbDMAxnXSwmJkbXXnutJk6cKEmy2WyKiorSM888oyFDhhQ6vlevXjp9+rS++eab/H3XXXed2rZtq8mTJ5fomllZWQoODlZmZqaCgoLK540AAIByx2d20Rg/AQCA4jjiM9uvXF6lBHJycrR27VoNHTo0f5+Pj4/i4uKUkpJS5DkpKSmKj48vsK9bt26aO3dusdfJzs5WdnZ2/teZmZmSzMYDAADu68JntRP/D8vtMX4CAACX44jxk9OCoqNHjyovL09hYWEF9oeFhWnbtm1FnpOenl7k8enp6cVeJzExUaNGjSq0PyoqqgxVAwAAZ/v9998VHBzs6jLcAuMnAABQEuU5fnJaUOQsQ4cOLfC/aCdOnFC9evW0f/9+Bp0ulpWVpaioKB04cIBp7C5EP7gP+sI90A/uIzMzU3Xr1lX16tVdXUqFw/jJPfHzyX3QF+6DvnAP9IP7cMT4yWlBUc2aNeXr66uMjIwC+zMyMhQeHl7kOeHh4aU6XpKsVqusVmuh/cHBwfwBdhNBQUH0hRugH9wHfeEe6Af34ePj9Ieyui3GT5D4+eRO6Av3QV+4B/rBfZTn+MlpIzF/f3+1b99eycnJ+ftsNpuSk5MVGxtb5DmxsbEFjpekxYsXF3s8AACAN2H8BAAAnM2pt57Fx8erX79+6tChgzp27KgJEybo9OnT6t+/vySpb9++ql27thITEyVJAwcOVNeuXfX222+re/fumj59utasWaOPPvrImWUDAAC4DOMnAADgTE4Ninr16qUjR45oxIgRSk9PV9u2bZWUlJS/4OL+/fsLTJfq1KmTvvjiC73yyit6+eWX1bhxY82dO1etWrUq8TWtVqsSEhKKnE4N56Iv3AP94D7oC/dAP7gP+qJojJ8qLvrBfdAX7oO+cA/0g/twRF9YDJ5BCwAAAAAAADlxjSIAAAAAAAC4N4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACR5SVA0adIk1a9fXwEBAYqJidGqVasue/zMmTPVrFkzBQQE6JprrtGCBQucVKl3K00/TJkyRV26dFFISIhCQkIUFxf3p/2Gkivt34kLpk+fLovFop49ezq2wAqktH1x4sQJDRgwQBEREbJarWrSpAk/o8pBafthwoQJatq0qSpXrqyoqCgNHjxY586dc1K13uuHH35Qjx49FBkZKYvForlz5/7pOcuWLVN0dLSsVqsaNWqkqVOnOrzOioLxk3tg/OQ+GD+5D8ZP7oHxk+u5bOxkeLjp06cb/v7+xqeffmps3rzZePzxx41q1aoZGRkZRR6/fPlyw9fX13jzzTeNLVu2GK+88opRqVIlY+PGjU6u3LuUth8efvhhY9KkScb69euNrVu3Go888ogRHBxs/Prrr06u3PuUti8u2LNnj1G7dm2jS5cuxt133+2cYr1cafsiOzvb6NChg3HHHXcYP/30k7Fnzx5j2bJlRmpqqpMr9y6l7Yf//Oc/htVqNf7zn/8Ye/bsMRYuXGhEREQYgwcPdnLl3mfBggXGsGHDjNmzZxuSjDlz5lz2+N27dxtVqlQx4uPjjS1bthjvvfee4evrayQlJTmnYC/G+Mk9MH5yH4yf3AfjJ/fA+Mk9uGrs5PFBUceOHY0BAwbkf52Xl2dERkYaiYmJRR7/wAMPGN27dy+wLyYmxvj73//u0Dq9XWn74Y9yc3ONwMBAY9q0aY4qscIoS1/k5uYanTp1Mj7++GOjX79+DHTKSWn74oMPPjAaNGhg5OTkOKvECqG0/TBgwADjpptuKrAvPj7e6Ny5s0PrrGhKMth58cUXjZYtWxbY16tXL6Nbt24OrKxiYPzkHhg/uQ/GT+6D8ZN7YPzkfpw5dvLoW89ycnK0du1axcXF5e/z8fFRXFycUlJSijwnJSWlwPGS1K1bt2KPx58rSz/80ZkzZ3T+/HlVr17dUWVWCGXti1dffVWhoaF69NFHnVFmhVCWvpg3b55iY2M1YMAAhYWFqVWrVhozZozy8vKcVbbXKUs/dOrUSWvXrs2fXr17924tWLBAd9xxh1NqxkV8ZjsG4yf3wPjJfTB+ch+Mn9wD4yfPVV6f137lWZSzHT16VHl5eQoLCyuwPywsTNu2bSvynPT09CKPT09Pd1id3q4s/fBHL730kiIjIwv9oUbplKUvfvrpJ33yySdKTU11QoUVR1n6Yvfu3Vq6dKl69+6tBQsWa
OfOnfrHP/6h8+fPKyEhwRlle52y9MPDDz+so0eP6vrrr5dhGMrNzdWTTz6pl19+2Rkl4xLFfWZnZWXp7Nmzqly5sosq82yMn9wD4yf3wfjJfTB+cg+MnzxXeY2dPHpGEbzD2LFjNX36dM2ZM0cBAQGuLqdCOXnypPr06aMpU6aoZs2ari6nwrPZbAoNDdVHH32k9u3bq1evXho2bJgmT57s6tIqlGXLlmnMmDF6//33tW7dOs2ePVvz58/X6NGjXV0aAORj/OQ6jJ/cC+Mn98D4ybt49IyimjVrytfXVxkZGQX2Z2RkKDw8vMhzwsPDS3U8/lxZ+uGCcePGaezYsVqyZIlat27tyDIrhNL2xa5du7R371716NEjf5/NZpMk+fn5KS0tTQ0bNnRs0V6qLH8vIiIiVKlSJfn6+ubva968udLT05WTkyN/f3+H1uyNytIPw4cPV58+ffTYY49Jkq655hqdPn1aTzzxhIYNGyYfH/6PxVmK+8wOCgpiNtEVYPzkHhg/uQ/GT+6D8ZN7YPzkucpr7OTRveXv76/27dsrOTk5f5/NZlNycrJiY2OLPCc2NrbA8ZK0ePHiYo/HnytLP0jSm2++qdGjRyspKUkdOnRwRqler7R90axZM23cuFGpqan521133aUbb7xRqampioqKcmb5XqUsfy86d+6snTt35g82JWn79u2KiIhgkFNGZemHM2fOFBrMXBh8musIwln4zHYMxk/ugfGT+2D85D4YP7kHxk+eq9w+r0u19LUbmj59umG1Wo2pU6caW7ZsMZ544gmjWrVqRnp6umEYhtGnTx9jyJAh+ccvX77c8PPzM8aNG2ds3brVSEhI4PGu5aC0/TB27FjD39/fmDVrlnHo0KH87eTJk656C16jtH3xRzy1o/yUti/2799vBAYGGk8//bSRlpZmfPPNN0ZoaKjx2muvueoteIXS9kNCQoIRGBhofPnll8bu3buNRYsWGQ0bNjQeeOABV70Fr3Hy5Elj/fr1xvr16w1Jxvjx443169cb+/btMwzDMIYMGWL06dMn//gLj3h94YUXjK1btxqTJk0q0yNeURjjJ/fA+Ml9MH5yH4yf3APjJ/fgqrGTxwdFhmEY7733nlG3bl3D39/f6Nixo7Fy5cr873Xt2tXo169fgeO/+uoro0mTJoa/v7/RsmVLY/78+U6u2DuVph/q1atnSCq0JSQkOL9wL1TavxOXYqBTvkrbFytWrDBiYmIMq9VqNGjQwHj99deN3NxcJ1ftfUrTD+fPnzdGjhxpNGzY0AgICDCioqKMf/zjH8bx48edX7iX+e6774r82X+h/fv162d07dq10Dlt27Y1/P39jQYNGhifffaZ0+v2Voyf3APjJ/fB+Ml9MH5yD4yfXM9VYyeLYTAPDAAAAAAAAB6+RhEAAAAAAADKD0ERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACBJ+n/XQ9XTa8ET/gAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "============================================================\n", + "WRONG APPROACH: Naive Discretization\n", + "============================================================\n", + "Step 1: x = 7.000\n", + "Step 2: x = 5.500\n", + "Step 3: x = 4.750\n", + "Step 4: x = 4.375\n", + "Step 5: x = 4.188\n", + "\n", + "============================================================\n", + "CORRECT APPROACH: Solving the Differential Equation\n", + "============================================================\n", + "\n", + "We need to solve: dx/dt = Ax + Bu\n", + "This is a first-order linear ODE with constant coefficients.\n", + "\n", + "Step 1: Homogeneous solution (u=0)\n", + " dx/dt = Ax\n", + " Solution: x_h(t) = e^(At) * x(0)\n", + "\n", + "Step 2: Particular solution (variation of parameters)\n", + " Full solution: x(t) = e^(At)*x(0) + ∫[0,t] e^(A(t-τ))*B*u(τ) dτ\n", + "\n", + "Step 3: Apply ZOH (u is constant over [0,Δ])\n", + " x(Δ) = e^(AΔ)*x(0) + (∫[0,Δ] e^(As) ds)*B*u\n", + " x(Δ) = e^(AΔ)*x(0) + A^(-1)*(e^(AΔ) - 1)*B*u\n", + "\n", + "Discretized system:\n", + "A_d = e^(AΔ) = e^(-0.5*1.0) = 0.607\n", + "B_d = (e^(AΔ)-1)/A * B = 1.574\n", + "Step 1: x = 7.639\n", + "Step 2: x = 6.207\n", + "Step 3: x = 5.339\n", + "Step 4: x = 4.812\n", + "Step 5: x = 4.493\n" + ] + }, + { + "ename": "IndexError", + "evalue": "index 1000 is out of bounds for axis 0 with size 1000", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[1], line 148\u001b[0m\n\u001b[1;32m 145\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(\u001b[38;5;28mlen\u001b[39m(x_naive_history)):\n\u001b[1;32m 146\u001b[0m \u001b[38;5;66;03m# Get continuous value at discrete time point\u001b[39;00m\n\u001b[1;32m 147\u001b[0m continuous_idx \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mint\u001b[39m(i \u001b[38;5;241m*\u001b[39m \u001b[38;5;28mlen\u001b[39m(t_continuous) \u001b[38;5;241m/\u001b[39m \u001b[38;5;241m5\u001b[39m)\n\u001b[0;32m--> 148\u001b[0m continuous_value \u001b[38;5;241m=\u001b[39m \u001b[43mx_continuous\u001b[49m\u001b[43m[\u001b[49m\u001b[43mcontinuous_idx\u001b[49m\u001b[43m]\u001b[49m\n\u001b[1;32m 150\u001b[0m errors_naive\u001b[38;5;241m.\u001b[39mappend(\u001b[38;5;28mabs\u001b[39m(x_naive_history[i] \u001b[38;5;241m-\u001b[39m continuous_value))\n\u001b[1;32m 151\u001b[0m errors_correct\u001b[38;5;241m.\u001b[39mappend(\u001b[38;5;28mabs\u001b[39m(x_correct_history[i] \u001b[38;5;241m-\u001b[39m continuous_value))\n", + "\u001b[0;31mIndexError\u001b[0m: index 1000 is out of bounds for axis 0 with size 1000" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABIoAAANECAYAAADfVMS/AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQABAABJREFUeJzs3XV8FEcbwPHfxYlDSICUECRAElyLe3EtWrxQrEgp0hZa3IpTtI5LaXFocShSKFCkuHsJBIgQQvTm/WPfO3K5SwgQQZ4vn/2Qm53dnZ2T3XtuRKeUUgghhBBCCCGEEEKIt55VRhdACCGEEEIIIYQQQrwaJFAkhBBCCCGEEEIIIQAJFAkhhBBCCCGEEEKI/5NAkRBCCCGEEEIIIYQAJFAkhBBCCCGEEEIIIf5PAkVCCCGEEEIIIYQQApBAkRBCCCGEEEIIIYT4PwkUCSGEEEIIIYQQQghAAkVCCCGEEEIIIYQQ4v8kUCREEnQ6HX369MnoYjy3zp07kzt37ufe7tq1a+h0OhYsWJDqZXoZuXPnpmHDhhldDCGEEOKt8Lre/wjxNqpWrRrVqlXL6GKIN5AEisQbZ+XKleh0OtasWWO2rlixYuh0Onbt2mW2LleuXFSoUCE9imji2rVrfPjhh+TLlw8HBweyZ89OlSpVGDFiRLqXJaOcOXOGkSNHcu3atQwth1KKxYsXU6VKFdzd3XF0dKRIkSKMHj2ax48fm+WvVq0aOp0OnU6HlZUVrq6uFCxYkA4dOrBt2zaLx8idO7dxm8RL3bp1ky3f7t27jXmXLFliMU/FihXR6XQULlz4+SsAmDt3rsVg4avyHAkhhLDsVb3/ed7rx8iRI9HpdNy/f9/ievkByTLDD34JF1dXV4oXL87s2bOJj49/of3+/vvvjBw5MnUL+4oJDg7mk08+wd/fn0yZMuHl5UXZsmX5/PPPiYiIyOjiCZEhbDK6AEKktkqVKgGwb98+mjVrZkwPDw/n1KlT2NjYsH//fqpXr25cd/PmTW7evEmbNm3StayXLl2iTJkyZMqUiS5dupA7d27u3LnD0aNHmThxIqNGjUq3svj6+vLkyRNsbW3T7ZgGZ86cYdSoUVSrVu2FWkOlhvj4eNq2bcvKlSupXLkyI0eOxNHRkb179zJq1Ch+/fVXtm/fTrZs2Uy2y5kzJxMmTADg8ePHXLp0idWrV7NkyRJatWrFkiVLzOq0ePHiDBw40KwM3t7eKSqrg4MDy5Yto3379ibp165d46+//sLBweF5Tt3E3LlzyZo1K507dzZJfxWeIyGEEEl7Ve9/5PqRvj744APq168PQFhYGL///jt9+/bl+vXrTJ48+bn39/vvvzNnzpw3Nlj08OFDSpcuTXh4OF26dMHf358HDx7w77//Mm/ePHr16oWzs3NGF1OIdCeBIvHG8fb2Jk+ePOzbt88k/cCBAyilaNmypdk6w2PDTVZ6mT59OhERERw/fhxfX1+Tdffu3UvXsuh0upcKMLzuJk2axMqVKxk0aJDJjVT37t1p1aoVTZs2pXPnzvzxxx8m27m5uZkFbL7++mv69evH3LlzyZ07NxMnTjRZ/84775ht8zzq16/P+vXruX//PlmzZjWmL1u2jGzZspE/f35CQkJeeP/p6fHjxzg5OWV0MYQQ4rX3Ot3/iLRTsmRJk3uMjz/+mHfffZdly5a9UKDoTffTTz9x48YN9u/fb9ayLjw8HDs7uwwqmRAZS7qeiTdSpUqVOHbsGE+ePDGm7d+/n0KFClGvXj0OHjyIXq83WafT6ahYsaLZvtauXUvhwoWxt7enUKFCbN682bhu165dSTbzXrZsGTqdjgMHDiRZzsuXL5MzZ06zIBGAl5eXWdrcuXMpVKgQ9vb2eHt707t3b0JDQ5Pcf2xsLFmyZOHDDz80WxceHo6DgwODBg0CLI9R1LlzZ5ydnbl9+zZNmzbF2dkZT09PBg0aZNaE+cGDB3To0AFXV1fc3d3p1KkTJ06ceOa4RwsWLKBly5YAVK9e3dhcevfu3Sb59u3bR9myZXFwcCBv3rwsWrTIbF+hoaH0798fHx8f7O3t8fPzY+LEiSbPtSVPnjxh8uTJFChQwNg6KKFGjRrRqVMnNm/ezMGDB5PdF4C1tTUzZ84kMDCQ2bNnExYW9sxtnkeTJk2wt7fn119/NUlftmwZrVq1wtra2myb+fPnU6NGDby8vLC3tycwMJB58+aZ5MmdOzenT5/mzz//ND4P1apVS9Fz9Mcff1C5cmWcnJxwcXGhQYMGnD592mT/htfT5cuXqV+/Pi4uLrRr1w6Aixcv0rx5c7Jnz46DgwM5c+akTZs2qV53QgjxJkuv+x+A69ev8/HHH1OwYEEyZcqEh4cHLVu2NOliltJr/MuaMmUKFSpUwMPDg0yZMlGqVCl+++03s3yG8ZeWLl1KwYIFcXBwoFSpUuzZs8ckn6H727lz52jVqhWurq54eHjwySefEBUVZbbfJUuWUKpUKTJlykSWLFlo06YNN2/eNMlTrVo1ChcuzJkzZ6hevTqOjo688847TJo0yWx/s2bNolChQjg6OpI5c2ZKly7NsmXLXqhudDod2bJlw8bGvH3As67dnTt3Zs6cOcb9GBbQAlLvv/++yf6KFCmCTqfj33//Nab98ssv6HQ6zp49a0y7ffs2Xbp0IVu2bMbX188//2xWvujoaEaMGIGfnx/29vb4+Pjw2WefER0dbXaOffr0eeZr1pLLly9jbW1NuXLlzNa5urqa/Ii6d+9eWrZsSa5cuYzl+fTTT03eb4Z6c3Z25saNGzRs2BBnZ2feeecdY12ePHmSGjVq4OTkhK+vr9lzu2DBAnQ6HXv27KFHjx54eHjg6upKx44dU/RDYErrbdu2bVSqVAl3d3ecnZ0pWLAgQ4cOfeb+xdtBAkXijVSpUiViY2P5+++/jWmGXwoqVKhAWFgYp06dMlnn7++Ph4eHyX727dvHxx9/TJs2bZg0aRJRUVE0b96cBw8eANpF38fHh6VLl5qVYenSpeTLl4/y5csnWU5fX19u3rzJzp07n3lOI0eOpHfv3nh7ezN16lSaN2/Od999R+3atYmNjbW4ja2tLc2aNWPt2rXExMSYrFu7di3R0dHPbG4eHx9PnTp18PDwYMqUKVStWpWpU6fy/fffG/Po9XoaNWrE8uXL6dSpE+PGjePOnTt06tTpmedVpUoV+vXrB8DQoUNZvHgxixcvJiAgwJjn0qVLtGjRgvfee4+pU6eSOXNmOnfubHIzExkZSdWqVVmyZAkdO3Zk5syZVKxYkSFDhjBgwIBky7Bv3z5CQkJo27atxRspgI4dOwKwcePGZ54TaMGiDz74gMjISLNfcGNjY7l//77ZkvhGIymOjo40adKE5cuXG9NOnDjB6dOnadu2rcVt5s2bh6+vL0OHDmXq1Kn4+Pjw8ccfG29aAGbMmEHOnDnx9/c3Pg9ffvnlM5+jxYsX06BBA5ydnZk4cSLDhg3jzJkzVKpUyWxMiri4OOrUqYOXlxdTpkyhefPmxMTEUKdOHQ4ePEjfvn2ZM2cO3b
t358qVK8kGQoUQQphKr/sfgMOHD/PXX3/Rpk0bZs6cSc+ePdmxYwfVqlUjMjISSNk1PikPHz60eK209OPPN998Q4kSJRg9ejTjx4/HxsaGli1bsmnTJrO8f/75J/3796d9+/aMHj2aBw8eULduXZN6MWjVqhVRUVFMmDCB+vXrM3PmTLp3726SZ9y4cXTs2JH8+fMzbdo0+vfvz44dO6hSpYrZNSwkJIS6detSrFgxpk6dir+/P59//rlJa+UffviBfv36ERgYyIwZMxg1ahTFixc3eU6TExkZaayrK1euMGfOHDZv3mx2T5aSa3ePHj147733jPkNC0DlypVN7m8ePnzI6dOnsbKyYu/evcb0vXv34unpaXzO7969S7ly5di+fTt9+vThm2++wc/Pj65duzJjxgzjdnq9nsaNGzNlyhQaNWrErFmzaNq0KdOnT6d169Zm552S16wlvr6+xMfHG88rOb/++iuRkZH06tWLWbNmUadOHWbNmmW8R0woPj6eevXq4ePjw6RJk8idOzd9+vRhwYIF1K1bl9KlSzNx4kRcXFzo2LEjV69eNdtHnz59OHv2LCNHjqRjx44sXbqUpk2bopRKsowprbfTp0/TsGFDoqOjGT16NFOnTqVx48bs37//mfUg3hJKiDfQ6dOnFaDGjBmjlFIqNjZWOTk5qYULFyqllMqWLZuaM2eOUkqp8PBwZW1trbp162ayD0DZ2dmpS5cuGdNOnDihADVr1ixj2pAhQ5S9vb0KDQ01pt27d0/Z2NioESNGJFvOU6dOqUyZMilAFS9eXH3yySdq7dq16vHjxyb57t27p+zs7FTt2rVVfHy8MX327NkKUD///LMxrVOnTsrX19f4eMuWLQpQGzZsMNln/fr1Vd68eY2Pr169qgA1f/58k30BavTo0SbblihRQpUqVcr4eNWqVQpQM2bMMKbFx8erGjVqmO3Tkl9//VUBateuXWbrfH19FaD27NljUh/29vZq4MCBxrQxY8YoJycndeHCBZPtv/jiC2Vtba1u3LiR5PFnzJihALVmzZok8zx8+FAB6v333zemVa1aVRUqVCjJbdasWaMA9c0335idj6VlwoQJSe5LKaV27dqlAPXrr7+qjRs3Kp1OZzyvwYMHG59PS+WKjIw021+dOnVMXgNKKVWoUCFVtWpVs7xJPUePHj1S7u7uZu+foKAg5ebmZpJueD198cUXJnmPHTtmPC8hhBAvLj3vfyxdVw4cOKAAtWjRImNactd4S0aMGJHkddKwNGjQwGSbxGWJiYlRhQsXVjVq1DA7N0AdOXLEmHb9+nXl4OCgmjVrZlaGxo0bm2z/8ccfK0CdOHFCKaXUtWvXlLW1tRo3bpxJvpMnTyobGxuT9KpVq5rVTXR0tMqePbtq3ry5Ma1JkybJ3lskxXAfZ2np1auX0uv1xrzPc+3u3bu3svSV0fC8njlzRiml1Pr165W9vb1q3Lixat26tTFf0aJFTeq2a9euKkeOHOr+/fsm+2vTpo1yc3MzPpeLFy9WVlZWau/evSb5vv32WwWo/fv3G9NS+pq1JCgoSHl6eipA+fv7q549e6ply5aZ3NcbWHrNT5gwQel0OnX9+nVjmuF+Z/z48ca0kJAQlSlTJqXT6dSKFSuM6efOnVOAyXeG+fPnK0CVKlVKxcTEGNMnTZqkALVu3TpjWtWqVU3u21Jab9OnT1eACg4OTrZ+xNtLWhSJN1JAQAAeHh7GXzpOnDjB48ePjX2PK1SoYIyYHzhwgPj4eIv982vVqkW+fPmMj4sWLYqrqytXrlwxpnXs2JHo6GiTJs6//PILcXFxzxyHplChQhw/fpz27dtz7do1vvnmG5o2bUq2bNn44YcfjPm2b99OTEwM/fv3x8rq6du2W7duuLq6WvzFzKBGjRpkzZqVX375xZgWEhLCtm3bLP4iY0nPnj1NHleuXNmkDjZv3oytrS3dunUzpllZWdG7d+8U7f9ZAgMDqVy5svGxp6cnBQsWNCnDr7/+SuXKlcmcObPJL4+1atUiPj7erFl5Qo8ePQLAxcUlyTyGdeHh4Skut2HwQ8P+Dd599122bdtmtnzwwQcp3nft2rXJkiULK1asQCnFihUrkt0+U6ZMxr/DwsK4f/8+VatW5cqVKy/VvWvbtm2EhobywQcfmNS7tbU17777rsUZdnr16mXy2M3NDYAtW7YYf4UWQgjx/NLz/ifhdSU2NpYHDx7g5+eHu7s7R48efelzWbVqlcVrZeJJJRKXJSQkhLCwMCpXrmyxHOXLl6dUqVLGx7ly5aJJkyZs2bLFrFt94vuYvn37AtoAzwCrV69Gr9fTqlUrk2tg9uzZyZ8/v9k10NnZ2eTe0M7OjrJly5rUq7u7O7du3eLw4cPPrCNLunfvbqyrVatW0bt3b7777juT1tUvcu1OzHBfZri/2rt3L2XKlOG9994ztigKDQ3l1KlTxrxKKVatWkWjRo1QSpkcu06dOoSFhRmfs19//ZWAgAD8/f1N8tWoUQPArIwpec1aki1bNk6cOEHPnj0JCQnh22+/pW3btnh5eTFmzBiT1jsJX2ePHz/m/v37VKhQAaUUx44dM9v3Rx99ZPzb3d2dggUL4uTkRKtWrYzpBQsWxN3d3WI5u3fvbjIhSq9evbCxsTG+/ixJab25u7sDsG7dumcO0SDeTjKYtXgj6XQ6KlSowJ49e9Dr9ezfvx8vLy/8/PwA7UZp9uzZAMYbJks3Srly5TJLy5w5s0n/YH9/f8qUKcPSpUvp2rUroHU7K1eunPF4ySlQoACLFy8mPj6eM2fOsHHjRiZNmkT37t3JkycPtWrV4vr164B2MUnIzs6OvHnzGtdbYmNjQ/PmzVm2bBnR0dHY29uzevVqYmNjUxQocnBwwNPTM9k6uH79Ojly5MDR0dEkX0rOPyVS8jxcvHiRf//916ysBskNDm4IAiUO6CSUkmBSYoYpVRNvkzVrVmrVqpXi/Vhia2tLy5YtWbZsGWXLluXmzZtJdjsD7XU+YsQIDhw4YBaMCQsLMwZrntfFixcBjDcgibm6upo8trGxIWfOnCZpefLkYcCAAUybNo2lS5dSuXJlGjduTPv27V+4XEII8TZKz/ufJ0+eMGHCBObPn8/t27dNvlCnxvhyVapUMZmwwcDSxBsbN25k7NixHD9+3GQcFsN4Ognlz5/fLK1AgQJERkYSHBxM9uzZk8ybL18+rKysjF2zLl68iFLK4j4Bs1lPc+bMaVamzJkzm4zp8/nnn7N9+3bKli2Ln58ftWvXpm3bthbHkbIkf/78JvcY77//PjqdjhkzZtClSxeKFCny3NduSwyTZ+zdu5cePXqwd+9eqlevTpUqVejbty9Xrlzh7Nmz6PV6Y6AoODiY0NBQvv/+e5MhDBIy3K9dvHiRs2fPpvi+LiWv2aTkyJGDefPmMXfuXC5evMiWLVuYOHEiw4cPJ0eOHMaAz40bNxg+fDjr168322/i17yl+2c3NzeLrwE3NzeL5Uz8unJ2diZHjhxm3foTSmm9tW7dmh9//JGPPvqIL774gpo1a
/L+++/TokULkx+lxdtLAkXijVWpUiU2bNjAyZMnzWYyqFChAoMHD+b27dvs27cPb29v8ubNa7YPS4MCA2Z9gzt27Mgnn3zCrVu3iI6O5uDBg8YbsZSytramSJEiFClShPLly1O9enWWLl360gEFgDZt2vDdd9/xxx9/0LRpU1auXIm/vz/FihVLUbkyWkqeB71ez3vvvcdnn31mMW+BAgWS3L+h3/y///5L06ZNLeYx3MQFBgampMgAxvEOUitglljbtm359ttvGTlyJMWKFUuybJcvX6ZmzZr4+/szbdo0fHx8sLOz4/fff2f69Okv9UuSYdvFixeb3FwbJB7zyd7e3uINyNSpU+ncuTPr1q1j69at9OvXjwkTJnDw4EGzwJIQQoikpdf9T9++fZk/fz79+/enfPnyuLm5odPpaNOmTbq2UNi7dy+NGzemSpUqzJ07lxw5cmBra8v8+fNfeADopCT+gq/X69HpdPzxxx8W6yzxtOopqdeAgADOnz/Pxo0b2bx5M6tWrWLu3LkMHz6cUaNGvVC5a9asyezZs9mzZw9FihR57mt3UipVqsSOHTt48uQJ//zzD8OHD6dw4cK4u7uzd+9ezp49i7OzMyVKlACe3jO0b98+yXEsixYtasxbpEgRpk2bZjGfj4+PyeOU3rMnR6fTUaBAAQoUKECDBg3Inz8/S5cu5aOPPiI+Pp733nuPhw8f8vnnn+Pv74+TkxO3b9+mc+fOZq/5pMqTGuVMTkrrLVOmTOzZs4ddu3axadMmNm/ezC+//EKNGjXYunXrK3H/LzKWBIrEG8vwC9m+ffvYv38//fv3N64rVaoU9vb27N69m7///pv69eu/1LHatGnDgAEDWL58OU+ePMHW1jbF3bosKV26NAB37twBMM6Kdv78eZMbupiYGK5evfrMYFKVKlXIkSMHv/zyC5UqVWLnzp18+eWXL1y+xHx9fdm1axeRkZEmrYouXbqUou0t/eL3vPLly0dERMQLBdYMMz4sW7aML7/80uLF0TDLWsOGDVO0z/j4eJYtW4ajo2OaTTtcqVIlcuXKxe7du5k4cWKS+TZs2EB0dDTr1683+cXNUtPypJ6LpNINzby9vLxeOqhpCJR+9dVX/PXXX1SsWJFvv/2WsWPHvtR+hRDibZJe9z+//fYbnTp1YurUqca0qKgoswGcU+Man5xVq1bh4ODAli1bsLe3N6bPnz/fYn5Da5qELly4gKOjo1krjIsXL5InTx7j40uXLqHX68mdOzegXQOVUuTJkyfZH6Sel5OTE61bt6Z169bExMTw/vvvM27cOIYMGWKxRdWzxMXFAU9bOj/PtTu5569y5crMnz+fFStWEB8fT4UKFbCysqJSpUrGQFGFChWM91Wenp64uLgQHx//zOPmy5ePEydOULNmzTR/DVmSN29eMmfObLwXP3nyJBcuXGDhwoUmg1dv27Ytzcpw8eJFqlevbnwcERHBnTt3kn3fPk+9WVlZUbNmTWrWrMm0adMYP348X375Jbt27UqVH6rF603alYk3VunSpXFwcGDp0qXcvn3b5Bc1e3t7SpYsyZw5c3j8+PFLf5HPmjUr9erVY8mSJSxdupS6detabC6d2N69ey3OWGboe2zoalarVi3s7OyYOXOmyS8OP/30E2FhYTRo0CDZ41hZWdGiRQs2bNjA4sWLiYuLe6lAVmJ16tQhNjbWZFwlvV5vMqNWcpycnABeaoarVq1aceDAAbZs2WK2LjQ01HiTZImjoyODBg3i/PnzFgNomzZtYsGCBdSpU8fi9KmJxcfH069fP86ePUu/fv1S1IT7Reh0OmbOnMmIESPo0KFDkvkMN2iJuwVYuol2cnKy+Dwk9RzVqVMHV1dXxo8fb/G1HBwc/MzzCA8PN3t+ihQpgpWVldlUrkIIIZKXXvc/1tbWZq0gZs2aZTbOT2pc459VDp1OZ3Lca9eusXbtWuPjX375xXhPdeDAAZo3b067du0AuHnzJuvWraN27dpmPxQlvo+ZNWsWAPXq1QNg9uzZWFlZMWrUKLO6UEo9c8atxB49eoROp+P48ePGNDs7OwIDA1FKERsby/Lly5+7pe2GDRsAjC3Jn+fandzzZ+hSNnHiRIoWLWrsLl65cmV27NjBkSNHTMaYtLa2pnnz5qxatcriLHMJj9uqVStu375tcm9p8OTJEx4/fvzM806JgIAAiz+2HTp0iAcPHhhfN5bupZRSfPPNNwBMmTIlVX+EBfj+++9Nnp958+YRFxdnfP1ZktJ6e/jwodn64sWLA8i9lwCkRZF4g9nZ2VGmTBn27t2Lvb29ycCFoDW/NvwKlhotPjp27EiLFi0AGDNmTIq2mThxIv/88w/vv/++sant0aNHWbRoEVmyZDH+Cujp6cmQIUMYNWoUdevWpXHjxpw/f565c+dSpkyZZw6aDVpf5FmzZjFixAiKFCmSoqlpU6pp06aULVuWgQMHcunSJfz9/Vm/fr3xIvSsXzSKFy+OtbU1EydOJCwsDHt7e2rUqIGXl1eKyzB48GDWr19Pw4YN6dy5M6VKleLx48ecPHmS3377jWvXriUbvPviiy84duwYEydONN5EZsqUiX379rFkyRICAgJYuHCh2XZhYWEsWbIE0KakvXTpEqtXr+by5cu0adPG4mvh9u3bxm0ScnZ2TrLrW1KaNGlCkyZNks1Tu3Zt7OzsaNSoET169CAiIoIffvgBLy8v4y9lBqVKlWLevHmMHTsWPz8/vLy8qFGjRrLP0bx58+jQoQMlS5akTZs2eHp6cuPGDTZt2kTFihWf2Q1z586d9OnTh5YtW1KgQAHi4uJYvHix8YZSCCFEyqXX/U/Dhg1ZvHgxbm5uBAYGcuDAAbZv346Hhwe3bt0yu/Y3a9bM+Hfjxo1Zt24dMTExzJo1iyVLlnDhwgWsra0tXvvnz59Pr169zMYSjIiIYPr06SilqFu3Lm3btuXevXvMmTMHPz8//v33X4KDg+nUqRNz586la9euBAYG8ueff9KrVy8mTZrE3LlzASx267p69SqNGzembt26HDhwgCVLltC2bVuKFStGREQEN2/epGfPnsybN49r167RtGlTXFxcuHr1KmvWrKF79+4MGjTIYv19+umnXL9+3eTHpBMnTgAwZMgQKleuTLZs2Th79iyzZ8+mQYMGuLi40KhRI+rWrZvk83L06FHjPcajR4/YsWMHq1atokKFCtSuXRvQxiBK6bXb8Prp168fderUwdramjZt2gBa1/rs2bNz/vx540DfoLVk//zzzwFMAkUAX3/9NYsWLaJ06dL07NmTwMBAHj58yNGjR9m+fbvx3rFDhw6sXLmSnj17smvXLipWrEh8fDznzp1j5cqVbNmyxdgC/0XFxcVx8eJFxo4dy/nz5ylVqhR2dnacPXuWn3/+GQcHB4YOHQpoY5Lmy5ePTz/9lM6dOzNp0iR+//1349hCPXr0MJnUJTXExMRQs2ZNWrVqZbzvr1SpEo0bN05ym5TW2+jRo9mzZw8NGjTA19eXe/fuMXfuXHLmzJlmLeHFayYdZ1gTIt0N
GTJEAapChQpm61avXq0A5eLiouLi4szWA6p3795m6b6+vqpTp05m6dHR0Spz5szKzc1NPXnyJEXl279/v+rdu7cqXLiwcnNzU7a2tipXrlyqc+fO6vLly2b5Z8+erfz9/ZWtra3Kli2b6tWrlwoJCTHJ06lTJ+Xr62u2rV6vVz4+PgpQY8eONVtvmFY14VT2nTp1Uk5OTmZ5DdPGJhQcHKzatm2rXFxclJubm+rcubPav3+/AkymAU3KDz/8oPLmzausra1NptH19fU1mwZXKfPpQJXSpnsdMmSI8vPzU3Z2dipr1qyqQoUKasqUKSbTiyYlPj5ezZ8/X1WsWFG5uroqBwcHVahQITVq1CgVERFhsQwkmH7W2dlZ5c+fX7Vv315t3brV4jF8fX2TnL7W0vOW0K5du1I0jXzVqlXNptZdv369Klq0qHJwcFC5c+dWEydOVD///LMC1NWrV435goKCVIMGDZSLi4sCTOo4qefIULY6deooNzc35eDgoPLly6c6d+5sMgVxUq+nK1euqC5duqh8+fIpBwcHlSVLFlW9enW1ffv2ZM9TCCGEZelx/xMSEqI+/PBDlTVrVuXs7Kzq1Kmjzp07p3x9fVX79u3VnTt3jMuUKVOUr6+v0ul0ClA//PCDevDggSpdurQKCAhQy5cvV5cuXVKHDh1S/v7+ClB79uwxHqt3796qdOnSxnIY7gv+/PNP4zTk+fPnV/b29srf31/Nnz/feK+yZMkS5e3tbXJuS5YsMeYvUaKEyfVMqaf3OWfOnFEtWrRQLi4uKnPmzKpPnz7Ge7z9+/crKysr9fjxY7Vq1SpVqVIl5eTkpJycnJS/v7/q3bu3On/+vHGfia/NFSpUUOPGjTO5b5s1a5by9vZWVapUUR4eHsre3l7ly5dPDR48WIWFhSX3lBvv4xIuNjY2Km/evGrw4MHq0aNHZtuk5NodFxen+vbtqzw9PY3PX0ItW7ZUgPrll1+MaTExMcrR0VHZ2dmZ3RNfuHBBAapz587Kx8dH2draquzZs6uaNWuq77//3iRvTEyMmjhxoipUqJCyt7dXmTNnVqVKlVKjRo0yqY/nvWc3OHnypAJU3759VcmSJVWWLFmUjY2NypEjh2rZsqU6evSoSf4zZ86ogIAApdPpVNasWVW3bt3UiRMnUnz/bOn+zFDOhPe68+fPV4D6888/Vffu3VXmzJmVs7OzateunXrw4IHZPhPfD6ek3nbs2KGaNGmivL29lZ2dnfL29lYffPCBunDhQpL1Jd4uEigSIpXExsYqT09P1aVLl4wuyitjzZo1ClD79u3L6KIIIYQQIoPExcWp1q1bKw8PD3X8+HGl1+tVzZo1VdGiRVV4eLhJ3ujoaJU7d27Vrl07Y1r58uVVjx49zPY7depU5ebmpvR6vcXj1qxZ0yx40qFDB5MfSbZv367s7e3Vli1bVNmyZZWDg4PKnj27AlRwcLBxXzNmzFC+vr7K0dFRde3aVX3zzTeqYMGCxvW7du1SVapUUW5ubsrFxUWVLl1anTlzxqxM0dHRysbGxqRM7777rlJKqa5du6rWrVurMWPGKB8fH+Xs7Ky6detmsr2vr69JUOLbb79VhQsXVpkyZVJZsmRRderUUfHx8Uk8E0rt27dPValSRTk4OChvb281YsQI47pr164pOzs7tXr1amPa/Pnzlaurqzp27JhSSqmjR4+qhg0bqmzZsikHBwdVokQJtXv3bpNjXL9+XXXs2FF5eXkpBwcHVbRoUbV37171008/mT0f8+bNs1hOvV6v5s+frwICAowBwITl8vT0VD/88IPJNocOHVL29vbqypUrSimlHjx4oLp37668vLyUq6uratq0qfrvv/+M+RcvXqxy5cplfNy6dWuz19nAgQONQRzDayfhcvbsWbVr1y5la2uroqOjjdutXr1alS5dWmXKlEnly5dP/fzzzyb7zZkzp5o2bZrq1KmTcnNzMzkfQ6Do8OHDFutGiPQggSIhUsmvv/6qALOL5dsiMjLS5HFcXJyqUaOGcnV1NVsnhBBCiLdDXFycatOmjTFIpJRSq1atSvaL8AcffKAKFy5s3N7R0dGstYkhX7Vq1ZI89v3791XVqlVVv3791J07dxSg6tevr9zd3Y15pk6dqjJlyqQaNGigDh48qE6dOqW8vLxMAkVjx45V3t7eat26derSpUvqo48+UpkzZ1atW7dWSil18eJF5erqqubOnasuX76sTp06pebOnavu3btnVqb4+Hj1999/K0AdP35c3blzx9g6vFSpUipLlixq3Lhx6vz58+rXX39VOp3OeG8ZEhKiAGPQZsmSJSpnzpxq48aN6tq1a+rw4cNq5syZSdbHr7/+qjw8PNSCBQvU5cuX1R9//KE8PDzU4sWLjXn69OmjihYtqvR6vdq6datydHRU27ZtM67fuHGjWrZsmTpz5ow6d+6c+uijj5SXl5dx/bVr11S2bNlUy5Yt1cGDB9WFCxfU999/r06cOKEiIiLUgAEDVNWqVY2tzaKioiyW1VCOzZs3qytXrqjZs2cre3t7Y4v7GjVqqE8//dRkm+rVq6t+/foppZQKDQ1VgYGBqkmTJuro0aPq5MmTqkqVKqpGjRrG/AMHDlSNGzc2Pi5QoICaO3euyT5r1qyphg4dqpRS6uHDh6pcuXJqyJAhxvLr9Xo1Y8YMVaxYMeM233//vXJzc1MLFixQV65cUQsWLFBWVlbGVnIPHjxQgMqfP79atmyZunz5svr888+Vvb29io6OlkCReCVIoEiIl3Tw4EH1/fffKx8fH1WiRImMLk6G6dq1q2rbtq2aNWuWmjJliqpQoYKxObgQQggh3j6GIFGWLFmMwQ2llGrcuLEqW7Zskts1bdpUlSxZUiml1KlTp5Lssg2oAQMGJFuGbNmyqeXLlyultC5KZcqUUVWqVDGu79ixo/Lz8zPpIlWuXDljoOjy5cvKxsZG7dixw7j+0aNHSqfTqQkTJiillPrpp5+Ur69virq5K6W1uPbw8DBJi42NVQ4ODmrYsGEm6VmyZDF2Od+9e7dJy5UOHTqotm3bpuiYYWFhysPDw+Q8lNICMh9++KHxcVBQkHJyclLDhw9Xbm5uJkEkS86fP68AY7CrXr16qkmTJknmb9KkiRo4cGCy+9y/f7/KnDmzun//vkl64cKF1cKFC5VSSvXr10/VqVPHuG7z5s3KxcXFGNwbOHCgKlq0qIqNjTXm2bp1q9LpdMbhBGrWrKmGDx+ulFIqIiJCWVlZqf3795scM2vWrCbd6tzd3dWGDRtM8nTu3Fl17NhRKaXU3bt3VaZMmdTKlStN8lSoUEF99tlnSimldu7cqQCTANy///5rfM1JoEi8CmQwayFe0rx581iyZAnFixdnwYIFGV2cDFOjRg2mTp3Kxo0biYqKws/Pj1mzZtGnT5+MLpoQQggh0ll8fDwdOnRg69at7NixwzijEsCxY8eSnLlJKcWxY8eoWbMmoA3ObGdnx4EDB7Cyejphc2RkJBUrVjQbrDuhoKAg7t69a5ztC+DBgwc0bNjQ+PjEiRO
NmECWb24cqV8M47UPnOvLDhJfhxIXwWBtN/gJ0t4FIAh6L28dnqz6gzqg4h/Qvz8qzX+PXAr8Q541L0nNFx0YzbMo7WU1rTanYrWk9pzbgt44iOi3bzqxURERERyVjS1dSzm6GpZ+mb8uwZbs2z02lWQQsPN/1/smRx7fndZdMmGDgQJk0yRSMwI47efBMef9wUl1JBv8vG0aOwYAHMmwdLl0JkJOB3DkosMD2NSs4F/3OJx2chDw0LtuDF+q1oXKIh/r7+V5xz9q7ZdJjZgfDocByWA6ftTPwaFBDE2JZjaVaqmQdfZcanqWcZn6aepW+aRuJ+yrFnKM+eoTy7n6aeiYhczuGARx+FuDiYOTP9jMqpUgV++slMS+va1fQu2rLFDJUpXhwGDzY9jiRVChY0LaCmT4dTp8xoo/d7ZaNuztb4zJwAn52En+bCpmfhQi4ucIo5R0fRfFJTsn0YQo0B7RmyaBrnYs4DpkjUcmJLzkafBUictpbw9Wz0WVpMbMHsXbO98npFRERERNIbFYpExP1y5ICWLWH3bli71tvRpM4ddyQ1u/74Y8ib19zv3h1CQ6FXLzh2zNtRpkuZMkG9evDBB7BqFZw+DTOnBtDx/qbcuf17GHgCxi6FPzpCVH7ifCJZHzOeN9a0JvtHeSjYvTltJpqedTZXL0Am7O8ws4OmoYmIiIiIpIAKRSLiGSVLQt26sGSJmX+U3gQFmUY7Bw/CyJFQqhRERMCAAWZ1tOefhx07rv34JUvIc9dd5vXLVQUGQosW8OWXpqZ4YJ8v3/a6jzZZvyRo9N/w3WpY3R3Ci4JvNP/kmEMsF65ZJEpgYxMeHc7U7VM99EpERERERNIvFYpExHMaNoR8+WDqVIhOp6M7AgLghRdg+3Yzla5ePYiNhe+/h7JloVkz+O235FPsbBurd2989+zB6t07/Uy/87IiReDFF2HyZDgZ5mD9zDoMbDyQh/buI8sPm+BkaW5QI0rksBzM2DnDrfGKiIiIiGQEKhSJiOf4+EDr1nDxIsyenb4LJg6HGf6yciWsXg2PPGLWiZ87F+66C+rUgWnTTCPsRYuw1q8HMF8XLfJy8OmPjw9Uq2Zm/M2baxGxqzJVS+UFK2WPd9pO5v25mlb9v+S7BWsIP3fevQGLiIiIiKRTKhSJiGcFBUHz5mZEzoYN3o7GNerUMd2Zd+6El14Cf3/4/XdTFCtVCl5+GdvHB8B8fe+99F0kSwN8faFI3tw4UrEyR0ym48yI7cyLv9cl12c5yNqjHBXef4pXxnzOz9uXExEd4caIRURERETSB19vByAit6GyZaFGDbNOeqFCZjpaRlCyJHzzDXz4IQwfbprt7NsHJA18seLjYd06M6qoSRPvxZoBtCzVkuk7pqf8ATtagE8c5N8I2Y9xIet2trGdbYd+5JtD5pDA+BKUyVmVu0tW4d7SValWoCp5suRxzwsQEREREUmDVCgSEe9o0sSsHjZ1Krz8slkCK6PImxf69oW334Zy5eDw4SuPad0aXnkF6tc3Tb6Dgz0fZzrXplwbuizowtnos9dtaG1hkTMgJ0vemci6NQH89hssm3eMo/GbTNEo4ZbzEBE+e1kbtZe1Gybzyb8D3gIJpXzuqtxTqiq176hK1fxVyZ8tP5aVwnlvIiIiIiLpiApFIuIdvr7Qpg1MmQLnz0POnN6OyPVWrbp6kQjg3DkYONDcAO680zTGrlvXfC1d2vRBkmsK8A1gbMuxtJjYAgvrqsUi69+xXGNbjqVqqQCqVjR1ScjPkSP5WbXqIX77DVb+An/uPQ35/lM8yr2HCI6w6vQRVq2eBavNeXM48lIpb1UaFDejjqrkq0KRnEVUPBIRERGRdE+FIhHxnty5zaiajMi2TS8iHx/T0Pq/LMu8/uBg2LED9uwxtzFjzPeDgkzvo4TiUc2akCWLR19CetCsVDNmtp1Jh5kdCI8Ox2E5cNrOxK85A3IytuVYmpVqdsVjQ0OhbVtzAzh7NjerVzdi7dpGrF0Lvy+CyJhIyLc5eQEpeDuRnOC3Y/P57dj8xPNl8wmicr4q1C5sRh1VzV+VO3Pfmao+SiIiIiIi3qZCkYiIOyxaZHoRXYttw6lT8OOPpgi0Zo1ZPW3VKtMIOzwcfv7Z3MCMwKpc2RSOEopHBQt65KWkdc1LNeef7v8wdftUpu+YzvGI4+QLzEerMq1oXbY1Ab4BKTpPzpzw0EPmBuB0ws6dOfj997tYu/Yu1q6FbbPB6XMBQrYmH3mUdyvnCGfl0V9YefSXxHNmdmSjYt7K1ApNKh6VCS6Dr0NvvyIiIiKSNlm2nbGX3omMjCQwMJCIiAhy5MjhsvM6nU7CwsIICQnBoekhbqM8e4by7GK2DbVqwcaNVx9NlMCyoEwZ2LLFFIISXLpk9q1alVQ8Onr0ysffcUfy6WoVKpgRTLcxd/8uR0XB+vWwdm3SLSwM8ImF4L+SF4/ybYFMF684h58jgPLBFalZqCpV8lehav6qlA8pn+KCVlrgrjy76z1bUs9dP4vYWCeTJ4cRFxdC7tx6v3EX23YCYUAIlkY1uoVy7BnKs2coz+5n206OHg2jWrUQatZ0bY7d8Z6t/9IUEXG1G40mSmDbsH07dOoEnTubQg+Yxt7Vq5tbly7muCNHTMEooXi0ZQscOmRu48ebx2XLBrVrJxWPatcGfdh2qezZ4d57zQ3Mj+bQIVi71o8NG6qwfn0VNq58nshIwBEHuXf9WzhKmroW6x/FxhN/sPHEH4nn9bF8KZWrHDVDq1Lt35FHlfJWIqtfVu+8UBERERG5balQJCKp8/zzppDh55e0r1s3KFLEs3H88w98/jlERkLWrNC1KxQufOVxS5bA7NlJ26dOQfny8M47EB0N/frB3r1mntHEibceV0JvIofDnDMlli4108rKlzejjP7LssxrK1wYnnjC7IuKMlPUEkYcrVlj9i1ZYm4Jj6tQIfl0tSJFrv4cclMsy6S0SJGkXkdOp/mVWr/el/Xry7F+fTk2rnyK8+cBywlB+5OPPMq/kfgsp9l+egvbT29hzObR5txYFM1emlp3VKV6AVM8qpyvMjkDcnrp1YqIiIjI7UCFIhFJvR49oFix6x8TH598GtR/t68nYUbs9QoaX34JDzwADRuaQsmQITB48JXHNWpkbgk6dYJ77jH3fXzMMvXZspnCkSvExpqVzlJaJAKIiIDnnktdASd79uSvLT4e/vor+XS1Awfgzz/N7euvzXH58ydNVatXzxSoLi/6yS1zOKBkSXNr187si4+H3bth/XoH69eXYP36Emxa9RgXLwLYEHjkiuKRnf0Y+6N2sH/bDiZs+ynx/AUzF6dGoSrUvKzvUXDWYK+8VhERERHJeFQoEhHXadbMDKvYsMGMjomMNMWPY8fg7FkYMQKmTzcjXhwOMwzj1VfNiKDx480cnosXzaifvn3NqmBXExFhVgj78EOzXbeuOfexY6YQci27dpnH1qxptjNlgooV/20y4yL+/mba2cmTyXY7nU7OnDlDrly5cMTHw+jR0KCBmVIWEQEXLpjH3iwfH/NaKlY0
OQWTj4Si0erVpmfSsWMwbZq5AQQEmHwkFI/q1oVcuW4+DrkqHx/TjqpMGXjqKbMvLg527oQtWyw2by78760lp37990HZjiVNWUtYdS3oIEcv7uPonn3M3DM18fy5fAtRKbgq9YtXpUYhUzwqkL0AlkaPiYiIiEgqqVAkIqn36afJR6EMHJi07eOTNLJnyBAzB+fTTyFzZlNAWrzYHJ81KwwfDmPHQseO5vidO2HoULP8FMAHH0D79nDnncmf/+RJU8xIGKFkWWaZ+ZMnr18oWrzYNJfxdfM/faGh5nY5p5O4sDAICTFFMssyo7KyZYNvvoGpU82oIlc2o86fHx591NzAFOHWr0/e6+jMGVixwtwSlC6dfLpayZKaruYGvr6mnlq+vPk1BzOY7tgx2LwZNm/Oz+bN+dmy5SH2/PbvQLvMZ5KKRgm3PLs5E/c3vx77m1+PJU2zzEoIJbNXpXbhqtxbuirVC1alSM4it1Q8io6LZspfU5ixc0bi6nKPlH6ENuXapKtm3CIiIiJybSoUiUjqXW/q2eXTvADq1zdFIjCffhs0MEUiMOuQDxiQdGy1aklFIjCFIleJjjbFkEGDXHfOW5EwqgnM9Lfvvze9iho3dt9zZs5s8t+ggdm2bTPK6vLpart2mYLdzp0wapQ5Lnfu5NPVqlVL+pmKS1kWFChgbg89lLT/3DnYuhU2b87F5s0N2bKlIVvnm4Fo+EdC3i3Ji0fB2znvCGNT1AI2/bWAr/8y5/GLz0lopipUCqnK3SWr0qhcVUrluRMfx40LlLN3zabDzA6ER4fjsBw4bSeO4w5m7JxBlwVdGNtyLM1KNXNPYkRERETEY1QoEhHX+m8BISAVowxSWnwIDjYjYRL6Htm2GU0UfJ0+LatWmWbQ/x3pkxYULGgKbAsXmul4JUt65nkty4weKl3aNCkHM+1vzZqk4tG6dXD6NMyZY25gpuxVq5Z8ulq+fJ6J+TaVLRvUqWNuCZxOOHgQtm3LwbZtDf69wc55cIkLELL1suLRJgjZSqzvWfY5f2Xf8V+ZfhxYAY64rOSKrUzxLFWpVqAq95WpSuMqZcieNVPic83eNZuWE1smPbftTPb1bPRZWkxswcy2M2leqrkHMiIiIiIi7qJCkYh4TuXKZpRKy5aQJQssWABVqqT+PIGBULw4LFtmmlmvXg158lx/2tmiRe4drXOratc2zadnzoRXXvHesvZ58pheU83+HRkSGwubNiVNV1u1Ck6cgLVrzS1hmmGxYklFo3r1oFw5M8VO3MbhMGkvVgyaX1abuXQJ9uzJwrZttf69wV/LYM/+WOw825OPPMq3GWem85zyXcUpVvH7P/DVP8BCf/zPViSvsyrFc5RnTWbT7N3GvmosNjYWFh1mduCf7v9oGpqIiIhIOqZCkYik3n97FL3wgmmifCPVqpmG1W++mbyZ9bVcq0cRQOfO8PnnMHmyKTp16ZL0vWHDoFYtcwM4ehT277/6VLbXXktqJt2hg3kd3brd+LW4mmWZAtqIEabR9DPPpI1Ci59fUi67dTOjtw4cSJqqtmoVbNtm8rt/P4wbZx4XGGiKXwnFo1q1zLAYcbtMmaBsWXN77LGk/Rcv+rFzZ2W2bavMtm3PsXMn7FgZz76zu3Dmvbx4tAkCIonJs47DrOMwcI36UDI2NuHR4UzdPpUnKz7prpcnIiIiIm5m2badgsu/9CsyMpLAwEAiIiLI4cL/oXc6nYSFhRESEoIjLXyYy6CUZ89Qnt0vxTk+fBjGjDF9hO6912Px3ZKICDO6KKF4tHYtnD+f/BgfH6hUKXmvIzdMA9TvcurFxpqe8wmtqbbvcLLl8H72nN9ITNBGqDwasoZBCnpgOywHLUu3ZNpj024qFne9Z0vquetnERvrZPLkMOLiQsidW3+j7mLbTiAMCMGylGd3UI49Q3n2DOXZ/WzbydGjYVSrFkLNmq7NsTveszWiSEQkLSlcGO65B379Fe6449pNw9OSwEBo0sTcwKz7vnVr8tXVDh+GjRvNbfhwc1yhQsmnq1WqdOsr0i1ZQp7Onc1zpOWphmmIn1/SCCTDAZTAtktw9OhjNJ2ylj8jw1J0Lqft5MyFM+4KVUREREQ8QIUiEZG0pn5906V4+nTTryi9Tdny9TW9p6pUMVMEAf7+O2nE0erVpu/R33/DpEnmBmYKYa1aScWjOnWSr4J3I7aN1bs3vnv2YPfuDfffb6b0yU2xLFPLK1EwN9uiHImNq6/HYTnIlSWXB6ITEREREXfRuDIRkbTG4YBWrcz9GTNMX6D0rlAh0zBn6FCzklpEBPzyC3z0ETz4oCkIXbhgRlJ99JFZGz5XLqhQAV5+GX74wcyPul4uFi3CWr8ewHxdtMgzry2Da1mqZYqKRGBGFD1S+hE3RyQiIiIi7qRCkYhIWpQtmykW7d8PK1d6OxrXy5rV9GDq3Rt+/hlOnzZNsb/5xjTyLlHCFIW2bYNvvzX77rzTrGzXqhUMHAhr1kBMjDmfbcN772H7+JhNHx94772MUWTzsjbl2hAUEIR1gyZFFhZBAUG0LtvaQ5GJiIiIiDto6pmISFpVrJhpap3Qr6hwYW9H5D4OB5QrZ24vvWT2nThhikEJ09XWrzf7ZswwNwB/f6heHQoUgHXrEksZVny8Gbm0aFFS7yS5KQG+AYxtOZYWE1tgYWFfZQm0hCLS2JZjCfAN8HSIIiIiIuJCGlEkIpKW3XOPmbY1bZqZmnU7yZsXWraEzz4zxaKICPP100+hRQsIDjYjilatgilTrny8ZZmRSEOHmsLShg0QFqZRRjehWalmzGw7k5wBOQHTi+jyrzkDcjKr7SyalWrmrRBFRERExEU0okhEJC1zOKB1axgxAmbNgrZtb98GzQEBpsl13brw1lum4LN3r8nN4MFXHm/bZgRS167J9/v7Q2ho8lvhwsnva2n2KzQv1Zx/uv/D1O1Tmb5jOscjjpMvMB+tyrSiddnWGkkkIiIikkGoUCQiktblyGFG1owfD2vXmtXAxBTMSpSA334DHx+Ij7/6MTlzmuP+/huOHzejkPbuNbdryZHjygLS5duFCpnC1W0mwDeAJys+Sbvy7QgLCyMkJASHQ4OTRURERDISFYpERNKDkiVNgWjJEtOvqEABb0eUNixaZHoRXYttQ3g49O1rehXFxsLRo3DkSNLt8OHk98PDITIS/vrL3K4lJOT6o5Ly5zcFLBERERGRdESFIhGR9KJRI1PImDLFLBl/G45oSebflc6uOZooQcIKaI0bg58fFC1qbtdy/vyVBaT/bl+4YPodhYWZ3kfXet4CBa4sIF1eXMqT5/adSigiIiIiaZIKRSIi6YWPT1K/ojlzzP3buchwo9FECVK7AlrWrFC6tLldjW3DmTPXHpF05IgZtRQXl7T/WgICrj0iKeF+9uw3jtnTliwhT+fOMHy4KcCJiIiISIahQpGISHoSFATNm5tRRUWLmqXhb0cJo4kcDnA6b3y8w5E0quhWi2uWBblzm1vlylc/Jj7eNNK+WhEp4f6JExAdDXv2mNu1BAZeu4hUuDAULGgadHu
KbWP17o3vnj3YvXvD/fff3gVLERERkQxGhSIRkfSmXDk4eBAWLDDFgrx5vR2R58XGmmJLSopEYI47csQ8zhNFlYRpZwUKQO3aVz8mJsaMPLreFLezZyEiArZuNbdryZv3+qOS8uVzXb+kRYuw1q8HMF9TOlJLPO7LL7/ks88+4/jx41SqVIkvvviCmjVrejssERERSeNUKBIRSY+aNEnqV/TSS6b3zu3E399MJzt5Mtlup9PJmTNnyJUr15WrcYWEeHbkzY34+0OxYuZ2LVFRVy8gXb4dHW1GJ504Af8WcK7g62tGHl1vmluuXDceGfTvSC7bxwcrPt58ddVILXGpSZMm0a1bN0aMGEGtWrUYMmQITZo0YdeuXYSEhHg7PBEREUnDVCgSEUmPfH2hTRv49lv4+Wdo2dLbEXleQqHjck4ncWFhpiiUEZZtz54dypY1t6uxbTh9+vpT3P75x/RLOnTI3K4lc+Yrm23/d3vVKli3joSSkJXa/k/iMYMHD+bFF1/k2WefBWDEiBHMmzeP77//np49e3o5OhEREUnLVCgSEUmv8uSBpk1hxgzTr6hSJW9HJJ5mWeb3IE8eqFr16sfExcHx49duvH3kiFm97eJF2LXL3K7latPXLl9VTqOK0oTY2Fg2bNhAr169Evc5HA4aNWrEmjVrrvqYmJgYYmJiErcjIyMBM0rPmdIpnilgzmUDTmzbZaeV/7DtpDyLeyjHnqE8e4by7H4JObZtZ4o7J6SUK9+nE6hQJCKSnlWqBAcOwLx5ZmpRnjzejkjSGl9fKFTI3OrUufox0dHw99/XX8ktMtI06f4vjSpKc06dOkV8fDx5/9O/LG/evOzcufOqj+nfvz99+vS5Yv/JkyeJjo52WWyXLjnJlCmCiAibixczwKi/NMuJv38EMTE2oDy7h3LsGcqzZyjP7mfe/2JibMLCXJvjqKgol54PVCgSEUn/HnrIfMifMgVeeAEyZfJ2RJLeBARAiRLmdjW2DdWqwZYtV28grlFF6V6vXr3o1q1b4nZkZCShoaEEBweTI0cOlz2P0+nkzjstypYNxrL0YcRdbNvJhQsWWbIoz+6iHHuG8uwZyrP7JeS4WLFgcud2bY4DAgJcej5QoUhEJP3z8zP9ikaOhIUL4eGHvR2RZDSLFsGmTdf+vkYVpSl58uTBx8eHEydOJNt/4sQJ8uXLd9XH+Pv743+VZu8Oh+PKxvC3qEABi5AQ159XkjidEBamPLuTcuwZyrNnKM/ul5Dj3Lldn2N3/Mz0WyAikhHkzQsPPmhWvfrrL29HIxnJvyudXbU/0eUSRhWp8YzX+fn5Ua1aNZYuXZq4z+l0snTpUupca/qhiIiIyL9UKBIRySiqVoVy5WD2bAgP93Y0klEsWmRGC12tP9HlLh9VJF7XrVs3Ro4cydixY9mxYwevvvoq58+fT1wFTURERORaVCgSEckoLAuaNYMsWUy/oht9sBe5kYTRRCkd0uxwaFRRGvH4448zcOBA/ve//1G5cmU2b97MggULrmhwLSIiIvJf6apQNGDAACzLomvXrt4ORUQkbQoIMP2KTpyAJUu8HY2kd7GxZtWzlC676nSaFdJiY90bl6RI586dOXToEDExMfz+++/UqlXL2yGJiIhIOpBumlmvW7eOb775hooVK3o7FBGRtK1AAbj/fliwAIoUgVKlvB2RpFf+/mY62cmTyXY7nU7OnDlDrly5rmygGBJiHiciIiIi6VK6KBSdO3eO9u3bM3LkSD766CNvhyMikvbVqgUHDsDMmfDKKxAY6O2IJL0KDTW3yzmdxIWFmaKQVkcRERERyVDSxdVdp06daNq0KY0aNfJ2KCIi6YNlQcuW4OcH06alfOqQiIiIiIjc1tL8iKKJEyeyceNG1q1bl6LjY2JiiImJSdyOjIwEzDB5pws/KDmdTmzbduk55UrKs2coz+7nlRz7+8Mjj8DYsbB0KTRs6Lnn9hL9LnuGu/Ksn5uIiIiI96XpQtGRI0fo0qULixcvJiAgIEWP6d+/P3369Lli/8mTJ4mOjnZZbE6nk4iICGzbvrI/g7iM8uwZyrP7eS3HAQFkql4dnx07iK5QwXPP6yX6XfYMd+U5KirKZecSERERkZuTpgtFGzZsICwsjKpVqybui4+PZ8WKFQwfPpyYmBh8fHySPaZXr15069YtcTsyMpLQ0FCCg4PJkSOHy2JzOp1YlkVwcLA+jLiR8uwZyrP7eTXHDz4IgF2V6BUAAEJhSURBVOv+BUy79LvsGe7Kc0r/U0hERERE3CdNF4oaNmzI1q1bk+179tlnKV26NG+//fYVRSIAf39//K+y2orD4XD5hwbLstxyXklOefYM5dn9lGPPUJ49wx151s9MRERExPvSdKEoe/bslC9fPtm+rFmzkjt37iv2i4iIiIiIiIjIrUnThSJXsG0bSGpq7SpOp5OoqCgCAgL0P6BupDx7hvLsfsqxZyjPnuGuPCe8Vye8d4v36PopfVOe3U859gzl2TOUZ/dzZ47dcf2U7gpFy5YtS9XxCY0xQ0ND3RCNiIjrfQdcAmIv2zcYOOThOPIDb2B6G50HhgBHrnJceeAD4Ohl+97EvAaAZ4FqgA+wHfgKiHdHwJJhREVFERgY6O0wbmu6fhIREUlfXHn9ZNkZ/L/tnE4n//zzD9mzZ8eyLJedN6FJ9pEjR1zaJFuSU549Q3l2v9TkOKBTJ2LefBO7aNHrnzQ+Hi7v1fbf7etJ+Kf/Ov8u+n/4IXF33UX8Pffgs3YtvrNnE9Ov3xXHOf76i0xjxxLz6adXfM9nyRJ8V68m5p13wMeHTN9+i50/P3HNm6cszlTS77JnuCvPtm0TFRVFgQIF9D+aXqbrp/RNeXY/5dgzlGfPUJ7dz505dsf1U7obUZRaDoeDQoUKue38OXLk0B+TByjPnqE8u1+Kcuznh1/27HC145o1g7ZtYcMGKF8eIiNNsefYMTh7FkaMgOnTYckScDigSBF49VXImhXGj4dDh+DiRTh1Cvr2hdy5rx5DRAQcPox/06am+HT//TBuHP7nz0P+/MmPzZYN/Pzwv1q8YWFQsyb+uXKZ7Xr1YMIEePLJG6Xqluh32TPckWeNJEobdP2UMSjP7qcce4by7BnKs/u5K8euvn7K8IUiEZF06dNPwc8vaXvgwKRtHx8YPNjcHzIE9u41x2fObApIixeb47NmheHDYexY6NjRHL9zJwwdCjlzmu0PPoD27eHOO5M//8mTkCtX0ggly4LgYLP/v4UiMIWqLl3M8Y0awUMPmf0lSsCCBfDwwyb+lSvhxAkXJEhERERERNxBhSIRkbSoRw8oVuzq32vUKPl2/fqmSASweTM0aGCKRGAKNgMGJB1brVpSkQhMoehWFS8OY8aY5zx1Cvr0MaOh6teHhg3NqKJevUyhqHJl2LTp1p9TRERERETcQoWim+Tv78/777+Pv7+/t0PJ0JRnz1Ce3c+lOU4oCiUICLj5x15LcDCcOZPU98i2zWii4OArj82SJel+njxw113w11+mUGRZ0K6duQGsWAGFC6c83lTS77JnKM
9ys/S74xnKs/spx56hPHuG8ux+6S3HGb6ZtYhIuvP889C799VHFDVrBhMnJo0YGjIEihaFFi3M9oYNMGqUmXqWJQt89ZUp1rz6qulRdP48vPhiyuLo1cuMXmrYEFatgqlT4fPPrzzuzBkICjLPc/EivP++6Wl0//0QG2tu2bKZfkrvvmv6E9WseVOpERERERER99KIIhGRtOi/PYpeeAEqVrzx46pVMw2r33wzeTPra7lWjyKAzp1NYWjyZFN06tIl6XvDhkGtWua2ejX8/LMZeRQfb0YSJUyPu3DBFJwsy4xKat5cRSIRERERkTRMI4pERERERERERAQAh7cDEBERERERERGRtEGFIhERERERERERAVQoumlffvklRYoUISAggFq1avHHH394O6QMZcWKFTRr1owCBQpgWRYzZ870dkgZTv/+/alRowbZs2cnJCSEli1bsmvXLm+HleF8/fXXVKxYkRw5cpAjRw7q1KnD/PnzvR1WhjZgwAAsy6Jr167eDiVD+eCDD7AsK9mtdOnS3g5L0qDUXiNNmTKF0qVLExAQQIUKFfj55589FGn6lpo8jxw5kgYNGhAUFERQUBCNGjXStWsK3Oz1/sSJE7Esi5YtW7o3wAwitXk+e/YsnTp1In/+/Pj7+1OyZEn9u5ECqc3zkCFDKFWqFJkzZyY0NJQ33niD6OhoD0Wb/tzM59dly5ZRtWpV/P39KVGiBGPGjHF7nCmlQtFNmDRpEt26deP9999n48aNVKpUiSZNmhAWFubt0DKM8+fPU6lSJb788ktvh5JhLV++nE6dOrF27VoWL17MpUuXaNy4MefPn/d2aBlKoUKFGDBgABs2bGD9+vXcd999tGjRgr/++svboWVI69at45tvvqFiShp/S6qVK1eOY8eOJd5Wrlzp7ZAkjUntNdLq1at54okneP7559m0aRMtW7akZcuWbNu2zcORpy+pzfOyZct44okn+PXXX1mzZg2hoaE0btyYo0ePejjy9ONmr/cPHjzIm2++SYMGDTwUafqW2jzHxsZy//33c/DgQaZOncquXbsYOXIkBQsW9HDk6Utq8zx+/Hh69uzJ+++/z44dOxg1ahSTJk3inXfe8XDk6UdqP78eOHCApk2bcu+997J582a6du3KCy+8wMKFC90caQrZkmo1a9a0O3XqlLgdHx9vFyhQwO7fv78Xo8q4AHvGjBneDiPDCwsLswF7+fLl3g4lwwsKCrK/++47b4eR4URFRdl33nmnvXjxYvvuu++2u3Tp4u2QMpT333/frlSpkrfDkDQutddIjz32mN20adNk+2rVqmW//PLLbo0zvbvVa9G4uDg7e/bs9tixY90VYrp3MzmOi4uz69ata3/33Xf2M888Y7do0cIDkaZvqc3z119/bRcrVsyOjY31VIgZQmrz3KlTJ/u+++5Ltq9bt252vXr13BpnRpGSz689evSwy5Url2zf448/bjdp0sSNkaWcRhSlUmxsLBs2bKBRwtLPgMPhoFGjRqxZs8aLkYncmoiICABy5crl5Ugyrvj4eCZOnMj58+epU6eOt8PJcDp16kTTpk2T/fssrrVnzx4KFChAsWLFaN++PYcPH/Z2SJKG3Mw10po1a674m23SpImuqa7DFdeiFy5c4NKlS3rPv4abzfGHH35ISEgIzz//vCfCTPduJs+zZ8+mTp06dOrUibx581K+fHn69etHfHy8p8JOd24mz3Xr1mXDhg2J09P279/Pzz//zEMPPeSRmG8Haf39z9fbAaQ3p06dIj4+nrx58ybbnzdvXnbu3OmlqERujdPppGvXrtSrV4/y5ct7O5wMZ+vWrdSpU4fo6GiyZcvGjBkzKFu2rLfDylAmTpzIxo0bWbdunbdDybBq1arFmDFjKFWqFMeOHaNPnz40aNCAbdu2kT17dm+HJ2nAzVwjHT9+/KrHHz9+3G1xpneuuBZ9++23KVCggArr13AzOV65ciWjRo1i8+bNHogwY7iZPO/fv59ffvmF9u3b8/PPP7N37146duzIpUuXeP/99z0RdrpzM3lu164dp06don79+ti2TVxcHK+88oqmnrnQtd7/IiMjuXjxIpkzZ/ZSZIYKRSJCp06d2LZtm/qNuEmpUqXYvHkzERERTJ06lWeeeYbly5erWOQiR44coUuXLixevJiAgABvh5NhPfjgg4n3K1asSK1atbjjjjuYPHmy/vdcJB0ZMGAAEydOZNmyZfo300WioqJ46qmnGDlyJHny5PF2OBma0+kkJCSEb7/9Fh8fH6pVq8bRo0f57LPPVChyoWXLltGvXz+++uoratWqxd69e+nSpQt9+/blvffe83Z44gEqFKVSnjx58PHx4cSJE8n2nzhxgnz58nkpKpGb17lzZ+bOncuKFSsoVKiQt8PJkPz8/ChRogQA1apVY926dQwdOpRvvvnGy5FlDBs2bCAsLIyqVasm7ouPj2fFihUMHz6cmJgYfHx8vBhhxpQzZ05KlizJ3r17vR2KpBE3c42UL18+XVOl0q1ciw4cOJABAwawZMkSNf2/jtTmeN++fRw8eJBmzZol7nM6nQD4+vqya9cuihcv7t6g06Gb+V3Onz8/mTJlSva+XqZMGY4fP05sbCx+fn5ujTk9upk8v/feezz11FO88MILAFSoUIHz58/z0ksv0bt3bxwOdbC5Vdd6/8uRI4fXRxOBVj1LNT8/P6pVq8bSpUsT9zmdTpYuXaqeI5Ku2LZN586dmTFjBr/88gtFixb1dki3DafTSUxMjLfDyDAaNmzI1q1b2bx5c+KtevXqtG/fns2bN6tI5Cbnzp1j37595M+f39uhSBpxM9dIderUSXY8wOLFi3VNdR03ey366aef0rdvXxYsWED16tU9EWq6ldocly5d+or3oebNmyeuZhQaGurJ8NONm/ldrlevHnv37k0sxAHs3r2b/Pnzq0h0DTeT5wsXLlxRDEq4nrJt233B3kbS/Pufl5tpp0sTJ060/f397TFjxtjbt2+3X3rpJTtnzpz28ePHvR1ahhEVFWVv2rTJ3rRpkw3YgwcPtjdt2mQfOnTI26FlGK+++qodGBhoL1u2zD527Fji7cKFC94OLUPp2bOnvXz5cvvAgQP2n3/+affs2dO2LMtetGiRt0PL0LTqmet1797dXrZsmX3gwAF71apVdqNGjew8efLYYWFh3g5N0pAbXSM99dRTds+ePROPX7Vqle3r62sPHDjQ3rFjh/3+++/bmTJlsrdu3eqtl5AupDbPAwYMsP38/OypU6cme8+Piory1ktI81Kb4//Sqmcpk9o8Hz582M6ePbvduXNne9euXfbcuXPtkJAQ+6OPPvLWS0gXUpvn999/386ePbs9YcIEe//+/fb/27v3OKvqem/gnwGdARJGDR0uoah5yxQUlNAM7Yxysoe086SEHkCP5knRUh5L8AKaKdbRIhXjaBp6XhqmqcdXGFYknkfFG0oXRcpb+JSMoskoKrdZzx9tdk2AAsKeDb7fr9d+vdy//Vtrfdf8gPX1M2vv/fOf/7zYZZddimOOOaatTqHqv
df/v44ZM6YYPnx4ef5zzz1XdOrUqfja175WzJ07t5g0aVLRvn37Yvr06W11Cq0IitbTlVdeWeywww5FbW1tccABBxQPPfRQW5e0Wbn33nuLJKs8Ro4c2dalbTZW9/NNUvzwhz9s69I2K//2b/9W7LjjjkVtbW2x3XbbFf/0T/8kJKoAQdGGN3To0KJ79+5FbW1t0bNnz2Lo0KHFM88809ZlUYXerUcaNGjQKtfyH//4x8Vuu+1W1NbWFnvttVcxbdq0Cle8aVqXn/OOO+642mv++PHjK1/4JmRd/yz/PUHR2lvXn/ODDz5YDBgwoKirqyt23nnn4uKLLy6WL19e4ao3Pevyc162bFlxwQUXFLvsskvRoUOHolevXsWpp55a/OUvf6l84ZuI9/r/15EjRxaDBg1aZZu+ffsWtbW1xc4771xV/x9WUxTuHQMAAADAZxQBAAAAUCIoAgAAACCJoAgAAACAEkERAAAAAEkERQAAAACUCIoAAAAASCIoAgAAAKBEUAQAAABAEkER8HeOP/74HHXUUW12/OHDh+eSSy5ps+NvCFOmTMnWW2+9VnOnT5+evn37pqWlZeMWBQCbgbbuU9ZWTU1N7rzzznedszbn0rt370ycOPF91bIh9tHW3utnNWXKlNTU1KSmpiZnnHFGxer6e4cccki5hjlz5rRJDbAhCYrgA2LlxWtNjwsuuCDf+973MmXKlDap79e//nXuvvvufOUrX2mT47eFf/7nf86WW26Zm266qa1LAYA2Ve19yrp46aWX8pnPfCZJ8sILL6x3ePDoo4/m5JNPfl+1rOs+Zs6cmZqamrz++uvv67iV1qVLl7z00ku56KKLymNFUWTcuHHp3r17OnbsmMbGxvzhD39Y72Psscce2XLLLbNgwYJVXrv99tvzyCOPrPe+odoIiuAD4qWXXio/Jk6cWL6grnycddZZqa+vX+u7YTa0K6+8MkcffXS22mqrNjl+Wzn++ONzxRVXtHUZANCmqr1PWRfdunVLXV3d+97Pdtttl06dOrX5PtZHURRZvnx5xY5XU1OTbt26pXPnzuWxb3/727niiisyefLkPPzww/nQhz6UwYMH55133lnn/d9///159dVXc9hhh+WGG25Y5fVtt90222233fs6B6gmgiL4gOjWrVv5UV9fX76grnxstdVWq9zae8ghh+T000/PGWeckW222SYNDQ259tprs3jx4pxwwgnp3LlzPvrRj+ZnP/tZq2P97ne/y2c+85lstdVWaWhoyPDhw7Nw4cI11rZixYrcdtttGTJkSKvxq6++Orvuums6dOiQhoaGfOELXyi/1tLSkgkTJmSnnXZKx44d06dPn9x2222ttn/yySfzv/7X/0qXLl3SuXPnHHzwwXn22WfL23/jG9/IRz7ykdTV1aVv376ZPn16eduVvwG8/fbbc+ihh6ZTp07p06dPZs2a1eoYU6ZMyQ477JBOnTrl85//fF599dVWr//617/OoYcems6dO6dLly7p169fHnvssfLrQ4YMyWOPPVauCwA+iKq1TymKItttt12rHqNv377p3r17+fn999+furq6vPXWW0lav/Vsp512SpLsu+++qampySGHHNJq/5dddlm6d++eD3/4wxk1alSWLVtWfu0f3zZWU1OTH/zgB/n85z+fTp06Zdddd81dd931rj/XddnHCy+8kEMPPTRJss0226SmpibHH398kvfuu1beifSzn/0s/fr1S11dXa6//vrU1NTk6aefblXTd7/73eyyyy5J/toDnnjiieX97r777vne9773rue0NoqiyMSJE3PeeeflyCOPzD777JMbb7wxf/7zn9/zbYGrc9111+Xoo4/OyJEjc/3117/v+qDaCYqAd3XDDTeka9eueeSRR3L66afnlFNOydFHH50DDzwwjz/+eA4//PAMHz683By9/vrr+fSnP5199903jz32WKZPn56mpqYcc8wxazzGb37zmyxatCj9+/cvjz322GP5yle+km984xuZN29epk+fnk996lPl1ydMmJAbb7wxkydPzpNPPpkzzzwz//qv/5r77rsvSfKnP/0pn/rUp1JXV5df/epXmT17dv7t3/6t/Nut733ve7n88stz2WWX5Te/+U0GDx6cz33uc6vcknzuuefmrLPOypw5c7Lbbrtl2LBh5X08/PDDOfHEE3Paaadlzpw5OfTQQ/PNb36z1fbHHXdcPvKRj+TRRx/N7NmzM2bMmGy55Zbl13fYYYc0NDTk//7f/7s+ywMAH2gbu0+pqanJpz71qcycOTNJ8pe//CVz587N22+/XQ5A7rvvvuy///6rvXNn5duRfvnLX+all17K7bffXn7t3nvvzbPPPpt77703N9xwQ6ZMmfKeb6278MILc8wxx+Q3v/lNjjjiiBx33HF57bXX1ulntqZ99OrVKz/5yU+SJPPmzctLL71UDm3eq+9aacyYMbn00kszd+7cfOELX0j//v1XeYv9TTfdlGOPPTbJXwOoj3zkI7n11lvz1FNPZdy4cTnnnHPy4x//eJ3O6R89//zzWbBgQRobG8tj9fX1GTBgwCq/9Hsvb7zxRm699dYcd9xx+dznPpeXXnpJ38bmrwA+cH74wx8W9fX1q4yPHDmyOPLII8vPBw0aVHzyk58sP1++fHnxoQ99qBg+fHh57KWXXiqSFLNmzSqKoiguuuii4vDDD2+13xdffLFIUsybN2+19dxxxx1F+/bti5aWlvLYT37yk6JLly5Fc3PzKvPfeeedolOnTsWDDz7YavzEE08shg0bVhRFUYwdO7bYaaediqVLl672mD169CguvvjiVmP7779/ceqppxZFURTPP/98kaT4wQ9+UH79ySefLJIUc+fOLYqiKIYNG1YcccQRrfYxdOjQVj/bzp07F1OmTFltDSvtu+++xQUXXPCucwDgg6La+pQrrrii2GuvvYqiKIo777yzGDBgQHHkkUcW3//+94uiKIrGxsbinHPOKc9PUtxxxx1FUfytn3jiiSdWOZcdd9yxWL58eXns6KOPLoYOHVp+vuOOOxbf/e53W+33vPPOKz9/8803iyTFz372s9XWvT77uPfee4skxV/+8pfynLXpu1Zud+edd7aa893vfrfYZZddys/nzZvXqpdanVGjRhX/+3//7/Lzf1z3f7S6Py8PPPBAkaT485//3Gr86KOPLo455pg17mt1rrnmmqJ3797lPnXEiBHFyJEjV5m3prWGTZE7ioB3tc8++5T/u3379vnwhz+cvffeuzzW0NCQJHn55ZeT/PWtVvfee2+22mqr8mOPPfZIkjW+vertt99OXV1dampqymOHHXZYdtxxx+y8884ZPnx4brrppvJvA5955pm89dZbOeyww1od58YbbywfY86cOTn44INb3b2zUnNzc/785z/noIMOajV+0EEHZe7cuWs8/5W3ma8817lz52bAgAGt5g8cOLDV89GjR+ekk05KY2NjLr300tX+DDp27Fg+NwBg7VWiTxk0aFCeeuqpvPLKK7nvvvtyyCGH5JBDDsnMmTOzbNmyPPjg
g6u8pWxt7LXXXmnfvn35effu3ct1rs35fuhDH0qXLl3ec5v3u4+16btW+vu7w5Pki1/8Yl544YU89NBDSf56N9F+++1X/pknyaRJk9KvX79st9122WqrrXLNNddk/vz563ROG9P111+fY489ttynHnfccbn11lvzxhtvtHFlsPFs0dYFANXtH4OWmpqaVmMrL5orv+L9zTffzJAhQ/Ktb31rlX39/fv5/17Xrl3z1ltvZenSpamtrU2SdO7cOY8//nhmzpyZn//85xk3blwuuOCCPProo3nzzTeTJNOmTUvPnj1b7Wvlh0d27NhxfU53Fe92rmvjggsuyLHHHptp06blZz/7WcaPH5+pU6fm85//fHnOa6+95gMQAWA9VKJP2XvvvbPtttvmvvvuy3333ZeLL7443bp1y7e+9a08+uijWbZsWQ488MANUvt79Rjrs8373cfa9F0rfehDH2r1vFu3bvn0pz+dm2++OZ/4xCdy880355RTTim/PnXq1Jx11lm5/PLLM3DgwHTu3Dn/8R//kYcffnidzukfdevWLUnS1NTUal2bmprSt2/ftd7PU089lYceeijXXXddeeyf/umf0rlz50ydOjVf+tKX3ledUK0ERcAGtd9+++UnP/lJevfunS22WLt/YlZesJ966qlWF+8tttgijY2NaWxszPjx47P11lvnV7/6VQ477LDU1dVl/vz5GTRo0Gr3uc8+++SGG27IsmXLVmmIunTpkh49euSBBx5otf0DDzyQAw44YK3Pdc8991ylkVn5G7O/t9tuu2W33XbLmWeemWHDhuWHP/xhOSh655138uyzz2bfffdd6+MCAOtnffqUmpqaHHzwwfnv//7vPPnkk/nkJz+ZTp06ZcmSJfnP//zP9O/ff5WAZKWVvwBbsWLFBjuHjWl19X7sYx97z77r3Rx33HH5+te/nmHDhuW5557LF7/4xfJrDzzwQA488MCceuqp5bEN8QUfO+20U7p165YZM2aUe8vm5uY8/PDDrYKq93LdddelT58++djHPlYea9++fYYOHZrrrrtOUMRmy1vPgA1q1KhRee211zJs2LA8+uijefbZZ3PPPffkhBNOWGOTtN1222W//fbL/fffXx776U9/miuuuCJz5szJH//4x9x4441paWnJ7rvvns6dO+ess87KmWeemRtuuCHPPvtsHn/88Vx55ZXlryw97bTT0tzcnC9+8Yt57LHH8oc//CH/9V//lXnz5iVJvva1r+Vb3/pWbrnllsybNy9jxozJnDlz8tWvfnWtz/UrX/lKpk+fnssuuyx/+MMfctVVV7X65rS33347p512WmbOnJk//vGPeeCBB/Loo49mzz33LM956KGHUldXt8pb1gCADW99+pTkr9+w9qMf/Sh9+/bNVlttlXbt2uVTn/pUbrrppncNT7bffvt07Nix/KHZixYt2hintcHsuOOOqampyU9/+tO88sorefPNN9eq73o3//Iv/5I33ngjp5xySg499ND06NGj/Nquu+6axx57LPfcc09+//vf5/zzz8+jjz76vs+jpqYmZ5xxRr75zW/mrrvuym9/+9uMGDEiPXr0aPXNee9m2bJl+a//+q8cfPDB+d3vftfqsd9+++Xhhx/Ok08++b5rhWokKAI2qJV36qxYsSKHH3549t5775xxxhnZeuut067dmv/JOemkk1p9K8bWW2+d22+/PZ/+9Kez5557ZvLkyfnRj36UvfbaK0ly0UUX5fzzz8+ECROy55575p//+Z8zbdq08tfQfvjDH86vfvWrvPnmmxk0aFD69euXa6+9tnx30Ve+8pWMHj06/+f//J/svffemT59eu66667suuuua32un/jEJ3Lttdfme9/7Xvr06ZOf//znOe+888qvt2/fPq+++mpGjBiR3XbbLcccc0w+85nP5MILLyzP+dGPfpTjjjtutd+UAgBsWOvbpwwaNCgrVqxo9VlEhxxyyCpj/2iLLbbIFVdckf/8z/9Mjx49cuSRR27As9nwevbsmQsvvDBjxoxJQ0NDTjvttCTv3Xe9m86dO2fIkCH59a9/neOOO67Va//+7/+ef/mXf8nQoUMzYMCAvPrqq63uLno/vv71r+f000/PySefnP333z9vvvlmpk+fng4dOpTnHHLIITn++ONXu/1dd92VV155JVdddVX23nvvVo+V2/z9W9Jgc1JTFEXR1kUAvP3229l9991zyy23fGDurlm4cGF23333PPbYY2vVaAEA0NqUKVNyxhln5PXXX1/nbXfcccdceOGFawyL1sULL7yQnXbaKU888cQ6fQ4SVCN3FAFVoWPHjrnxxhuzcOHCti6lYl544YVcffXVQiIAgPdh0aJF2WqrrXL22Wev9TZPPvlk6uvrM2LEiPd9/M985jPlu95hc+COIgAAADZJb7zxRpqampL89aMLunbtWvEa/vSnP+Xtt99Okuywww7lDwWHTZWgCAAAAIAk3noGAAAAQImgCAAAAIAkgiIAAAAASgRFAAAAACQRFAEAAABQIigCAAAAIImgCAAAAIASQREAAAAASQRFAAAAAJQIigAAAABIIigCAAAAoERQBAAAAEASQREAAAAAJYIiAAAAAJIIigAAAAAoERQBAAAAkERQBAAAAECJoAgAAACAJIIiAAAAAEoERQAAAAAkERQBAAAAUFLRoOh//ud/MmTIkPTo0SM1NTW5884733ObmTNnZr/99ktdXV0++tGPZsqUKRu9TgCAaqF/AgAqqaJB0eLFi9OnT59MmjRpreY///zz+exnP5tDDz00c+bMyRlnnJGTTjop99xzz0auFACgOuifAIBKqimKomiTA9fU5I477shRRx21xjlnn312pk2blt/97nflsS9+8Yt5/fXXM3369ApUCQBQPfRPAMDGtkVbF/BuZs2alcbGxlZjgwcPzhlnnLHGbZYsWZIlS5aUn7e0tOS1117Lhz/84dTU1GysUgGA96koirzxxhvp0aNH2rXzMYrrS/8EAB8cG6N/quqgaMGCBWloaGg11tDQkObm5rz99tvp2LHjKttMmDAhF154YaVKBAA2sBdffDEf+chH2rqMTZb+CQA+eDZk/1TVQdH6GDt2bEaPHl1+vmjRouywww558cUX06VLlzasDAB4N83NzenVq1c6d+7c1qV84OifAGDTtDH6p6oOirp165ampqZWY01NTenSpctqfxuWJHV1damrq1tlvEuXLhodANgEeKvT+6N/AoAPng3ZP1X1BwAMHDgwM2bMaDX2i1/8IgMHDmyjigAAqpv+CQB4PyoaFL355puZM2dO5syZk+SvX986Z86czJ8/P8lfb3seMWJEef6Xv/zlPPfcc/n617+ep59+OldffXV+/OMf58wzz6xk2QAAbUb/BABUUkWDosceeyz77rtv9t133yTJ6NGjs++++2bcuHFJkpdeeqnc9CTJTjvtlGnTpuUXv/hF+vTpk8svvzw/+MEPMnjw4EqWDQDQZvRPAEAl1RRFUbR1ERtTc3Nz6uvrs2jRIu+xB4Aq5ppdPawFAGwaNsY
1u6o/owgAAACAyhEUAQAAAJBEUAQAAABAiaAIAAAAgCSCIgAAAABKBEUAAAAAJBEUAQAAAFAiKAIAAAAgiaAIAAAAgBJBEQAAAABJBEUAAAAAlAiKAAAAAEgiKAIAAACgRFAEAAAAQBJBEQAAAAAlgiIAAAAAkgiKAAAAACgRFAEAAACQRFAEAAAAQImgCAAAAIAkgiIAAAAASgRFAAAAACQRFAEAAABQIigCAAAAIImgCAAAAIASQREAAAAASQRFAAAAAJQIigAAAABIIigCAAAAoERQBAAAAEASQREAAAAAJYIiAAAAAJIIigAAAAAoERQBAAAAkERQBAAAAECJoAgAAACAJIIiAAAAAEoERQAAAAAkERQBAAAAUCIoAgAAACCJoAgAAACAEkERAAAAAEkERQAAAACUCIoAAAAASCIoAgAAAKBEUAQAAABAEkERAAAAACWCIgAAAACSCIoAAAAAKBEUAQAAAJBEUAQAAABAiaAIAAAAgCSCIgAAAABKBEUAAAAAJBEUAQAAAFAiKAIAAAAgiaAIAAAAgBJBEQAAAABJBEUAAAAAlFQ8KJo0aVJ69+6dDh06ZMCAAXnkkUfedf7EiROz++67p2PHjunVq1fOPPPMvPPOOxWqFgCg7emfAIBKqWhQdMstt2T06NEZP358Hn/88fTp0yeDBw/Oyy+/vNr5N998c8aMGZPx48dn7ty5ue6663LLLbfknHPOqWTZAABtRv8EAFRSRYOi73znO/nSl76UE044IR/72McyefLkdOrUKddff/1q5z/44IM56KCDcuyxx6Z37945/PDDM2zYsPf8LRoAwOZC/wQAVFLFgqKlS5dm9uzZaWxs/NvB27VLY2NjZs2atdptDjzwwMyePbvc2Dz33HO5++67c8QRR6zxOEuWLElzc3OrBwDApkj/BABU2haVOtDChQuzYsWKNDQ0tBpvaGjI008/vdptjj322CxcuDCf/OQnUxRFli9fni9/+cvveuv0hAkTcuGFF27Q2gEA2oL+CQCotKr+1rOZM2fmkksuydVXX53HH388t99+e6ZNm5aLLrpojduMHTs2ixYtKj9efPHFClYMANC29E8AwPtRsTuKunbtmvbt26epqanVeFNTU7p167babc4///wMHz48J510UpJk7733zuLFi3PyySfn3HPPTbt2q+ZcdXV1qaur2/AnAABQYfonAKDSKnZHUW1tbfr165cZM2aUx1paWjJjxowMHDhwtdu89dZbqzQz7du3T5IURbHxigUAqAL6JwCg0ip2R1GSjB49OiNHjkz//v1zwAEHZOLEiVm8eHFOOOGEJMmIESPSs2fPTJgwIUkyZMiQfOc738m+++6bAQMG5Jlnnsn555+fIUOGlBseAIDNmf4JAKikigZFQ4cOzSuvvJJx48ZlwYIF6du3b6ZPn17+gMb58+e3+g3Yeeedl5qampx33nn505/+lO222y5DhgzJxRdfXMmyAQDajP4JAKikmmIzvwe5ubk59fX1WbRoUbp06dLW5QAAa+CaXT2sBQBsGjbGNbuqv/UMAAAAgMoRFAEAAACQRFAEAAAAQImgCAAAAIAkgiIAAAAASgRFAAAAACQRFAEAAABQIigCAAAAIImgCAAAAIASQREAAAAASQRFAAAAAJQIigAAAABIIigCAAAAoERQBAAAAEASQREAAAAAJYIiAAAAAJIIigAAAAAoERQBAAAAkERQBAAAAECJoAgAAACAJIIiAAAAAEoERQAAAAAkERQBAAAAUCIoAgAAACCJoAgAAACAEkERAAAAAEkERQAAAACUCIoAAAAASCIoAgAAAKBEUAQAAABAEkERAAAAACWCIgAAAACSCIoAAAAAKBEUAQAAAJBEUAQAAABAiaAIAAAAgCSCIgAAAABKBEUAAAAAJBEUAQAAAFAiKAIAAAAgiaAIAAAAgBJBEQAAAABJBEUAAAAAlAiKAAAAAEgiKAIAAACgRFAEAAAAQBJBEQAAAAAlgiIAAAAAkgiKAAAAACgRFAEAAACQRFAEAAAAQImgCAAAAIAkgiIAAAAASgRFAAAAACQRFAEAAABQIigCAAAAIImgCAAAAIASQREAAAAASQRFAAAAAJRUPCiaNGlSevfunQ4dOmTAgAF55JFH3nX+66+/nlGjRqV79+6pq6vLbrvtlrvvvrtC1QIAtD39EwBQKVtU8mC33HJLRo8encmTJ2fAgAGZOHFiBg8enHnz5mX77bdfZf7SpUtz2GGHZfvtt89tt92Wnj175o9//GO23nrrSpYNANBm9E8AQCXVFEVRVOpgAwYMyP7775+rrroqSdLS0pJevXrl9NNPz5gxY1aZP3ny5PzHf/xHnn766Wy55Zbrdczm5ubU19dn0aJF6dKly/uqHwDYeFyzV0//BACsyca4ZlfsrWdLly7N7Nmz09jY+LeDt2uXxsbGzJo1a7Xb3HXXXRk4cGBGjRqVhoaGfPzjH88ll1ySFStWrPE4S5YsSXNzc6sHAMCmSP8EAFRaxYKihQsXZsWKFWloaGg13tDQkAULFqx2m+eeey633XZbVqxYkbvvvjvnn39+Lr/88nzzm99c43EmTJiQ+vr68qNXr14b9DwAACpF/wQAVFpVf+tZS0tLtt9++1xzzTXp169fhg4dmnPPPTeTJ09e4zZjx47NokWLyo8XX3yxghUDALQt/RMA8H5U7MOsu3btmvbt26epqanVeFNTU7p167babbp3754tt9wy7du3L4/tueeeWbBgQZYuXZra2tpVtqmrq0tdXd2GLR4AoA3onwCASqvYHUW1tbXp169fZsyYUR5raWnJjBkzMnDgwNVuc9BBB+WZZ55JS0tLeez3v/99unfvvtomBwBgc6J/AgAqraJvPRs9enSuvfba3HDDDZk7d25OOeWULF68OCeccEKSZMSIERk7dmx5/imnnJLXXnstX/3qV/P73/8+06ZNyyWXXJJRo0ZVsmwAgDajfwIAKqlibz1LkqFDh+aVV17JuHHjsmDBgvTt2zfTp08vf0Dj/Pnz067d37KrXr165Z577smZZ56ZffbZJz179sxXv/rVnH322ZUsGwCgzeifAIBKqimKomjrIjam5ubm1NfXZ9GiRenSpUtblwMArIFrdvWwFgCwadgY1+yq/tYzAAAAACpHUAQAAABAEkERAAAAACWCIgAAAACSCIoAAAAAKBEUAQAAAJBEUAQAAABAiaAIAAAAgCSCIgAAAABKBEUAAAAAJBEUAQAAAFAiKAIAAAAgiaAIAAAAgBJBEQAAAABJBEUAAAAAlAiKAAAAAEgiKAIAAACgRFAEAAAAQBJBEQAAAAAlgiIAAAAAkgiKAAAAACgRFAEAAACQRFAEAAAAQImgCAAAAIAkgiIAAAAASgRFAAAAACQRFAEAAABQIigCAAAAIImgCAAAAIASQREAAAAASQRFAAAAAJQIigAAAABIIigCAAAAoERQBAAAAEASQREAAAAAJYIiAAAAAJIIigAAAAAoERQBAAAAkERQBAAAAECJoAgAAACAJIIiAAAAAEoERQ
AAAAAkERQBAAAAUCIoAgAAACCJoAgAAACAEkERAAAAAEkERQAAAACUCIoAAAAASCIoAgAAAKBEUAQAAABAEkERAAAAACWCIgAAAACSCIoAAAAAKBEUAQAAAJBEUAQAAABAiaAIAAAAgCSCIgAAAABKBEUAAAAAJBEUAQAAAFBS8aBo0qRJ6d27dzp06JABAwbkkUceWavtpk6dmpqamhx11FEbt0AAgCqjfwIAKqWiQdEtt9yS0aNHZ/z48Xn88cfTp0+fDB48OC+//PK7bvfCCy/krLPOysEHH1yhSgEAqoP+CQCopIoGRd/5znfypS99KSeccEI+9rGPZfLkyenUqVOuv/76NW6zYsWKHHfccbnwwguz8847V7BaAIC2p38CACqpYkHR0qVLM3v27DQ2Nv7t4O3apbGxMbNmzVrjdt/4xjey/fbb58QTT1yr4yxZsiTNzc2tHgAAmyL9EwBQaRULihYuXJgVK1akoaGh1XhDQ0MWLFiw2m3uv//+XHfddbn22mvX+jgTJkxIfX19+dGrV6/3VTcAQFvRPwEAlVa133r2xhtvZPjw4bn22mvTtWvXtd5u7NixWbRoUfnx4osvbsQqAQCqh/4JAHi/tqjUgbp27Zr27dunqamp1XhTU1O6deu2yvxnn302L7zwQoYMGVIea2lpSZJsscUWmTdvXnbZZZdVtqurq0tdXd0Grh4AoPL0TwBApVXsjqLa2tr069cvM2bMKI+1tLRkxowZGThw4Crz99hjj/z2t7/NnDlzyo/Pfe5zOfTQQzNnzhy3RAMAmz39EwBQaRW7oyhJRo8enZEjR6Z///454IADMnHixCxevDgnnHBCkmTEiBHp2bNnJkyYkA4dOuTjH/94q+233nrrJFllHABgc6V/AgAqqaJB0dChQ/PKK69k3LhxWbBgQfr27Zvp06eXP6Bx/vz5adeuaj82CQCg4vRPAEAl1RRFUbR1ERtTc3Nz6uvrs2jRonTp0qWtywEA1sA1u3pYCwDYNGyMa7ZfPwEAAACQRFAEAAAAQImgCAAAAIAkgiIAAAAASgRFAAAAACQRFAEAAABQIigCAAAAIImgCAAAAIASQREAAAAASQRFAAAAAJQIigAAAABIIigCAAAAoERQBAAAAEASQREAAAAAJYIiAAAAAJIIigAAAAAoERQBAAAAkERQBAAAAECJoAgAAACAJIIiAAAAAEoERQAAAAAkERQBAAAAUCIoAgAAACCJoAgAAACAEkERAAAAAEkERQAAAACUCIoAAAAASCIoAgAAAKBEUAQAAABAEkERAAAAACWCIgAAAACSCIoAAAAAKBEUAQAAAJBEUAQAAABAiaAIAAAAgCSCIgAAAABKBEUAAAAAJBEUAQAAAFAiKAIAAAAgiaAIAAAAgBJBEQAAAABJBEUAAAAAlAiKAAAAAEgiKAIAAACgRFAEAAAAQBJBEQAAAAAlgiIAAAAAkgiKAAAAACgRFAEAAACQRFAEAAAAQImgCAAAAIAkgiIAAAAASgRFAAAAACQRFAEAAABQIigCAAAAIImgCAAAAIASQREAAAAASQRFAAAAAJQIigAAAABI0gZB0aRJk9K7d+906NAhAwYMyCOPPLLGuddee20OPvjgbLPNNtlmm23S2Nj4rvMBADZH+icAoFIqGhTdcsstGT16dMaPH5/HH388ffr0yeDBg/Pyyy+vdv7MmTMzbNiw3HvvvZk1a1Z69eqVww8/PH/6058qWTYAQJvRPwEAlVRTFEVRqYMNGDAg+++/f6666qokSUtLS3r16pXTTz89Y8aMec/tV6xYkW222SZXXXVVRowYsVbHbG5uTn19fRYtWpQuXbq8r/oBgI3HNXv19E8AwJpsjGt2xe4oWrp0aWbPnp3Gxsa/HbxduzQ2NmbWrFlrtY+33nory5Yty7bbbrvGOUuWLElzc3OrBwDApkj/BABUWsWCooULF2bFihVpaGhoNd7Q0JAFCxas1T7OPvvs9OjRo1Wz9I8mTJiQ+vr68qNXr17vq24AgLaifwIAKm2T+dazSy+9NFOnTs0dd9yRDh06rHHe2LFjs2jRovLjxRdfrGCVAADVQ/8EAKyrLSp1oK5du6Z9+/ZpampqNd7U1JRu3bq967aXXXZZLr300vzyl7/MPvvs865z6+rqUldX977rBQBoa/onAKDSKnZHUW1tbfr165cZM2aUx1paWjJjxowMHDhwjdt9+9vfzkUXXZTp06enf//+lSgVAKAq6J8AgEqr2B1FSTJ69OiMHDky/fv3zwEHHJCJEydm8eLFOeGEE5IkI0aMSM+ePTNhwoQkybe+9a2MGzcuN998c3r37l1+L/5WW22VrbbaqpKlAwC0Cf0TAFBJFQ2Khg4dmldeeSXjxo3LggUL0rdv30yfPr38AY3z589Pu3Z/u8np+9//fpYuXZovfOELrfYzfvz4XHDBBZUsHQCgTeifAIBKqimKomjrIjam5ubm1NfXZ9GiRenSpUtblwMArIFrdvWwFgCwadgY1+xN5lvPAAAAANi4BEUAAAAAJBEUAQAAAFAiKAIAAAAgiaAIAAAAgBJBEQAAAABJBEUAAAAAlAiKAAAAAEgiKAIAAACgRFAEAAAAQBJBEQAAAAAlgiIAAAAAkgiKAAAAACgRFAEAAACQRFAEAAAAQImgCAAAAIAkgiIAAAAASgRFAAAAACQRFAEAAABQIigCAAAAIImgCAAAAIASQREAAAAASQRFAAAAAJQIigAAAABIIigCAAAAoERQBAAAAEASQREAAAAAJYIiAAAAAJIIigAAAAAoERQBAAAAkERQBAAAAECJoAgAAACAJIIiAAAAAEoERQAAAAAkERQBAAAAUCIoAgAAACCJoAgAAACAEkERAAAAAEkERQAAAACUCIoAAAAASCIoAgAAAKBEUAQAAABAEkERAAAAACWCIgAAAACSCIoAAAAAKBEUAQAAAJBEUAQAAABAiaAIAAAAgCSCIgAAAABKBEUAAAAAJBEUAQAAAFAiKAIAAAAgiaAIAAAAgBJBEQAAAABJBEUAAAAAlAiKAAAAAEgiKAIAAACgRFAEAAAAQBJBEQAAAAAlFQ+KJk2alN69e6dDhw4ZMGBAHnnkkXedf+utt2aPPfZIhw4dsvfee+fuu++uUKUAANVB/wQAVEpFg6Jbbrklo0ePzvjx4/P444+nT58+GTx4cF5++eXVzn/wwQczbNiwnHjiiXniiSdy1FFH5aijjsrvfve7SpYNANBm9E8AQCXVFEVRVOpgAwYMyP7775+rrroqSdLS0pJevXrl9NNPz5gxY1aZP3To0CxevDg//elPy2Of+MQn0rdv30yePHmtjtnc3Jz6+vosWrQoXbp02TAnAgBscK7Zq6d/AgDWZGNcs7fYIHtZC0uXLs3s2bMzduzY8li7du3S2NiYWbNmrXabWbNmZfTo0a3GBg8enDvvvHONx1myZEmWLFlSfr5o0aIkf/3hAQDVa+W1uoK/w6p6+icA4N1sjP6pYkHRwoULs2LFijQ0NLQab2hoyNNPP73abRYsWLDa+QsWL
FjjcSZMmJALL7xwlfFevXqtR9UAQKW9+uqrqa+vb+syqoL+CQBYGxuyf6pYUFQpY8eObfVbtNdffz077rhj5s+fr+lsY83NzenVq1defPFFt7G3IetQPaxFdbAO1WPRokXZYYcdsu2227Z1KR84+qfq5N+n6mEtqoe1qA7WoXpsjP6pYkFR165d0759+zQ1NbUab2pqSrdu3Va7Tbdu3dZpfpLU1dWlrq5ulfH6+np/gKtEly5drEUVsA7Vw1pUB+tQPdq1q/iXslYt/ROJf5+qibWoHtaiOliH6rEh+6eKdWK1tbXp169fZsyYUR5raWnJjBkzMnDgwNVuM3DgwFbzk+QXv/jFGucDAGxO9E8AQKVV9K1no0ePzsiRI9O/f/8ccMABmThxYhYvXpwTTjghSTJixIj07NkzEyZMSJJ89atfzaBBg3L55Zfns5/9bKZOnZrHHnss11xzTSXLBgBoM/onAKCSKhoUDR06NK+88krGjRuXBQsWpG/fvpk+fXr5Axfnz5/f6napAw88MDfffHPOO++8nHPOOdl1111z55135uMf//haH7Ouri7jx49f7e3UVJa1qA7WoXpYi+pgHaqHtVg9/dMHl3WoHtaieliL6mAdqsfGWIuawnfQAgAAAJAKfkYRAAAAANVNUAQAAABAEkERAAAAACWCIgAAAACSbCZB0aRJk9K7d+906NAhAwYMyCOPPPKu82+99dbsscce6dChQ/bee+/cfffdFap087Yu63Dttdfm4IMPzjbbbJNtttkmjY2N77lurL11/Tux0tSpU1NTU5Ojjjpq4xb4AbKua/H6669n1KhR6d69e+rq6rLbbrv5N2oDWNd1mDhxYnbfffd07NgxvXr1yplnnpl33nmnQtVuvv7nf/4nQ4YMSY8ePVJTU5M777zzPbeZOXNm9ttvv9TV1eWjH/1opkyZstHr/KDQP1UH/VP10D9VD/1TddA/tb02652KTdzUqVOL2tra4vrrry+efPLJ4ktf+lKx9dZbF01NTaud/8ADDxTt27cvvv3tbxdPPfVUcd555xVbbrll8dvf/rbClW9e1nUdjj322GLSpEnFE088UcydO7c4/vjji/r6+uL//b//V+HKNz/ruhYrPf/880XPnj2Lgw8+uDjyyCMrU+xmbl3XYsmSJUX//v2LI444orj//vuL559/vpg5c2YxZ86cCle+eVnXdbjpppuKurq64qabbiqef/754p577im6d+9enHnmmRWufPNz9913F+eee25x++23F0mKO+64413nP/fcc0WnTp2K0aNHF0899VRx5ZVXFu3bty+mT59emYI3Y/qn6qB/qh76p+qhf6oO+qfq0Fa90yYfFB1wwAHFqFGjys9XrFhR9OjRo5gwYcJq5x9zzDHFZz/72VZjAwYMKP793/99o9a5uVvXdfhHy5cvLzp37lzccMMNG6vED4z1WYvly5cXBx54YPGDH/ygGDlypEZnA1nXtfj+979f7LzzzsXSpUsrVeIHwrquw6hRo4pPf/rTrcZGjx5dHHTQQRu1zg+atWl2vv71rxd77bVXq7GhQ4cWgwcP3oiVfTDon6qD/ql66J+qh/6pOuifqk8le6dN+q1nS5cuzezZs9PY2Fgea9euXRobGzNr1qzVbjNr1qxW85Nk8ODBa5zPe1ufdfhHb731VpYtW5Ztt912Y5X5gbC+a/GNb3wj22+/fU488cRKlPmBsD5rcdddd2XgwIEZNWpUGhoa8vGPfzyXXHJJVqxYUamyNzvrsw4HHnhgZs+eXb69+rnnnsvdd9+dI444oiI18zeu2RuH/qk66J+qh/6peuifqoP+adO1oa7XW2zIoipt4cKFWbFiRRoaGlqNNzQ05Omnn17tNgsWLFjt/AULFmy0Ojd367MO/+jss89Ojx49VvlDzbpZn7W4//77c91112XOnDkVqPCDY33W4rnnnsuvfvWrHHfccbn77rvzzDPP5NRTT82yZcsyfvz4SpS92VmfdTj22GOzcOHCfPKTn0xRFFm+fHm+/OUv55xzzqlEyfydNV2zm5ub8/bbb6djx45tVNmmTf9UHfRP1UP/VD30T9VB/7Tp2lC90yZ9RxGbh0svvTRTp07NHXfckQ4dOrR1OR8ob7zxRoYPH55rr702Xbt2betyPvBaWlqy/fbb55prrkm/fv0ydOjQnHvuuZk8eXJbl/aBMnPmzFxyySW5+uqr8/jjj+f222/PtGnTctFFF7V1aQBl+qe2o3+qLvqn6qB/2rxs0ncUde3aNe3bt09TU1Or8aampnTr1m2123Tr1m2d5vPe1mcdVrrsssty6aWX5pe//GX22WefjVnmB8K6rsWzzz6bF154IUOGDCmPtbS0JEm22GKLzJs3L7vsssvGLXoztT5/L7p3754tt9wy7du3L4/tueeeWbBgQZYuXZra2tqNWvPmaH3W4fzzz8/w4cNz0kknJUn23nvvLF68OCeffHLOPffctGvndyyVsqZrdpcuXdxN9D7on6qD/ql66J+qh/6pOuifNl0bqnfapFertrY2/fr1y4wZM8pjLS0tmTFjRgYOHLjabQYOHNhqfpL84he/WON83tv6rEOSfPvb385FF12U6dOnp3///pUodbO3rmuxxx575Le//W3mzJlTfnzuc5/LoYcemjlz5qRXr16VLH+zsj5/Lw466KA888wz5WYzSX7/+9+ne/fumpz1tD7r8NZbb63SzKxsPv/6OYJUimv2xqF/qg76p+qhf6oe+qfqoH/adG2w6/U6ffR1FZo6dWpRV1dXTJkypXjqqaeKk08+udh6662LBQsWFEVRFMOHDy/GjBlTnv/AAw8UW2yxRXHZZZcVc+fOLcaPH+/rXTeAdV2HSy+9tKitrS1uu+224qWXXio/3njjjbY6hc3Guq7FP/KtHRvOuq7F/Pnzi86dOxennXZaMW/evOKnP/1psf322xff/OY32+oUNgvrug7jx48vOnfuXPzoRz8qnnvuueLnP/95scsuuxTHHHNMW53CZuONN94onnjiieKJJ54okhTf+c53iieeeKL44x//WBRFUYwZM6YYPnx4ef7Kr3j92te+VsydO7eYNGnSen3FK6vSP1UH/VP10D9VD/1TddA/VYe26p02+aCoKIriyiuvLHbYYYeitra2OOCAA4qHHnqo/NqgQYOKkSNHtpr/4x//uNhtt92K2traYq+99iqmTZtW4Yo3T+uyDjvuuGORZJXH+PHjK1/4Zmhd/078PY3OhrWua/Hggw8WAwYMKOrq6oqdd965uPjii4vly5dXuOrNz7qsw7Jly4oLLrig2GWXXYoOHToUvXr1Kk499dTiL3/5S+UL38zce++9q/23f+XPf+TIkcWgQYNW2aZv375FbW1tsfPOOxc//OEPK1735kr/VB30T9VD/1Q99E/VQf/U9tqqd6opCveBAQAAALCJf0YRAAAAABuOoAgAAACAJIIiAAAAAEoERQAAAAAkERQBAAAAUCIoAgAAACCJoAgAAACAEkERAAAAAEkERQAAAACUCIoAAAAASCIo
AgAAAKBEUAQAAABAkuT/A0qXlyCvOe/2AAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [] } ], From 17f86fd7be62f4333cedd649fe57f8fc0214bf07 Mon Sep 17 00:00:00 2001 From: oleksost Date: Wed, 20 Aug 2025 14:18:54 +0000 Subject: [PATCH 08/18] wip --- fast_llm/layers/ssm/mamba2.py | 217 +++++++++++++++++++++++++++++++++- 1 file changed, 213 insertions(+), 4 deletions(-) diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index caa6e214..da8f9c5f 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -10,10 +10,10 @@ from fast_llm.layers.common.linear import InputParallelLinear, Linear, OutputParallelLinear from fast_llm.layers.common.normalization import RMSNorm from fast_llm.layers.ssm.config import SSMConfig, SSMDimNames, SSMKwargs -from fast_llm.layers.ssm.mamba_layer import init_A, init_dtprojbias +from fast_llm.layers.ssm.mamba_layer import init_dtprojbias from fast_llm.layers.transformer.config import TransformerConfig, TransformerDimNames, TransformerKwargs from fast_llm.layers.transformer.transformer import Mixer -from fast_llm.tensor import ParameterMeta, init_kaiming_, init_ones_, init_uniform_centered_ +from fast_llm.tensor import LambdaInitializer, ParameterMeta, init_kaiming_, init_ones_, init_uniform_centered_ from fast_llm.utils import Assert, div, get_lr_scale _mamba_varlen = False @@ -283,6 +283,208 @@ # return self.out_proj(y) +# class Mamba2M1(Mixer): +# """ +# This code is adapted from https://github.com/jxiw/M1/blob/main/mamba2/hybrid_mamba_layer.py +# """ + +# _mixer_name: typing.ClassVar[str] = "mamba_2" + +# _XZ_DIMS = ( +# TransformerDimNames.batch, +# SSMDimNames.composite_heads_and_head_dim, +# TransformerDimNames.sequence_q, +# ) +# _BC_DIMS = ( +# TransformerDimNames.batch, +# SSMDimNames.composite_heads, +# SSMDimNames.state, +# TransformerDimNames.sequence_q, +# ) + +# def __init__( +# self, +# config: SSMConfig, +# tensor_space: TensorSpace, +# block_index: int, +# transformer_config: TransformerConfig, +# ): +# super().__init__(tensor_space, block_index, debug_level=transformer_config.debug_transformer) +# self._config: SSMConfig = config +# Assert.eq(self._config.activation_type, ActivationType.silu) +# layer_lr_scale: float | None = config.per_layer_lr_scale[block_index] if config.per_layer_lr_scale else None +# lr_scale: float | tuple[float | None, ...] 
| None = get_lr_scale(self._config.mamba_lr_scale, layer_lr_scale) + +# inner_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim] +# xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_state] +# hidden_dim: TensorDim = tensor_space[TransformerDimNames.hidden] +# tensor_space[SSMDimNames.dt_rank] + +# self._local_heads = tensor_space[SSMDimNames.composite_heads].size +# self._local_head_groups = tensor_space[SSMDimNames.head_groups].size +# self._group_heads = div(self._local_heads, self._local_head_groups) +# self._local_inner_size = inner_dim.size +# self._local_xb_size = xb_dim.size + +# conv1d_dim = tensor_space[SSMDimNames.conv1d_dim] +# self.conv1d_weight = ParameterMeta.from_dims( +# ( +# conv1d_dim, +# tensor_space[DefaultDimNames.scalar], +# tensor_space[SSMDimNames.convolution_kernel], +# ), +# init_method=init_uniform_centered_((conv1d_dim.global_size * self._config.conv_kernel_dimension) ** -0.5), +# lr_scale=lr_scale, +# ) +# self.conv1d_bias = ParameterMeta.from_dims( +# (conv1d_dim,), +# init_method=init_uniform_centered_(self._config.conv_kernel_dimension**-0.5), +# lr_scale=lr_scale, +# ) +# self.in_proj = OutputParallelLinear( +# hidden_dim, +# tensor_space[SSMDimNames.concatenated_inner_projection], +# bias=config.add_bias_linear, +# weight_init_method=init_kaiming_(transformer_config.hidden_size), +# sequence_parallel=self._sequence_parallel, +# lr_scale=lr_scale, +# ) + +# self.dt_in_proj = Linear( +# hidden_dim, +# tensor_space[SSMDimNames.composite_heads], +# bias=config.add_bias_linear, +# weight_init_method=init_kaiming_(transformer_config.hidden_size), +# lr_scale=lr_scale, +# ) + +# self.dt_proj_bias = ParameterMeta.from_dims( +# (tensor_space[SSMDimNames.composite_heads],), +# init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), +# lr_scale=lr_scale, +# ) + +# def init_A_uniform(A_init_range: tuple[float, float]=(1, 16)) -> LambdaInitializer: +# def init_(meta: ParameterMeta, tensor: torch.Tensor, generator: torch.Generator) -> None: # noqa +# tensor.uniform_(*A_init_range).log_() +# return LambdaInitializer(init_, requires_global_initialization=True) + +# self.A_log = ParameterMeta.from_dims( +# (tensor_space[SSMDimNames.composite_heads],), +# init_method=init_A_uniform(A_init_range=(1, 16)), +# lr_scale=lr_scale, +# weight_decay=False, +# ) +# self.D = ParameterMeta.from_dims( +# (tensor_space[SSMDimNames.composite_heads],), # can also be nheads x headim +# weight_decay=False, +# init_method=init_ones_, +# lr_scale=lr_scale, +# ) +# self.out_proj = InputParallelLinear( +# inner_dim, +# hidden_dim, +# bias=config.add_bias_linear, +# weight_init_method=init_kaiming_(self._config.d_inner), +# sequence_parallel=self._sequence_parallel, +# lr_scale=lr_scale, +# ) +# self.norm = RMSNorm( +# inner_dim, +# eps=1e-5, +# lr_scale=lr_scale, +# ) + +# def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[torch.Tensor, torch.Tensor | None]: +# """ """ +# assert _mamba_available +# assert _causal_conv1d_available +# cu_seqlens = kwargs[SSMKwargs.cu_seqlens] +# seq_idx = kwargs[SSMKwargs.seq_idx] +# kwargs[SSMKwargs.ssm_position_ids] + +# # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) +# # -> (batch/sequence, sequence/batch, inner_projection) +# inner_projection = self.in_proj(input_) +# dt = self.dt_in_proj(input_) # bs, seq, heads #+ self.dt_proj_bias +# # Standardize to (batch, sequence, inner_projection) +# if 
kwargs[TransformerKwargs.sequence_first]: +# inner_projection = inner_projection.transpose(0, 1) +# dt = dt.transpose(0, 1) + +# sequence_length = inner_projection.size(1) + +# z, xBC = torch.split( +# inner_projection, +# [self._local_inner_size, self._local_xb_size + self._local_xb_size + self._local_inner_size], +# dim=2, +# ) + +# if cu_seqlens is not None: +# # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 +# xBC = _causal_conv1d_fn( +# xBC.transpose(1, 2), +# weight=self.conv1d_weight.squeeze(1), +# bias=self.conv1d_bias, +# seq_idx=seq_idx, +# activation="silu", +# ).transpose(1, 2) +# else: +# xBC = _causal_conv1d_fn( +# x=xBC.transpose(1, 2), weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu" +# ).transpose(1, 2) + +# x, b, c = torch.split(xBC, [self._local_xb_size, self._local_xb_size, self._local_inner_size], dim=-1) +# x = einops.rearrange(x, "b l (xb_group dstate) -> b xb_group l dstate", dstate=self._config.state_size) +# b = einops.rearrange(b, "b l (xb_group dstate) -> b xb_group l dstate", dstate=self._config.state_size) +# batch, num_key_value_heads, slen, head_dim = x.shape +# x = x[:, :, None, :, :].expand(batch, num_key_value_heads, self._group_heads, slen, head_dim) +# x = x.reshape(batch, num_key_value_heads * self._group_heads, slen, head_dim) +# b = b[:, :, None, :, :].expand(batch, num_key_value_heads, self._group_heads, slen, head_dim) +# b = b.reshape(batch, num_key_value_heads * self._group_heads, slen, head_dim) + +# if self._debug_level: +# self._debug_log(z, "z", self._XZ_DIMS, kwargs) +# self._debug_log(x, "x", self._XZ_DIMS, kwargs) +# self._debug_log(b, "b", self._BC_DIMS, kwargs) +# self._debug_log(c, "c", self._BC_DIMS, kwargs) +# self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) + +# dt_limit_kwargs = {} +# # c is b x seq x heads * state +# y = mamba_chunk_scan_combined( +# # rearrange(x, "b l (h p) -> b l h p", p=self.headdim), +# einops.rearrange(x, "b g l p -> b l g p"), +# dt, +# -torch.exp(self.A_log.float()), +# # rearrange(B, "b l (g n) -> b l g n", g=self.ngroups), +# einops.rearrange(b, "b g l n -> b l g n"), +# einops.rearrange(c, "b l (g n) -> b l g n", g=self._local_heads), +# chunk_size=self._config.chunk_size, +# D=self.D, +# z=None, +# dt_bias=self.dt_proj_bias, +# dt_softplus=True, +# seq_idx=seq_idx, +# cu_seqlens=cu_seqlens, +# **dt_limit_kwargs, +# return_final_states=False, +# return_varlen_states=False, +# ) + +# if self._debug_level: +# self._debug_log(y, "y", self._XZ_DIMS, kwargs) + +# # y: (batch, local_heads * state, sequence) -> (batch, sequence, local_heads * state) +# y = y.transpose(1, 2)[:, :sequence_length] +# if kwargs[TransformerKwargs.sequence_first]: +# # TODO: Is contiguous needed? 
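# --- Illustrative aside (not part of this patch): building `seq_idx` from `cu_seqlens`. ---
# The varlen kernels used above (causal_conv1d_fn, mamba_chunk_scan_combined) take an int32
# tensor of shape (batch, seqlen) that tags every token of a packed batch with the index of
# the document it belongs to. A minimal sketch, assuming a single packed batch row; the helper
# name `build_seq_idx` and the example lengths are invented for illustration only.
import torch

def build_seq_idx(cu_seqlens: torch.Tensor) -> torch.Tensor:
    # cu_seqlens: (num_documents + 1,) cumulative lengths, e.g. [0, 5, 9, 16]
    lengths = (cu_seqlens[1:] - cu_seqlens[:-1]).to(torch.int64)
    seq_idx = torch.repeat_interleave(torch.arange(len(lengths), dtype=torch.int32), lengths)
    return seq_idx.unsqueeze(0)  # (1, total_tokens)

cu_seqlens = torch.tensor([0, 5, 9, 16], dtype=torch.int32)
print(build_seq_idx(cu_seqlens))
# tensor([[0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2]], dtype=torch.int32)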
+# y = y.transpose(0, 1).contiguous() +# # (batch/sequence, sequence/batch, local_heads * state) +# # -> (batch/local_sequence, local_sequence/batch, hidden) +# return self.out_proj(y) + + class Mamba2(Mixer): """ This code is adapted from https://github.com/jxiw/M1/blob/main/mamba2/hybrid_mamba_layer.py @@ -363,14 +565,21 @@ def __init__( init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), lr_scale=lr_scale, ) + + def init_A_uniform(A_init_range: tuple[float, float] = (1, 16)) -> LambdaInitializer: + def init_(meta: ParameterMeta, tensor: torch.Tensor, generator: torch.Generator) -> None: # noqa + tensor.uniform_(*A_init_range).log_() + + return LambdaInitializer(init_, requires_global_initialization=True) + self.A_log = ParameterMeta.from_dims( (tensor_space[SSMDimNames.composite_heads],), - init_method=init_A(self._config.state_size, self._config.d_inner), + init_method=init_A_uniform(A_init_range=(1, 16)), lr_scale=lr_scale, weight_decay=False, ) self.D = ParameterMeta.from_dims( - (inner_dim,), + (tensor_space[SSMDimNames.composite_heads],), # can also be nheads x headim weight_decay=False, init_method=init_ones_, lr_scale=lr_scale, From adb06662fabb60f1d93707acbf34014ba0449ee8 Mon Sep 17 00:00:00 2001 From: oleksost Date: Wed, 20 Aug 2025 14:30:42 +0000 Subject: [PATCH 09/18] wip --- fast_llm/layers/ssm/config.py | 15 + fast_llm/layers/ssm/mamba2.py | 2 +- .../models/ssm/external/nemotron/config.py | 249 +++ .../models/ssm/external/nemotron/modeling.py | 1628 +++++++++++++++++ 4 files changed, 1893 insertions(+), 1 deletion(-) create mode 100644 fast_llm/models/ssm/external/nemotron/config.py create mode 100644 fast_llm/models/ssm/external/nemotron/modeling.py diff --git a/fast_llm/layers/ssm/config.py b/fast_llm/layers/ssm/config.py index 7a357bde..e2d0862f 100644 --- a/fast_llm/layers/ssm/config.py +++ b/fast_llm/layers/ssm/config.py @@ -66,6 +66,7 @@ class SSMBlockType(enum.StrEnum): mamba2_discrete = "m2d" mamba2 = "m2" transformer = "t" + nemotron_h_mamba2 = "nm2" def get_mixer_class(self): if self == SSMBlockType.mamba: @@ -80,6 +81,10 @@ def get_mixer_class(self): from fast_llm.layers.ssm.discrete_mamba2 import DiscreteMamba2 return DiscreteMamba2 + elif self == SSMBlockType.nemotron_h_mamba2: + from fast_llm.layers.ssm.mamba2 import NemotronHMamba2 + + return NemotronHMamba2 else: raise NotImplementedError(self) @@ -227,6 +232,13 @@ class SSMConfig(LLMBlockConfig): valid=check_field(Assert.gt, 0), ) + # Nemotron H + n_groups: int = Field( + default=8, + desc="Number of groups for Nemotron H", + hint=FieldHint.architecture, + ) + def _validate(self) -> None: with self._set_implicit_default(): if self.activation_type is None: @@ -244,6 +256,9 @@ def setup_tensor_space(self, tensor_space: TensorSpace, block_type: SSMBlockType elif block_type == SSMBlockType.mamba2: num_heads = div(self.d_inner, self.state_size) num_head_groups = div(self.d_xb, self.state_size) + elif block_type == SSMBlockType.nemotron_h_mamba2: + num_heads = div(self.d_inner, self.state_size) + num_head_groups = self.n_groups elif block_type == SSMBlockType.mamba2_discrete: # TODO: Use different variables? 
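# --- Illustrative aside (not part of this patch): head layout for the nemotron_h_mamba2 branch above. ---
# Made-up sizes, not defaults from this repository:
d_inner, state_size, n_groups = 8192, 128, 8
num_heads = d_inner // state_size        # one head per state_size-wide slice of the inner dimension
num_head_groups = n_groups               # B/C projections are shared within each group
assert num_heads % num_head_groups == 0  # the div() call in the code enforces the same divisibility
group_heads = num_heads // num_head_groups
print(num_heads, num_head_groups, group_heads)  # 64 8 8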
num_heads = self.n_v_heads diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index da8f9c5f..c044cbc8 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -485,7 +485,7 @@ # return self.out_proj(y) -class Mamba2(Mixer): +class NemotronHMamba2(Mixer): """ This code is adapted from https://github.com/jxiw/M1/blob/main/mamba2/hybrid_mamba_layer.py """ diff --git a/fast_llm/models/ssm/external/nemotron/config.py b/fast_llm/models/ssm/external/nemotron/config.py new file mode 100644 index 00000000..058cd0cb --- /dev/null +++ b/fast_llm/models/ssm/external/nemotron/config.py @@ -0,0 +1,249 @@ +# Copyright 2024 AI21 Labs Ltd. and the HuggingFace Inc. team. All rights reserved. +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""NemotronH model configuration""" + +import re + +from transformers.configuration_utils import PretrainedConfig +from transformers.utils import logging + +logger = logging.get_logger(__name__) + + +class NemotronHConfig(PretrainedConfig): + r""" + This is the configuration class to store the configuration of a [`NemotronHModel`]. It is used to instantiate a + NemotronH model according to the specified arguments, defining the model architecture. Instantiating a configuration + with the defaults will yield a similar configuration to that of the NemotronH-v0.1 model. + + [todo](todo) + + Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the + documentation from [`PretrainedConfig`] for more information. + + + Args: + vocab_size (`int`, *optional*, defaults to 131072): + Vocabulary size of the NemotronH model. Defines the number of different tokens that can be represented by the + `inputs_ids` passed when calling [`NemotronHModel`] + tie_word_embeddings (`bool`, *optional*, defaults to `False`): + Whether the model's input and output word embeddings should be tied. Note that this is only relevant if the + model has a output word embedding layer. + hidden_size (`int`, *optional*, defaults to 4096): + Dimension of the hidden representations. + intermediate_size (`int`, *optional*, defaults to 21504): + Dimension of the MLP representations. + num_hidden_layers (`int`, *optional*, defaults to 52): + Number of hidden layers in the Transformer encoder. + hybrid_override_pattern (`str`, *optional*, defaults to `"M-M-M-M*-M-M-M-M-M*-M-M-M-M-M*-M-M-M-M-M*-M-M-M-M-M-"`): + The pattern of the hybrid model. The pattern is a string of characters where each character represents M: Mamba2, *: Attention, -: MLP + num_attention_heads (`int`, *optional*, defaults to 32): + Number of attention heads for each attention layer in the Transformer encoder. + attention_head_dim (`int`, *optional*, defaults to 128): + Dimension of each attention head. + num_key_value_heads (`int`, *optional*, defaults to 8): + This is the number of key_value heads that should be used to implement Grouped Query Attention. 
If + `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA), if + `num_key_value_heads=1` the model will use Multi Query Attention (MQA) otherwise GQA is used. + mlp_hidden_act (`str`, *optional*, defaults to "relu2"): + The non-linear activation function in the MLP layers. + attention_bias (`bool`, *optional*, defaults to `False`): + Whether to use bias in attention layers. + mlp_bias (`bool`, *optional*, defaults to `False`): + Whether to use bias in MLP layers. + use_bias (`bool`, *optional*, defaults to `False`): + Whether to use bias in the model. + initializer_range (`float`, *optional*, defaults to 0.02): + The standard deviation of the truncated_normal_initializer for initializing all weight matrices. + layer_norm_epsilon (`float`, *optional*, defaults to 1e-5): + The epsilon used by the layer normalization layers. + residual_in_fp32 (`bool`, *optional*, defaults to `False`): + Whether or not residuals should be in `float32`. If set to `False` residuals will keep the same `dtype` as the rest of the model. + use_cache (`bool`, *optional*, defaults to `True`): + Whether or not the model should return the last key/values attentions (not used by all models). Only + relevant if `config.is_decoder=True`. + num_logits_to_keep (`int` or `None`, *optional*, defaults to 1): + Number of prompt logits to calculate during generation. If `None`, all logits will be calculated. If an + integer value, only last `num_logits_to_keep` logits will be calculated. + pad_token_id (`int`, *optional*, defaults to 0): + The id of the padding token. + bos_token_id (`int`, *optional*, defaults to 1): + The id of the "beginning-of-sequence" token. + eos_token_id (`int`, *optional*, defaults to 2): + The id of the "end-of-sequence" token. + sliding_window (`int`, *optional*, defaults to None): + Sliding window attention window size. + max_position_embeddings (`int`, *optional*, defaults to 4096): + The maximum sequence length that this model might ever be used with. + attention_dropout (`float`, *optional*, defaults to 0.0): + The dropout ratio for the attention probabilities. + hidden_dropout (`float`, *optional*, defaults to 0.0): + The dropout ratio for the hidden states. + use_mamba_kernels (`bool`, *optional*, defaults to `True`): + Flag indicating whether or not to use the fast mamba kernels. These are available only if `mamba-ssm` and + `causal-conv1d` are installed, and the mamba modules are running on a CUDA device. + ssm_state_size (`int`, *optional*, defaults to 128): + The dimension of the mamba state space latents. + mamba_num_heads (`int`, *optional*, defaults to 128): + Number of heads in Mamba layers. + mamba_n_groups (`int`, *optional*, defaults to 8): + Number of groups in Mamba layers. + mamba_head_dim (`int`, *optional*, defaults to 64): + Dimension of each Mamba head. + mamba_d_conv (`int`, *optional*, defaults to 4): + The size of the mamba convolution kernel. + mamba_expand (`int`, *optional*, defaults to 2): + Expanding factor used to determine the mamba intermediate size. + mamba_hidden_act (`str`, *optional*, defaults to "silu"): + The non-linear activation function in the Mamba layers. + mamba_dt_min (`float`, *optional*, defaults to 0.001): + Minimum value for the time step in Mamba. + mamba_dt_max (`float`, *optional*, defaults to 0.1): + Maximum value for the time step in Mamba. + mamba_dt_limit (`tuple`, *optional*, defaults to (0.0, float("inf"))): + Limits for the time step in Mamba. 
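As an aside, the `mamba_dt_min`, `mamba_dt_max` and `mamba_dt_init_floor` knobs documented here
control the conventional Mamba time-step initialization: dt is sampled log-uniformly in
[dt_min, dt_max], clamped at the floor, and its inverse-softplus is stored as the dt bias.
The snippet below is a minimal sketch of that recipe in plain PyTorch; it is illustrative and
not guaranteed to match the exact initializer used by this model byte for byte.

import math
import torch

def sample_dt_bias(num_heads, dt_min=0.001, dt_max=0.1, dt_init_floor=1e-4):
    dt = torch.exp(torch.rand(num_heads) * (math.log(dt_max) - math.log(dt_min)) + math.log(dt_min))
    dt = torch.clamp(dt, min=dt_init_floor)
    # inverse of softplus, so that softplus(dt_bias) recovers dt at initialization
    return dt + torch.log(-torch.expm1(-dt))

print(sample_dt_bias(4))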
+ mamba_dt_init_floor (`float`, *optional*, defaults to 1e-4): + Floor value for time step initialization in Mamba. + mamba_conv_bias (`bool`, *optional*, defaults to `True`): + Whether to use bias in the convolution layer of the mamba mixer block. + mamba_proj_bias (`bool`, *optional*, defaults to `False`): + Whether to use bias in the input and output projections of the mamba mixer block. + mamba_chunk_size (`int`, *optional*, defaults to 256): + Size of chunks for Mamba processing. + rescale_prenorm_residual (`bool`, *optional*, defaults to `True`): + Whether to rescale the pre-normalization residual connections. + """ + + model_type = "nemotron_h" + keys_to_ignore_at_inference = ["past_key_values"] + + def __init__( + self, + vocab_size=131072, + tie_word_embeddings=False, + hidden_size=4096, + intermediate_size=21504, + num_hidden_layers=52, + hybrid_override_pattern="M-M-M-M*-M-M-M-M-M*-M-M-M-M-M*-M-M-M-M-M*-M-M-M-M-M-", + num_attention_heads=32, + attention_head_dim=128, + num_key_value_heads=8, # nemo: num_query_groups + mlp_hidden_act="relu2", + attention_bias=False, + mlp_bias=False, + use_bias=False, + initializer_range=0.02, # nemo: init_method_std + layer_norm_epsilon=1e-5, # nemo: layernorm_epsilon + residual_in_fp32=False, # Megatron Core default value + use_cache=True, + num_logits_to_keep=1, + pad_token_id=0, + bos_token_id=1, + eos_token_id=2, + sliding_window=None, + max_position_embeddings=4096, + attention_dropout=0.0, + hidden_dropout=0.0, # * ADDED + use_mamba_kernels=True, + ssm_state_size=128, # mamba_state_size + mamba_num_heads=128, + mamba_n_groups=8, # nemo: mamba_ssm_ngroups = num_heads + mamba_head_dim=64, + mamba_d_conv=4, + mamba_expand=2, + mamba_hidden_act="silu", + mamba_dt_min=0.001, + mamba_dt_max=0.1, + mamba_dt_limit=(0.0, float("inf")), + mamba_dt_init_floor=1e-4, + mamba_conv_bias=True, + mamba_proj_bias=False, + mamba_chunk_size=256, + rescale_prenorm_residual=True, + **kwargs, + ): + self.vocab_size = vocab_size + self.tie_word_embeddings = tie_word_embeddings + self.hidden_size = hidden_size + self.intermediate_size = intermediate_size + self.num_hidden_layers = num_hidden_layers + self.hybrid_override_pattern = hybrid_override_pattern + self.num_attention_heads = num_attention_heads + self.attention_head_dim = attention_head_dim + self.sliding_window = sliding_window + self.max_position_embeddings = max_position_embeddings + self.attention_dropout = attention_dropout + self.hidden_dropout = hidden_dropout + + # Validate hybrid_override_pattern + # M: Mamba2, *: Attention, -: MLP + assert ( + len(self.hybrid_override_pattern) == self.num_hidden_layers + ), "hybrid_override_pattern must have the same length as num_hidden_layers" + assert re.match( + r"^[*-M]+$", self.hybrid_override_pattern + ), "hybrid_override_pattern must only contain characters 'M', '*', or '-'" + + # for backward compatibility + if num_key_value_heads is None: + num_key_value_heads = num_attention_heads + + self.num_key_value_heads = num_key_value_heads + self.mlp_hidden_act = mlp_hidden_act + self.attention_bias = attention_bias + self.mlp_bias = mlp_bias + self.use_bias = use_bias + self.initializer_range = initializer_range + self.layer_norm_epsilon = layer_norm_epsilon + self.residual_in_fp32 = residual_in_fp32 + + self.use_cache = use_cache + self.num_logits_to_keep = num_logits_to_keep + + self.use_mamba_kernels = use_mamba_kernels + self.n_groups = mamba_n_groups + self.mamba_head_dim = mamba_head_dim + self.ssm_state_size = ssm_state_size + self.mamba_num_heads = 
mamba_num_heads + self.conv_kernel = mamba_d_conv + self.expand = mamba_expand + self.mamba_hidden_act = mamba_hidden_act + self.time_step_min = mamba_dt_min + self.time_step_max = mamba_dt_max + self.time_step_limit = mamba_dt_limit + self.time_step_floor = mamba_dt_init_floor + self.use_conv_bias = mamba_conv_bias + self.mamba_proj_bias = mamba_proj_bias + self.chunk_size = mamba_chunk_size + self.rescale_prenorm_residual = rescale_prenorm_residual + + super().__init__( + pad_token_id=pad_token_id, + bos_token_id=bos_token_id, + eos_token_id=eos_token_id, + tie_word_embeddings=tie_word_embeddings, + **kwargs, + ) + + @property + def layers_block_type(self): + return [ + ( + "mamba" + if self.hybrid_override_pattern[i] == "M" + else "attention" if self.hybrid_override_pattern[i] == "*" else "mlp" + ) + for i in range(self.num_hidden_layers) + ] diff --git a/fast_llm/models/ssm/external/nemotron/modeling.py b/fast_llm/models/ssm/external/nemotron/modeling.py new file mode 100644 index 00000000..154316c7 --- /dev/null +++ b/fast_llm/models/ssm/external/nemotron/modeling.py @@ -0,0 +1,1628 @@ +# Copyright 2024 HuggingFace Inc. team. +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""PyTorch NemotronH model.""" + +import math +from dataclasses import dataclass +from typing import Any, Optional, Union + +import torch +import torch.utils.checkpoint +from torch import nn +from torch.nn import CrossEntropyLoss +from transformers.activations import ACT2FN +from transformers.cache_utils import DynamicCache # we need __iter__ and __len__ of pkv +from transformers.generation import GenerationMixin +from transformers.modeling_attn_mask_utils import AttentionMaskConverter +from transformers.modeling_utils import PreTrainedModel +from transformers.utils import ( + ModelOutput, + add_code_sample_docstrings, + add_start_docstrings, + add_start_docstrings_to_model_forward, + logging, +) +from transformers.utils.import_utils import ( + is_causal_conv1d_available, + is_flash_attn_2_available, + is_flash_attn_greater_or_equal_2_10, + is_mamba_2_ssm_available, +) + +from fast_llm.models.ssm.external.nemotron.config import NemotronHConfig + +logger = logging.get_logger(__name__) + + +# Copied from transformers.models.mamba.modeling_mamba2.modeling_mamba2.py with MAMBA2->NEMOTRONH,Mamba2->NemotronH +# For Mamba2 components Mamba2->NemotronHMamba2 +if is_mamba_2_ssm_available(): + from mamba_ssm.ops.triton.selective_state_update import selective_state_update + from mamba_ssm.ops.triton.ssd_combined import mamba_chunk_scan_combined, mamba_split_conv1d_scan_combined +else: + mamba_chunk_scan_combined, mamba_split_conv1d_scan_combined, selective_state_update = None, None, None + +try: + # from mamba_ssm.ops.triton.layernorm_gated import RMSNorm as RMSNormGated + from mamba_ssm.ops.triton.layernorm_gated import rmsnorm_fn +except ImportError: + raise ImportError("mamba-ssm is required by the Mamba model but cannot be imported") + +if 
is_causal_conv1d_available(): + from causal_conv1d import causal_conv1d_fn, causal_conv1d_update +else: + causal_conv1d_update, causal_conv1d_fn = None, None + +if is_flash_attn_2_available(): + from transformers.modeling_flash_attention_utils import _flash_attention_forward + +is_fast_path_available = all( + ( + selective_state_update, + mamba_chunk_scan_combined, + mamba_split_conv1d_scan_combined, + causal_conv1d_fn, + causal_conv1d_update, + ) +) + + +_CHECKPOINT_FOR_DOC = "nvidia/Nemotron-H-56B-Base-8K" +_CONFIG_FOR_DOC = "NemotronHConfig" + + +# Helper methods for segment sum computation + + +def pad_tensor_by_size(input_tensor: torch.Tensor, pad_size: int): + """ + Padding x tensor with `pad_size` on the seq_len dim (dim=1) + + Assumes that we only have tensors of either size 4 or 3 + """ + pad_shape = (0, 0, 0, 0, 0, pad_size, 0, 0) if len(input_tensor.shape) == 4 else (0, 0, 0, pad_size, 0, 0) + + return torch.nn.functional.pad(input_tensor, pad_shape, mode="constant", value=0) + + +def reshape_into_chunks(input_tensor, pad_size, chunk_size): + """ + Padding input_tensor with `pad_size` on the seq_len dim (dim=1) and + simultaneously splitting it into chunk sequences. + + Assumes that we only have tensors of either size 4 or 3 + """ + # [bsz, seq_len, ...] -> [bsz, seq_len multiple of chunk_size, ...] + input_tensor = pad_tensor_by_size(input_tensor, pad_size) + + if len(input_tensor.shape) == 3: + # [bsz, seq_len multiple of chunk_size, num_heads] -> [bsz, -1, chunk_size, num_heads] + return input_tensor.reshape(input_tensor.shape[0], -1, chunk_size, input_tensor.shape[2]) + else: + # [bsz, seq_len multiple of chunk_size, num_heads, head_dim or state_size] -> [bsz, -1, chunk_size, num_heads, head_dim or state_size] + return input_tensor.reshape( + input_tensor.shape[0], -1, chunk_size, input_tensor.shape[2], input_tensor.shape[3] + ) + + +def segment_sum(input_tensor): + """ + More stable segment sum calculation. Uses cumulative sums and masking instead of direct subtractions. + """ + chunk_size = input_tensor.size(-1) + # 1. expand input tensor to have an additional dimension and repeat along that dimension + # [..., chunk_size] -> [..., chunk_size, chunk_size] + input_tensor = input_tensor[..., None].expand(*input_tensor.size(), chunk_size) + # 2. create a lower triangular mask with the diagonal set to 0 to 0 out elements above diag + mask = torch.tril(torch.ones(chunk_size, chunk_size, device=input_tensor.device, dtype=torch.bool), diagonal=-1) + input_tensor = input_tensor.masked_fill(~mask, 0) + # 3. compute actual cumsum + tensor_segsum = torch.cumsum(input_tensor, dim=-2) + + # 4. 
apply mask to keep only the lower triangular part of the cumulative sum result (incl diagonal this time) + mask = torch.tril(torch.ones(chunk_size, chunk_size, device=input_tensor.device, dtype=torch.bool), diagonal=0) + tensor_segsum = tensor_segsum.masked_fill(~mask, -torch.inf) + return tensor_segsum + + +def apply_mask_to_padding_states(hidden_states, attention_mask): + """ + Tunes out the hidden states for padding tokens, see https://github.com/state-spaces/mamba/issues/66 + """ + if attention_mask is not None and attention_mask.shape[1] > 1 and attention_mask.shape[0] > 1: + dtype = hidden_states.dtype + hidden_states = (hidden_states * attention_mask[:, :, None]).to(dtype) + + return hidden_states + + +# Copied from https://github.com/huggingface/transformers/blob/main/src/transformers/models/jamba/modeling_jamba.py +class HybridMambaAttentionDynamicCache(DynamicCache): + """ + A dynamic cache that can handle both the attention cache (which has a seq_len dimension) and the mamba cache + (which has a constant shape regardless of seq_len). + + This cache has two sets of lists of tensors: `key_cache` and `value_cache` for attention cache and `conv_states` + and `ssm_states` for mamba cache. Each of these lists has `num_layers` tensors. The expected shape for each tensor + For attention layers, `key_cache` and `value_cache` have a shape of `(batch_size, num_heads, seq_len, head_dim)`, + while `conv_states` and `ssm_states` have a shape of `(batch_size, 0)` (empty tensors). + For mamba layers, `key_cache` and `value_cache` have a shape of `(batch_size, 0)` (empty tensors), + while `conv_states` represents the convolution state and has a shape of `(batch_size, d_inner, d_conv)`, + and `ssm_states` represents the ssm state and has a shape of `(batch_size, d_inner, d_state)`. 
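To make the per-layer bookkeeping concrete, a minimal sketch of what this cache allocates for a toy hybrid pattern (the constructor is the one defined below; the config values are hypothetical):

    import torch
    from fast_llm.models.ssm.external.nemotron.config import NemotronHConfig

    config = NemotronHConfig(
        num_hidden_layers=3,
        hybrid_override_pattern="M*-",  # layer 0: Mamba2, layer 1: attention, layer 2: MLP
        hidden_size=256,
        mamba_expand=2,
        ssm_state_size=128,
        mamba_d_conv=4,
    )
    cache = HybridMambaAttentionDynamicCache(config, batch_size=2, dtype=torch.float32)

    # Layer 0 ("M"):  conv_states[0] -> (2, 512, 4)   == (batch, expand * hidden_size, conv_kernel)
    #                 ssm_states[0]  -> (2, 512, 128) == (batch, expand * hidden_size, ssm_state_size)
    # Layers 1 ("*") and 2 ("-"): conv_states / ssm_states are empty (batch, 0) placeholders;
    # layer 1 only fills key_cache / value_cache on its first update() call.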
+ """ + + def __init__(self, config, batch_size, dtype=torch.float16, device=None): + super().__init__() + self.dtype = dtype + self.hybrid_override_pattern = config.hybrid_override_pattern + self.has_previous_state = False # only used by mamba + intermediate_size = config.expand * config.hidden_size + ssm_state_size = config.ssm_state_size + self.conv_kernel_size = conv_kernel_size = config.conv_kernel + self.conv_states = [] + self.ssm_states = [] + self.transformer_layers = [] + for i in range(config.num_hidden_layers): + if self.hybrid_override_pattern[i] == "M": + # Mamba layer + self.conv_states += [ + torch.zeros(batch_size, intermediate_size, conv_kernel_size, device=device, dtype=dtype) + ] + self.ssm_states += [ + torch.zeros(batch_size, intermediate_size, ssm_state_size, device=device, dtype=dtype) + ] + else: + # Attention or MLP layer + self.conv_states += [torch.tensor([[]] * batch_size, device=device)] + self.ssm_states += [torch.tensor([[]] * batch_size, device=device)] + self.transformer_layers.append(i) + + self.key_cache = [torch.tensor([[]] * batch_size, device=device) for _ in range(config.num_hidden_layers)] + self.value_cache = [torch.tensor([[]] * batch_size, device=device) for _ in range(config.num_hidden_layers)] + + def update( + self, + key_states: torch.Tensor, + value_states: torch.Tensor, + layer_idx: int, + cache_kwargs: Optional[dict[str, Any]] = None, + ) -> tuple[torch.Tensor, torch.Tensor]: + # Update the cache + if self.key_cache[layer_idx].shape[-1] == 0: + self.key_cache[layer_idx] = key_states + self.value_cache[layer_idx] = value_states + else: + self.key_cache[layer_idx] = torch.cat([self.key_cache[layer_idx], key_states], dim=2) + self.value_cache[layer_idx] = torch.cat([self.value_cache[layer_idx], value_states], dim=2) + + return self.key_cache[layer_idx], self.value_cache[layer_idx] + + def reorder_cache(self, beam_idx: torch.LongTensor): + """Reorders the cache for beam search, given the selected beam indices.""" + for layer_idx in range(len(self.key_cache)): + device = self.key_cache[layer_idx].device + self.key_cache[layer_idx] = self.key_cache[layer_idx].index_select(0, beam_idx.to(device)) + device = self.value_cache[layer_idx].device + self.value_cache[layer_idx] = self.value_cache[layer_idx].index_select(0, beam_idx.to(device)) + + device = self.conv_states[layer_idx].device + self.conv_states[layer_idx] = self.conv_states[layer_idx].index_select(0, beam_idx.to(device)) + device = self.ssm_states[layer_idx].device + self.ssm_states[layer_idx] = self.ssm_states[layer_idx].index_select(0, beam_idx.to(device)) + + def get_seq_length(self, layer_idx: Optional[int] = 0) -> int: + """Returns the sequence length of the cached states. 
A layer index can be optionally passed."""
+        # take any layer that contains cache and not empty tensor
+        layer_idx = self.transformer_layers[0] if layer_idx not in self.transformer_layers else layer_idx
+        if len(self.key_cache) <= layer_idx:
+            return 0
+        return self.key_cache[layer_idx].shape[-2]
+
+    def to_legacy_cache(self) -> tuple[tuple[torch.Tensor], tuple[torch.Tensor]]:
+        raise NotImplementedError("HybridMambaAttentionDynamicCache does not have a legacy cache equivalent.")
+
+    @classmethod
+    def from_legacy_cache(cls, past_key_values: Optional[tuple[tuple[torch.FloatTensor]]] = None) -> "DynamicCache":
+        raise NotImplementedError("HybridMambaAttentionDynamicCache does not have a legacy cache equivalent.")
+
+    # Copied from modeling_mamba2.py
+    def update_conv_state(
+        self, layer_idx: int, new_conv_state: torch.Tensor, cache_init: bool = False
+    ) -> torch.Tensor:
+        if cache_init:
+            self.conv_states[layer_idx] = new_conv_state.to(self.conv_states[0].device)
+        else:
+            self.conv_states[layer_idx] = self.conv_states[layer_idx].roll(shifts=-1, dims=-1)
+            # `conv_states` is a list of per-layer tensors, so take the device from this layer's tensor.
+            self.conv_states[layer_idx][:, :, -1] = new_conv_state[:, 0, :].to(self.conv_states[layer_idx].device)
+        return self.conv_states[layer_idx]
+
+    def update_ssm_state(self, layer_idx: int, new_ssm_state: torch.Tensor):
+        self.ssm_states[layer_idx] = new_ssm_state.to(self.ssm_states[0].device)
+        return self.ssm_states[layer_idx]
+
+    def reset(self):
+        # `conv_states` and `ssm_states` are Python lists, so zero each layer's tensor individually.
+        for layer_idx in range(len(self.conv_states)):
+            self.conv_states[layer_idx].zero_()
+            self.ssm_states[layer_idx].zero_()
+
+
+class MambaRMSNormGated(torch.nn.Module):
+    def __init__(self, hidden_size, group_size, eps=1e-5):
+        super().__init__()
+        self.weight = nn.Parameter(torch.ones(hidden_size))
+        self.variance_epsilon = eps
+        self.group_size = group_size
+
+    def forward(self, hidden_states, gate=None):
+        return rmsnorm_fn(
+            x=hidden_states,
+            weight=self.weight,
+            bias=None,  # No bias
+            z=gate,
+            eps=self.variance_epsilon,
+            group_size=self.group_size,
+            norm_before_gate=False,
+        )
+
+
+class NemotronHMamba2Mixer(nn.Module):
+    """
+    Compute ∆, A, B, C, and D the state space parameters and compute the `contextualized_states`.
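The `MambaRMSNormGated` wrapper above delegates to the fused `rmsnorm_fn` Triton kernel. As a rough, unfused equivalent (an assumption about what `norm_before_gate=False` computes, not a drop-in replacement): gate with `silu(z)` first, then RMS-normalize within each group of `group_size` channels, then apply the learned scale.

    import torch
    import torch.nn.functional as F

    def gated_rms_norm_reference(x, z, weight, group_size, eps=1e-5):
        # Gate, group-wise RMS-normalize, then scale by the learned weight.
        x = x * F.silu(z)
        grouped = x.unflatten(-1, (-1, group_size))
        grouped = grouped * torch.rsqrt(grouped.pow(2).mean(-1, keepdim=True) + eps)
        return grouped.flatten(-2) * weight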
+ A, D are input independent (see Mamba paper [1] Section 3.5.2 "Interpretation of A" for why A isn't selective) + ∆, B, C are input-dependent (this is a key difference between Mamba and the linear time invariant S4, + and is why Mamba is called **selective** state spaces) + """ + + def __init__(self, config: NemotronHConfig, layer_idx: int): + super().__init__() + self.num_heads = config.mamba_num_heads + self.hidden_size = config.hidden_size + self.ssm_state_size = config.ssm_state_size + self.conv_kernel_size = config.conv_kernel + self.intermediate_size = config.mamba_num_heads * config.mamba_head_dim + self.layer_idx = layer_idx + self.use_conv_bias = config.use_conv_bias + self.activation = config.mamba_hidden_act + self.act = ACT2FN[config.mamba_hidden_act] + + self.layer_norm_epsilon = config.layer_norm_epsilon + + self.n_groups = config.n_groups + self.head_dim = config.mamba_head_dim + self.chunk_size = config.chunk_size + + self.time_step_limit = config.time_step_limit + self.time_step_min = config.time_step_min + self.time_step_max = config.time_step_max + + self.conv_dim = self.intermediate_size + 2 * self.n_groups * self.ssm_state_size + self.conv1d = nn.Conv1d( + in_channels=self.conv_dim, + out_channels=self.conv_dim, + bias=config.use_conv_bias, + kernel_size=config.conv_kernel, + groups=self.conv_dim, + padding=config.conv_kernel - 1, + ) + + # projection of the input hidden states + projection_size = self.intermediate_size + self.conv_dim + self.num_heads + self.in_proj = nn.Linear( + self.hidden_size, + projection_size, + bias=config.use_bias, + ) + # selective projection used to make dt, B and C input dependant + + # time step projection (discretization) + # instantiate once and copy inv_dt in init_weights of PretrainedModel + self.dt_bias = nn.Parameter(torch.ones(self.num_heads)) + + # S4D real initialization. These are not discretized! + # The core is to load them, compute the discrete states, then write the updated state. Keeps the memory bounded + A = torch.arange(1, self.num_heads + 1) + self.A_log = nn.Parameter(torch.log(A)) + self.A_log._no_weight_decay = True + self.norm = MambaRMSNormGated( + self.intermediate_size, eps=self.layer_norm_epsilon, group_size=self.intermediate_size // self.n_groups + ) + self.D = nn.Parameter(torch.ones(self.num_heads)) + self.D._no_weight_decay = True + + self.out_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.use_bias) + self.use_bias = config.use_bias + + if not is_fast_path_available: + logger.warning_once( + "The fast path is not available because on of `(selective_state_update, causal_conv1d_fn, causal_conv1d_update)`" + " is None. Falling back to the naive implementation. To install follow https://github.com/state-spaces/mamba/#installation and" + " https://github.com/Dao-AILab/causal-conv1d" + ) + + def cuda_kernels_forward( + self, + hidden_states: torch.Tensor, + cache_params: Optional[HybridMambaAttentionDynamicCache] = None, + cache_position: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.Tensor] = None, + ): + # 1. 
Gated MLP's linear projection + hidden_states = apply_mask_to_padding_states(hidden_states, attention_mask) + projected_states = self.in_proj(hidden_states) + + # Set up dimensions for reshapes later + batch_size, seq_len, _ = hidden_states.shape + groups_time_state_size = self.n_groups * self.ssm_state_size + d_mlp = ( + projected_states.shape[-1] + - 2 * self.intermediate_size + - 2 * self.n_groups * self.ssm_state_size + - self.num_heads + ) // 2 + + # Single step calculations via cache + if cache_params is not None and cache_position is not None and cache_position[0] > 0: + _, _, gate, hidden_states_B_C, dt = projected_states.squeeze(1).split( + [d_mlp, d_mlp, self.intermediate_size, self.conv_dim, self.num_heads], dim=-1 + ) + + # 2. Convolution sequence transformation + hidden_states_B_C = causal_conv1d_update( + hidden_states_B_C, + cache_params.conv_states[self.layer_idx], + self.conv1d.weight.squeeze(1), + self.conv1d.bias, + self.activation, + ) + + hidden_states, B, C = torch.split( + hidden_states_B_C, + [self.intermediate_size, groups_time_state_size, groups_time_state_size], + dim=-1, + ) + + # 3. SSM transformation + A = -torch.exp(self.A_log.float()) # (nheads,) + A = A[:, None, ...][:, :, None].expand(-1, self.head_dim, self.ssm_state_size).to(dtype=torch.float32) + dt = dt[:, :, None].expand(-1, -1, self.head_dim) + dt_bias = self.dt_bias[:, None, ...].expand(-1, self.head_dim) + D = self.D[:, None, ...].expand(-1, self.head_dim) + B = B.view(batch_size, self.n_groups, B.shape[1] // self.n_groups) + C = C.view(batch_size, self.n_groups, C.shape[1] // self.n_groups) + hidden_states_reshaped = hidden_states.view(batch_size, self.num_heads, self.head_dim) + hidden_states = selective_state_update( + cache_params.ssm_states[self.layer_idx], + hidden_states_reshaped, + dt, + A, + B, + C, + D, + z=None, + dt_bias=dt_bias, + dt_softplus=True, + ) + hidden_states = hidden_states.view(batch_size, self.num_heads * self.head_dim) + hidden_states = self.norm(hidden_states, gate) + + # 4. Final linear projection + out = self.out_proj(hidden_states)[:, None, ...] + + # Fused calculations or step by step if no initialized cache is found + else: + A = -torch.exp(self.A_log.float()) # (num_heads) or (intermediate_size, state_size) + dt_limit_kwargs = {} if self.time_step_limit == (0.0, float("inf")) else {"dt_limit": self.time_step_limit} + + # 2-4. Fused kernel for conv1d, SSM, and the final projection + if self.training and cache_params is None: + out = mamba_split_conv1d_scan_combined( + projected_states, + self.conv1d.weight.squeeze(1), + self.conv1d.bias, + self.dt_bias, + A, + D=self.D, + chunk_size=self.chunk_size, + seq_idx=None, # was seq_idx + activation=self.activation, + rmsnorm_weight=self.norm.weight, + rmsnorm_eps=self.norm.variance_epsilon, + outproj_weight=self.out_proj.weight, + outproj_bias=self.out_proj.bias, + headdim=self.head_dim, + ngroups=self.n_groups, + norm_before_gate=False, + return_final_states=False, + **dt_limit_kwargs, + ) + + else: + _, _, gate, hidden_states_B_C, dt = projected_states.split( + [d_mlp, d_mlp, self.intermediate_size, self.conv_dim, self.num_heads], dim=-1 + ) + + # 2. 
Convolution sequence transformation + # Init cache + if cache_params is not None: + hidden_states_B_C_transposed = hidden_states_B_C.transpose(1, 2) + conv_states = nn.functional.pad( + hidden_states_B_C_transposed, + (cache_params.conv_kernel_size - hidden_states_B_C_transposed.shape[-1], 0), + ) + cache_params.update_conv_state( + layer_idx=self.layer_idx, new_conv_state=conv_states, cache_init=True + ) + + if self.activation not in ["silu", "swish"]: + hidden_states_B_C = self.act( + self.conv1d(hidden_states_B_C.transpose(1, 2))[..., :seq_len].transpose(1, 2) + ) + else: + hidden_states_B_C = causal_conv1d_fn( + x=hidden_states_B_C.transpose(1, 2), + weight=self.conv1d.weight.squeeze(1), + bias=self.conv1d.bias, + activation=self.activation, + ).transpose(1, 2) + hidden_states_B_C = apply_mask_to_padding_states(hidden_states_B_C, attention_mask) + hidden_states, B, C = torch.split( + hidden_states_B_C, + [self.intermediate_size, groups_time_state_size, groups_time_state_size], + dim=-1, + ) + + # 3. SSM transformation + scan_output, ssm_state = mamba_chunk_scan_combined( + hidden_states.view(batch_size, seq_len, -1, self.head_dim), + dt, + A, + B.view(batch_size, seq_len, self.n_groups, -1), + C.view(batch_size, seq_len, self.n_groups, -1), + chunk_size=self.chunk_size, + D=self.D, + z=None, + seq_idx=None, + return_final_states=True, + dt_bias=self.dt_bias, + dt_softplus=True, + **dt_limit_kwargs, + ) + + # Init cache + if ssm_state is not None and cache_params is not None: + cache_params.update_ssm_state(layer_idx=self.layer_idx, new_ssm_state=ssm_state) + + scan_output = scan_output.view(batch_size, seq_len, -1) + + # Multiply "gate" branch and apply extra normalization layer + scan_output = self.norm(scan_output, gate) + + # 4. Final linear projection + out = self.out_proj(scan_output) + return out + + # fmt: off + def torch_forward(self, input_states, cache_params: Optional[HybridMambaAttentionDynamicCache]=None, cache_position:Optional[torch.LongTensor]=None, attention_mask: Optional[torch.Tensor]=None): + batch_size, seq_len, _ = input_states.shape + dtype = input_states.dtype + + # 1. Gated MLP's linear projection + input_states = apply_mask_to_padding_states(input_states, attention_mask) + projected_states = self.in_proj(input_states) + d_mlp = (projected_states.shape[-1] - 2 * self.intermediate_size - 2 * self.n_groups * self.ssm_state_size-self.num_heads) // 2 + _, _, gate, hidden_states_B_C, dt = projected_states.split( + [d_mlp, d_mlp, self.intermediate_size, self.conv_dim, self.num_heads], dim=-1 + ) + + # 2. 
Convolution sequence transformation + if cache_params is not None and cache_position is not None and cache_position[0] > 0: + cache_params.update_conv_state(layer_idx=self.layer_idx, new_conv_state=hidden_states_B_C, cache_init=False) + + # We need to guarantee that anything regarding the cache is on the same device + conv_states = cache_params.conv_states[self.layer_idx].to(device=self.conv1d.weight.device) + + hidden_states_B_C = torch.sum( + conv_states * self.conv1d.weight.squeeze(1), dim=-1 + ) + if self.use_conv_bias: + hidden_states_B_C = hidden_states_B_C + self.conv1d.bias + hidden_states_B_C = self.act(hidden_states_B_C) + else: + # Init cache + if cache_params is not None: + hidden_states_B_C_transposed = hidden_states_B_C.transpose(1, 2) + conv_states = nn.functional.pad( + hidden_states_B_C_transposed, (cache_params.conv_kernel_size - hidden_states_B_C_transposed.shape[-1], 0) + ) + cache_params.update_conv_state(layer_idx=self.layer_idx, new_conv_state=conv_states, cache_init=True) + + hidden_states_B_C = self.act(self.conv1d(hidden_states_B_C.transpose(1, 2))[..., :seq_len].transpose(1, 2)) + + hidden_states_B_C = apply_mask_to_padding_states(hidden_states_B_C, attention_mask) + hidden_states, B, C = torch.split( + hidden_states_B_C, + [self.intermediate_size, self.n_groups * self.ssm_state_size, self.n_groups * self.ssm_state_size], + dim=-1 + ) + + # 3. SSM transformation + A = -torch.exp(self.A_log.float()) # [num_heads] + if cache_params is not None and cache_position is not None and cache_position[0] > 0: + # We need to guarantee that anything regarding the cache is on the same device + cache_device = cache_params.ssm_states.device + + # Note: there is no need to pad parameter matrices here, as there is just one new token + # for batched generation + dt = dt[:, 0, :][:, None, ...] 
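+            # Summary of the single-token state update performed below
+            # (shapes: batch b, heads h, head_dim d, state size n):
+            #   dt  = softplus(dt + dt_bias), clamped to time_step_limit   # (b, h, d)
+            #   dA  = exp(dt * A)                                          # (b, h, d, n)
+            #   dBx = (dt * B) * x                                         # (b, h, d, n)
+            #   ssm_state <- ssm_state * dA + dBx                          # discretized recurrence
+            #   y   = ssm_state @ C + D * x                                # per-head output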
+ dt = dt.transpose(1, 2).expand(batch_size, dt.shape[-1], self.head_dim) + # [num_heads] -> [num_heads, head_dim] + dt_bias = self.dt_bias[..., None].expand(self.dt_bias.shape[0], self.head_dim) + + dt = torch.nn.functional.softplus(dt + dt_bias.to(dt.dtype)) + dt = torch.clamp(dt, self.time_step_limit[0], self.time_step_limit[1]) + A = A[..., None, None].expand(self.num_heads, self.head_dim, self.ssm_state_size).to(dtype=torch.float32) + # [bsz, num_heads, head_dim, state_size] + dA = (torch.exp(dt[..., None] * A)).to(device=cache_device) + + # Discretize B + # [bsz, n_groups * state_size] -> [bsz, n_groups, 1, state_size] -> + # -> [bsz, n_groups, group to head repetition factor, state_size] -> [bsz, num_heads, state_size] + B = B.reshape(batch_size, self.n_groups, -1)[..., None, :] + B = B.expand(batch_size, self.n_groups, self.num_heads // self.n_groups, B.shape[-1]).contiguous() + B = B.reshape(batch_size, -1, B.shape[-1]) + # [bsz, num_heads, head_dim, state_size] + dB = dt[..., None] * B[..., None, :] + + # Discretize x into dB + # [bsz, intermediate_size] -> [bsz, num_heads, head_dim] + hidden_states = hidden_states.reshape(batch_size, -1, self.head_dim) + dBx = (dB * hidden_states[..., None]).to(device=cache_device) + + # State calculation + cache_params.update_ssm_state( + layer_idx=self.layer_idx, + new_ssm_state=cache_params.ssm_states[self.layer_idx] * dA + dBx + ) + + # Subsequent output + # [bsz, n_groups * state_size] -> [bsz, num_heads, state_size] + C = C.reshape(batch_size, self.n_groups, -1)[..., None, :] + C = C.expand(batch_size, self.n_groups, self.num_heads // self.n_groups, C.shape[-1]).contiguous() + C = C.reshape(batch_size, -1, C.shape[-1]) + # [bsz, num_heads, head_dim] + + ssm_states = cache_params.ssm_states[self.layer_idx].to(device=C.device, dtype=C.dtype) # Shape: [b, h, d, n] + # Reshape ssm_states to merge the first two dimensions + ssm_states_reshaped = ssm_states.view(batch_size * self.num_heads, self.head_dim, self.ssm_state_size) # Shape: [b*h, d, n] + C_reshaped = C.view(batch_size * self.num_heads, self.ssm_state_size, 1) # Shape: [b*h, n, 1] + y = torch.bmm(ssm_states_reshaped, C_reshaped) + y = y.view(batch_size, self.num_heads, self.head_dim) + + # D skip connection + # [num_heads] -> [num_heads, head_dim] + D = self.D[..., None].expand(self.D.shape[0], self.head_dim) + y = (y + hidden_states * D).to(y.dtype) + + # [bsz, num_heads, head_dim] -> [bsz, 1, intermediate_size] + y = y.reshape(batch_size, -1)[:, None, ...] 
+ else: + # begin ssd naive implementation without einsums + dt = nn.functional.softplus(dt + self.dt_bias) + dt = torch.clamp(dt, self.time_step_limit[0], self.time_step_limit[1]) + hidden_states = hidden_states.reshape(batch_size, seq_len, -1, self.head_dim).float() + B = B.reshape(batch_size, seq_len, -1, self.ssm_state_size).float() + C = C.reshape(batch_size, seq_len, -1, self.ssm_state_size).float() + B = B.repeat(1, 1, self.num_heads // self.n_groups, 1) + C = C.repeat(1, 1, self.num_heads // self.n_groups, 1) + pad_size = (self.chunk_size - seq_len % self.chunk_size) % self.chunk_size + + D_residual = self.D[..., None] * pad_tensor_by_size(hidden_states, pad_size) + + # Discretize x and A + hidden_states = hidden_states * dt[..., None] + A = A.to(hidden_states.dtype) * dt + + # Rearrange into blocks/chunks + hidden_states, A, B, C = (reshape_into_chunks(t, pad_size, self.chunk_size) for t in (hidden_states, A, B, C)) + + # [bsz, -1, chunk_size, num_heads] -> [bsz, num_heads, -1, chunk_size] + A = A.permute(0, 3, 1, 2) + A_cumsum = torch.cumsum(A, dim=-1) + + # 1. Compute the output for each intra-chunk (diagonal blocks) + # This is the analog of a causal mask + L = torch.exp(segment_sum(A)) + + # Contraction of C and B to get G (attention-weights like) + G_intermediate = C[:, :, :, None, :, :] * B[:, :, None, :, :, :] # shape: (b, c, l, s, h, n) + G = G_intermediate.sum(dim=-1) # shape: (b, c, l, s, h) + + # Compute M, equivalent to applying attention mask to weights + M_intermediate = G[..., None] * L.permute(0, 2, 3, 4, 1)[..., None] + M = M_intermediate.sum(dim=-1) + + # Compute Y_diag (apply to values) + Y_diag = (M[..., None] * hidden_states[:, :, None]).sum(dim=3) + + # 2. Compute the state for each intra-chunk + # (right term of low-rank factorization of off-diagonal blocks; B terms) + decay_states = torch.exp(A_cumsum[:, :, :, -1:] - A_cumsum) + B_decay = B * decay_states.permute(0, -2, -1, 1)[..., None] + states = (B_decay[..., None, :] * hidden_states[..., None]).sum(dim=2) + + # 3. Compute the inter-chunk SSM recurrence; produces correct SSM states at chunk boundaries + # (middle term of factorization of off-diag blocks; A terms) + if cache_params is not None and cache_position is not None and cache_position[0] > 0: + previous_states = cache_params.ssm_states[self.layer_idx][:, None, ...].to(device=states.device) + else: + previous_states = torch.zeros_like(states[:, :1]) + states = torch.cat([previous_states, states], dim=1) + decay_chunk = torch.exp(segment_sum(nn.functional.pad(A_cumsum[:, :, :, -1], (1, 0)))) + decay_chunk = decay_chunk.transpose(1, 3) + new_states = (decay_chunk[..., None, None] * states[:, :, None, ...]).sum(dim=1) + states, ssm_state = new_states[:, :-1], new_states[:, -1] + + # 4. 
Compute state -> output conversion per chunk + # (left term of low-rank factorization of off-diagonal blocks; C terms) + state_decay_out = torch.exp(A_cumsum) + C_times_states = (C[..., None, :] * states[:, :, None, ...]) + state_decay_out_permuted = state_decay_out.permute(0, 2, 3, 1) + Y_off = (C_times_states.sum(-1) * state_decay_out_permuted[..., None]) + + # Add output of intra-chunk and inter-chunk terms (diagonal and off-diagonal blocks) + y = Y_diag + Y_off + # [bsz, -1, self.chunk_size, num_heads, head_dim] -> [bsz, (padded) seq_len, num_heads, head_dim] + y = y.reshape(batch_size, -1, self.num_heads, self.head_dim) + + y = y + D_residual + # Cutting off padded chunks + if pad_size > 0: + y = y[:, :seq_len, :, :] + y = y.reshape(batch_size, seq_len, -1) + + # Init cache + if ssm_state is not None and cache_params is not None: + cache_params.update_ssm_state(layer_idx=self.layer_idx, new_ssm_state=ssm_state) + + scan_output = self.norm(y, gate) + + # end ssd naive + + # 4. Final linear projection + contextualized_states = self.out_proj(scan_output.to(dtype)) # [batch, seq_len, hidden_size] + return contextualized_states + # fmt: on + + def forward( + self, + hidden_states, + cache_params: Optional[HybridMambaAttentionDynamicCache] = None, + cache_position: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.Tensor] = None, + ): + if is_fast_path_available and "cuda" in self.in_proj.weight.device.type: + return self.cuda_kernels_forward(hidden_states, cache_params, cache_position, attention_mask) + dtype = hidden_states.dtype + if attention_mask is not None and attention_mask.shape[1] > 1 and attention_mask.shape[0] > 1: + # tune out hidden states for pad tokens, see https://github.com/state-spaces/mamba/issues/66 + hidden_states = (hidden_states * attention_mask[:, :, None]).to(dtype) + + return self.torch_forward(hidden_states, cache_params, cache_position, attention_mask) + + +class NemotronHRMSNorm(nn.Module): + def __init__(self, hidden_size, eps=1e-6): + """ + NemotronHRMSNorm is equivalent to T5LayerNorm and LlamaRMSNorm + """ + super().__init__() + self.weight = nn.Parameter(torch.ones(hidden_size)) + self.variance_epsilon = eps + + def forward(self, hidden_states): + input_dtype = hidden_states.dtype + hidden_states = hidden_states.to(torch.float32) + variance = hidden_states.pow(2).mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon) + # Weights are in float32 + return (self.weight.to(torch.float32) * hidden_states).to(input_dtype) + + +class NemotronHBlock(nn.Module): + def __init__(self, config, layer_idx): + super().__init__() + self.config = config + self.layer_idx = layer_idx + self.residual_in_fp32 = config.residual_in_fp32 + self.norm = NemotronHRMSNorm(config.hidden_size, eps=config.layer_norm_epsilon) + + # M: Mamba2, *: Attention, -: MLP + self.block_type = config.layers_block_type[layer_idx] + if self.block_type == "mamba": + self.mixer = NemotronHMamba2Mixer(config, layer_idx=layer_idx) + elif self.block_type == "attention": + self.mixer = NEMOTRONH_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx=layer_idx) + elif self.block_type == "mlp": + self.mixer = NemotronHMLP(config, layer_idx=layer_idx) + else: + raise ValueError(f"Invalid layer pattern {config.hybrid_override_pattern[layer_idx]}") + + def forward( + self, + hidden_states, + cache_params: Optional[HybridMambaAttentionDynamicCache] = None, + cache_position: Optional[torch.LongTensor] = None, + attention_mask: 
Optional[torch.Tensor] = None, + ): + with torch.cuda.stream(torch.cuda.default_stream(hidden_states.device)): + # * Use torch.cuda.stream() to avoid NaN issues when using multiple GPUs + residual = hidden_states + hidden_states = self.norm(hidden_states.to(dtype=self.norm.weight.dtype)) + if self.residual_in_fp32: + residual = residual.to(torch.float32) + + if self.block_type == "mamba": + hidden_states = self.mixer(hidden_states, cache_params=cache_params, cache_position=cache_position) + elif self.block_type == "attention": + hidden_states = self.mixer(hidden_states, cache_position=cache_position) + hidden_states = hidden_states[0] + elif self.block_type == "mlp": + hidden_states = self.mixer(hidden_states) + else: + raise ValueError(f"Invalid block_type: {self.block_type}") + + hidden_states = residual + hidden_states + return hidden_states + + +# Copied from transformers.models.nemotron.modeling_nemotron Nemotron->NemotronH +class NemotronHMLP(nn.Module): + def __init__(self, config, layer_idx: Optional[int] = None): + super().__init__() + self.config = config + self.layer_idx = layer_idx + if layer_idx is None: + logger.warning_once( + f"Instantiating {self.__class__.__name__} without passing a `layer_idx` is not recommended and will " + "lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` " + "when creating this class." + ) + self.hidden_size = config.hidden_size + self.intermediate_size = config.intermediate_size + self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) + self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) + self.act_fn = ACT2FN[config.mlp_hidden_act] + + def forward(self, x): + return self.down_proj(self.act_fn(self.up_proj(x))) + + +# Copied from transformers.models.llama.modeling_llama.repeat_kv +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """ + This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch, + num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim) + """ + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim) + return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim) + + +class NemotronHAttention(nn.Module): + """Multi-headed attention from 'Attention Is All You Need' paper""" + + def __init__(self, config: NemotronHConfig, layer_idx: Optional[int] = None): + super().__init__() + self.config = config + self.layer_idx = layer_idx + if layer_idx is None: + logger.warning_once( + f"Instantiating {self.__class__.__name__} without passing a `layer_idx` is not recommended and will " + "lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` " + "when creating this class." 
+ ) + + self.attention_dropout = config.attention_dropout + self.hidden_size = config.hidden_size + self.num_heads = config.num_attention_heads + if config.attention_head_dim is not None: + self.head_dim = config.attention_head_dim + else: + self.head_dim = config.hidden_size // config.num_attention_heads + self.num_key_value_heads = config.num_key_value_heads + self.num_key_value_groups = self.num_heads // self.num_key_value_heads + self.max_position_embeddings = config.max_position_embeddings + self.is_causal = True + + self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias) + self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) + self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) + self.o_proj = nn.Linear(self.head_dim * self.num_heads, self.hidden_size, bias=config.attention_bias) + + def forward( + self, + hidden_states: torch.Tensor, + # position_embeddings: Tuple[torch.Tensor, torch.Tensor], #TODO + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[HybridMambaAttentionDynamicCache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, + ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]: + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2) + + if past_key_value is not None: + key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx) + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + causal_mask = attention_mask + if attention_mask is not None: # no matter the length, we just slice it + causal_mask = attention_mask[:, :, :, : key_states.shape[-2]] + + if query_states.device.type == "cuda" and attention_mask is not None: + query_states = query_states.contiguous() + key_states = key_states.contiguous() + value_states = value_states.contiguous() + + is_causal = True if causal_mask is None and q_len > 1 else False + + attn_output = torch.nn.functional.scaled_dot_product_attention( + query_states, + key_states, + value_states, + attn_mask=causal_mask, + dropout_p=self.attention_dropout if self.training else 0.0, + is_causal=is_causal, + ) + attn_output = attn_output.transpose(1, 2).contiguous() + # attn_output = attn_output.view(bsz, q_len, self.hidden_size) + attn_output = attn_output.view(bsz, q_len, self.num_heads * self.head_dim) + + attn_output = self.o_proj(attn_output) + + return attn_output, None, past_key_value + + +# Adapted from transformers.models.mistral.modeling_mistral.MistralFlashAttention2 with Mistral->Jamba +# class JambaFlashAttention2(JambaAttention): +class NemotronHFlashAttention2(NemotronHAttention): + """ + Jamba flash attention module. This module inherits from `JambaAttention` as the weights of the module stays + untouched. 
The only required change would be on the forward pass where it needs to correctly call the public API of + flash attention and deal with padding tokens in case the input contains any of them. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # TODO: Should be removed once Flash Attention for RoCm is bumped to 2.1. + # flash_attn<2.1 generates top-left aligned causal mask, while what is needed here is bottom-right alignement, that was made default for flash_attn>=2.1. This attribute is used to handle this difference. Reference: https://github.com/Dao-AILab/flash-attention/releases/tag/v2.1.0. + # Beware that with flash_attn<2.1, using q_seqlen != k_seqlen (except for the case q_seqlen == 1) produces a wrong mask (top-left). + self._flash_attn_uses_top_left_mask = not is_flash_attn_greater_or_equal_2_10() + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[HybridMambaAttentionDynamicCache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, + **kwargs, + ): + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + # Flash attention requires the input to have the shape + # batch_size x seq_length x head_dim x hidden_dim + # therefore we just need to keep the original shape + query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2) + + if past_key_value is not None: + key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx) + + # repeat k/v heads if n_kv_heads < n_heads + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + dropout_rate = 0.0 if not self.training else self.attention_dropout + + # In PEFT, usually we cast the layer norms in float32 for training stability reasons + # therefore the input hidden states gets silently casted in float32. Hence, we need + # cast them back in float16 just to be sure everything works as expected. + input_dtype = query_states.dtype + if input_dtype == torch.float32: + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, "_pre_quantization_dtype"): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.q_proj.weight.dtype + + logger.warning_once( + f"The input hidden states seems to be silently casted in float32, this might be related to" + f" the fact you have upcasted embedding or layer norm layers in float32. We will cast back the input in" + f" {target_dtype}." 
+ ) + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + # Reashape to the expected shape for Flash Attention + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + attn_output = _flash_attention_forward( + query_states, + key_states, + value_states, + attention_mask, + q_len, + dropout=dropout_rate, + sliding_window=getattr(self.config, "sliding_window", None), + is_causal=self.is_causal, + use_top_left_mask=self._flash_attn_uses_top_left_mask, + ) + + # attn_output = attn_output.reshape(bsz, q_len, self.hidden_size).contiguous() + attn_output = attn_output.reshape(bsz, q_len, self.num_heads * self.head_dim).contiguous() + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +# Adapted from transformers.models.mistral.modeling_mistral.MistralSdpaAttention with Mistral->Jamba +# class JambaSdpaAttention(JambaAttention): +class NemotronHSdpaAttention(NemotronHAttention): + """ + Jamba attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from + `JambaAttention` as the weights of the module stays untouched. The only changes are on the forward pass to adapt to + SDPA API. + """ + + # Adapted from NemotronHAttention.forward + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[HybridMambaAttentionDynamicCache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, + ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]: + if output_attentions: + # TODO: Improve this warning with e.g. `model.config.attn_implementation = "manual"` once this is implemented. + logger.warning_once( + "NemotronHModel is using NemotronHSdpaAttention, but `torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`. Falling back to the manual attention implementation, " + 'but specifying the manual implementation will be required from Transformers version v5.0.0 onwards. This warning can be removed using the argument `attn_implementation="eager"` when loading the model.' 
+ ) + return super().forward( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2) + + if past_key_value is not None: + key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx) + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + causal_mask = attention_mask + if attention_mask is not None: + causal_mask = causal_mask[:, :, :, : key_states.shape[-2]] + + # SDPA with memory-efficient backend is currently (torch==2.1.2) bugged with non-contiguous inputs with custom attn_mask, + # Reference: https://github.com/pytorch/pytorch/issues/112577. + if query_states.device.type == "cuda" and attention_mask is not None: + query_states = query_states.contiguous() + key_states = key_states.contiguous() + value_states = value_states.contiguous() + + # We dispatch to SDPA's Flash Attention or Efficient kernels via this `is_causal` if statement instead of an inline conditional assignment + # in SDPA to support both torch.compile's dynamic shapes and full graph options. An inline conditional prevents dynamic shapes from compiling. + # The q_len > 1 is necessary to match with AttentionMaskConverter.to_causal_4d that does not create a causal mask in case q_len == 1. + is_causal = True if self.is_causal and causal_mask is None and q_len > 1 else False + + attn_output = torch.nn.functional.scaled_dot_product_attention( + query_states, + key_states, + value_states, + attn_mask=causal_mask, + dropout_p=self.attention_dropout if self.training else 0.0, + is_causal=is_causal, + ) + + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.view(bsz, q_len, self.hidden_size) + + attn_output = self.o_proj(attn_output) + + return attn_output, None, past_key_value + + +NEMOTRONH_ATTENTION_CLASSES = { + "eager": NemotronHAttention, + "flash_attention_2": NemotronHFlashAttention2, + "sdpa": NemotronHSdpaAttention, +} + + +# Copied from transformers.models.mamba.modeling_mamba2.Mamba2PreTrainedModel +class NemotronHPreTrainedModel(PreTrainedModel): + """ + An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained + models. 
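Which attention class is used follows the standard `transformers` `attn_implementation` switch; a hedged usage sketch (checkpoint name taken from `_CHECKPOINT_FOR_DOC` above, flash-attn availability assumed):

    import torch

    model = NemotronHForCausalLM.from_pretrained(
        "nvidia/Nemotron-H-56B-Base-8K",
        attn_implementation="flash_attention_2",  # or "sdpa" / "eager"
        torch_dtype=torch.bfloat16,
    )
    # NemotronHBlock then instantiates NEMOTRONH_ATTENTION_CLASSES[config._attn_implementation]
    # for every "*" entry in hybrid_override_pattern.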
+ """ + + config_class = NemotronHConfig + base_model_prefix = "backbone" + _no_split_modules = ["NemotronHBlock"] + supports_gradient_checkpointing = True + _is_stateful = True + + def _init_weights(self, module): + """Initialize the weights.""" + if isinstance(module, NemotronHMamba2Mixer): + module.A_log._no_weight_decay = True + module.D._no_weight_decay = True + + dt = torch.exp( + torch.rand(self.config.mamba_num_heads) + * (math.log(self.config.time_step_max) - math.log(self.config.time_step_min)) + + math.log(self.config.time_step_min) + ).clamp(min=self.config.time_step_floor) + + # # Inverse of softplus: https://github.com/pytorch/pytorch/issues/72759 + inv_dt = dt + torch.log(-torch.expm1(-dt)) + with torch.no_grad(): + module.dt_bias.copy_(inv_dt) + module.dt_bias._no_reinit = True + + if isinstance(module, nn.Linear): + if module.bias is not None: + if not getattr(module.bias, "_no_reinit", False): + nn.init.zeros_(module.bias) + elif isinstance(module, nn.Embedding): + nn.init.normal_(module.weight, std=self.config.initializer_range) + + # TODO: Check + if self.config.rescale_prenorm_residual: + # Reinitialize selected weights subject to the OpenAI GPT-2 Paper Scheme: + # > A modified initialization which accounts for the accumulation on the residual path with model depth. Scale + # > the weights of residual layers at initialization by a factor of 1/√N where N is the # of residual layers. + # > -- GPT-2 :: https://openai.com/blog/better-language-models/ + # + # Reference (Megatron-LM): https://github.com/NVIDIA/Megatron-LM/blob/main/megatron/model/gpt_model.py + for name, p in module.named_parameters(): + if name in ["out_proj.weight"]: + # Special Scaled Initialization --> There are 2 Layer Norms per Transformer Block + # Following Pytorch init, except scale by 1/sqrt(2 * n_layer) + # We need to reinit p since this code could be called multiple times + # Having just p *= scale would repeatedly scale it down + nn.init.kaiming_uniform_(p, a=math.sqrt(5)) + with torch.no_grad(): + p /= math.sqrt(self.config.num_hidden_layers) + + +@dataclass +# Copied from transformers.models.mamba.modeling_mamba2.Mamba2Output with MAMBA2->NemotronH,Mamba2->NemotronH +class NemotronHOutput(ModelOutput): + """ + Class for the NemotronH model outputs. + + Args: + last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`): + Sequence of hidden-states at the output of the last layer of the model. + cache_params (`HybridMambaAttentionDynamicCache`): + The state of the model at the last time step. Can be used in a forward method with the next `input_ids` to + avoid providing the old `input_ids`. + + Includes both the State space model state matrices after the selective scan, and the Convolutional states + hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): + Tuple of `torch.FloatTensor` (one for the output of the embeddings, if the model has an embedding layer, + + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. + + Hidden-states of the model at the output of each layer plus the optional initial embedding outputs. 
+ """ + + last_hidden_state: Optional[torch.FloatTensor] = None + cache_params: Optional[HybridMambaAttentionDynamicCache] = None + hidden_states: Optional[tuple[torch.FloatTensor]] = None + attentions: Optional[tuple[torch.FloatTensor]] = None + + +@dataclass +# Copied from transformers.models.mamba2.modeling_mamba2.MambaCausalLMOutput with Mamba2->NemotronH +class NemotronHCausalLMOutput(ModelOutput): + """ + Base class for causal language model (or autoregressive) outputs. + + Args: + loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided): + Language modeling loss (for next-token prediction). + logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.vocab_size)`): + Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). + cache_params (`HybridMambaAttentionDynamicCache`): + The state of the model at the last time step. Can be used in a forward method with the next `input_ids` to + avoid providing the old `input_ids`. + + Includes both the State space model state matrices after the selective scan, and the Convolutional states + hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): + Tuple of `torch.FloatTensor` (one for the output of the embeddings, if the model has an embedding layer, + + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. + + Hidden-states of the model at the output of each layer plus the optional initial embedding outputs. + """ + + loss: Optional[torch.FloatTensor] = None + logits: Optional[torch.FloatTensor] = None + cache_params: Optional[HybridMambaAttentionDynamicCache] = None + hidden_states: Optional[tuple[torch.FloatTensor]] = None + attentions: Optional[tuple[torch.FloatTensor]] = None + + +NEMOTRONH_START_DOCSTRING = r""" + + This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the + library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads + etc.) + + This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. + Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage + and behavior. + + Parameters: + config ([`NemotronHConfig`]): Model configuration class with all the parameters of the model. + Initializing with a config file does not load the weights associated with the model, only the + configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. +""" + +NEMOTRONH_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, input_ids_length)`, *optional*): + Indices of input sequence tokens in the vocabulary. + + If `cache_params.seqlen_offset>0`, only `input_ids` that do not have their past calculated should be passed as + `input_ids`. + + Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. 
This + is useful if you want more control over how to convert `input_ids` indices into associated vectors than the + model's internal embedding lookup matrix. + position_ids (`torch.LongTensor` of shape `(batch_size,)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. + cache_params (`HybridMambaAttentionDynamicCache`, *optional*): + If passed along, the model uses the previous state in all the blocks (which will give the output for the + `input_ids` provided as if the model add `state_input_ids + input_ids` as context). + use_cache (`bool`, *optional*): + If set to `True`, the `cache_params` is returned and can be used to quickly generate the next logits. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. + cache_position (`torch.LongTensor` of shape `(batch_size,)`, *optional*): + The position of the current input in the cache. This is used to ensure that the cache is correctly updated. + If `cache_params` is passed, `cache_position` should also be passed. + attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) +""" + + +@add_start_docstrings( + "The bare NemotronH Model transformer outputting raw hidden-states without any specific head on top.", + NEMOTRONH_START_DOCSTRING, +) +class NemotronHModel(NemotronHPreTrainedModel): + def __init__(self, config): + super().__init__(config) + + self.embeddings = nn.Embedding(config.vocab_size, config.hidden_size) + self.layers = nn.ModuleList([NemotronHBlock(config, layer_idx=idx) for idx in range(config.num_hidden_layers)]) + + self.gradient_checkpointing = False + self.norm_f = NemotronHRMSNorm(config.hidden_size, eps=config.layer_norm_epsilon) + # Initialize weights and apply final processing + self._register_load_state_dict_pre_hook(self.load_hook) + self.post_init() + + def load_hook(self, state_dict, prefix, *args): + for k in state_dict: + if "embedding." 
in k: + state_dict[k.replace("embedding.", "embeddings.")] = state_dict.pop(k) + break + + def get_input_embeddings(self): + return self.embeddings + + def set_input_embeddings(self, new_embeddings): + self.embeddings = new_embeddings + + @add_start_docstrings_to_model_forward(NEMOTRONH_INPUTS_DOCSTRING) + @add_code_sample_docstrings( + checkpoint=_CHECKPOINT_FOR_DOC, + output_type=NemotronHOutput, + config_class=_CONFIG_FOR_DOC, + ) + def forward( + self, + input_ids: Optional[torch.LongTensor] = None, + inputs_embeds: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + cache_params: Optional[HybridMambaAttentionDynamicCache] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + cache_position: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.Tensor] = None, + **kwargs, + ) -> Union[tuple, NemotronHOutput]: + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + # use_cache = use_cache if use_cache is not None else self.config.use_cache + use_cache = use_cache if use_cache is not None else (self.config.use_cache if not self.training else False) + + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + if (input_ids is None) ^ (inputs_embeds is not None): # ^ is python for xor + raise ValueError("You must specify exactly one of input_ids or inputs_embeds") + + if inputs_embeds is None: + inputs_embeds = self.embeddings(input_ids) + + if self.gradient_checkpointing and self.training and use_cache: + logger.warning_once( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`." + ) + use_cache = False + + # From zamba_modeling.py + if use_cache and cache_params is None: + logger.warning_once( + "NemotronH requires an initialized `NemotronHHybridDynamicCache` to return a cache. None was " + "provided, so no cache will be returned." 
+ ) + + hidden_states = inputs_embeds + + if cache_position is None: + cache_position = torch.arange(hidden_states.shape[1], device=hidden_states.device) + if position_ids is None: + position_ids = cache_position.unsqueeze(0) + + causal_mask = self._update_causal_mask(attention_mask, inputs_embeds, cache_position) + mamba_mask = self._update_mamba_mask(attention_mask, cache_position) + + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + # Until HERE + + for layer_idx, mixer_block in enumerate(self.layers): + # Depending on the layer type we opt for 2D base attention mask (Mamba) or 4D causal mask (Attention) + if mixer_block.block_type == "mamba": + layer_mask = mamba_mask + elif mixer_block.block_type == "attention": + layer_mask = causal_mask + elif mixer_block.block_type == "mlp": + layer_mask = None + else: + raise ValueError(f"Invalid block_type: {self.block_type}") + + if output_hidden_states: + all_hidden_states += (hidden_states,) + + if self.gradient_checkpointing and self.training: + hidden_states = self._gradient_checkpointing_func( + mixer_block.__call__, hidden_states, cache_params, cache_position, layer_mask + ) + else: + hidden_states = mixer_block( + hidden_states, + cache_params=cache_params, + cache_position=cache_position, + attention_mask=layer_mask, + ) + + # TODO: Store attentions + # if output_attentions: + # if layer_outputs[1] is not None: + # # append attentions only of attention layers. Mamba layers return `None` as the attention weights + # all_self_attns += (layer_outputs[1],) + + # TODO (Check): should it happen before the forward pass? + # if output_hidden_states: + # all_hidden_states = all_hidden_states + (hidden_states,) + + hidden_states = self.norm_f(hidden_states) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + if not return_dict: + return tuple(v for v in [hidden_states, cache_params, all_hidden_states] if v is not None) + + return NemotronHOutput( + last_hidden_state=hidden_states, + cache_params=cache_params if use_cache else None, + hidden_states=all_hidden_states, + attentions=all_self_attns, + ) + + # Copied from transformers.models.jamba.modeling_jamba.JambaModel._update_causal_mask + def _update_causal_mask(self, attention_mask, input_tensor, cache_position): + if self.config._attn_implementation == "flash_attention_2": + if attention_mask is not None and 0.0 in attention_mask: + return attention_mask + return None + + dtype, device = input_tensor.dtype, input_tensor.device + min_dtype = torch.finfo(dtype).min + sequence_length = input_tensor.shape[1] + target_length = cache_position[-1] + 1 + + causal_mask = torch.full((sequence_length, target_length), fill_value=min_dtype, dtype=dtype, device=device) + if sequence_length != 1: + causal_mask = torch.triu(causal_mask, diagonal=1) + causal_mask *= torch.arange(target_length, device=device) > cache_position.reshape(-1, 1) + causal_mask = causal_mask[None, None, :, :].expand(input_tensor.shape[0], 1, -1, -1) + if attention_mask is not None: + causal_mask = causal_mask.clone() # copy to contiguous memory for in-place edit + if attention_mask.dim() == 2: + mask_length = attention_mask.shape[-1] + padding_mask = causal_mask[..., :mask_length].eq(0.0) * attention_mask[:, None, None, :].eq(0.0) + causal_mask[..., :mask_length] = causal_mask[..., :mask_length].masked_fill(padding_mask, min_dtype) + + if ( + self.config._attn_implementation == "sdpa" + and attention_mask is not None + and 
attention_mask.device.type == "cuda" + ): + # Attend to all tokens in fully masked rows in the causal_mask, for example the relevant first rows when + # using left padding. This is required by F.scaled_dot_product_attention memory-efficient attention path. + # Details: https://github.com/pytorch/pytorch/issues/110213 + causal_mask = AttentionMaskConverter._unmask_unattended(causal_mask, min_dtype) + + return causal_mask + + def _update_mamba_mask(self, attention_mask, cache_position): + """ + No need for zeroing states when + 1. Cached forward + 2. Attending to all inputs + """ + mamba_mask = attention_mask + if cache_position[0] > 0 or (attention_mask is not None and torch.all(attention_mask == 1)): + mamba_mask = None + return mamba_mask + + +@add_start_docstrings( + """ + The NEMOTRONH Model transformer with a language modeling head on top (linear layer with weights not tied to the input + embeddings). + """, + NEMOTRONH_START_DOCSTRING, +) +class NemotronHForCausalLM(NemotronHPreTrainedModel, GenerationMixin): + _tied_weights_keys = ["lm_head.weight"] + + def __init__(self, config): + super().__init__(config) + self.backbone = NemotronHModel(config) + self.vocab_size = config.vocab_size + self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.backbone.get_input_embeddings() + + def set_input_embeddings(self, new_embeddings): + return self.backbone.set_input_embeddings(new_embeddings) + + def get_output_embeddings(self): + return self.lm_head + + def set_output_embeddings(self, new_embeddings): + self.lm_head = new_embeddings + + def get_decoder(self): + return self.model + + def set_decoder(self, decoder): + self.model = decoder + + def prepare_inputs_for_generation( + self, + input_ids, + past_key_values=None, + attention_mask=None, + inputs_embeds=None, + cache_position=None, + position_ids=None, + use_cache=True, + **kwargs, + ): + # Copy from https://github.com/huggingface/transformers/blob/main/src/transformers/models/jamba/modeling_jamba.py + # Overwitten -- uses `cache_params` as opposed to `past_key_values` + empty_past_kv = past_key_values is None + + # If we have cache: let's slice `input_ids` through `cache_position`, to keep only the unprocessed tokens + # Exception 1: when passing input_embeds, input_ids may be missing entries + # Exception 2: some generation methods do special slicing of input_ids, so we don't need to do it here + # Exception 3: with synced GPUs cache_position may go out of bounds, but we only want dummy token in that case. 
+ # (we can't check exception 3 while compiling) + if not empty_past_kv: + if inputs_embeds is not None or cache_position[-1] >= input_ids.shape[1]: # Exception 1 # Exception 3 + input_ids = input_ids[:, -cache_position.shape[0] :] + elif input_ids.shape[1] != cache_position.shape[0]: # Default case (the "else", a no op, is Exception 2) + input_ids = input_ids[:, cache_position] + else: + past_key_values = HybridMambaAttentionDynamicCache( + self.config, input_ids.shape[0], self.dtype, device=self.device + ) + + if attention_mask is not None and position_ids is None: + # create position_ids on the fly for batch generation + position_ids = attention_mask.long().cumsum(-1) - 1 + position_ids.masked_fill_(attention_mask == 0, 1) + if not empty_past_kv: + position_ids = position_ids[:, -input_ids.shape[1] :] + + # if `inputs_embeds` are passed, we only want to use them in the 1st generation step + if inputs_embeds is not None and empty_past_kv: + model_inputs = {"inputs_embeds": inputs_embeds} + else: + model_inputs = {"input_ids": input_ids.contiguous()} # `contiguous()` needed for compilation use cases + + model_inputs.update( + { + "position_ids": position_ids, + "past_key_values": past_key_values, + "use_cache": use_cache, + "attention_mask": attention_mask, + "logits_to_keep": self.config.num_logits_to_keep, + "cache_position": cache_position, + } + ) + return model_inputs + + @add_start_docstrings_to_model_forward(NEMOTRONH_INPUTS_DOCSTRING) + @add_code_sample_docstrings( + checkpoint=_CHECKPOINT_FOR_DOC, + output_type=NemotronHCausalLMOutput, + config_class=_CONFIG_FOR_DOC, + ) + def forward( + self, + input_ids: Optional[torch.LongTensor] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + cache_params: Optional[HybridMambaAttentionDynamicCache] = None, + labels: Optional[torch.LongTensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + use_cache: Optional[bool] = None, + cache_position: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + **kwargs, # for now we need this for generation + ) -> Union[tuple, NemotronHCausalLMOutput]: + r""" + labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. 
you can set + `labels = input_ids` Indices are selected in `[-100, 0, ..., config.vocab_size]` All labels set to `-100` + are ignored (masked), the loss is only computed for labels in `[0, ..., config.vocab_size]` + """ + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + cache_params = cache_params if cache_params is not None else kwargs["past_key_values"] + + nemotron_h_outputs = self.backbone( + input_ids, + cache_params=cache_params, + inputs_embeds=inputs_embeds, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + use_cache=use_cache, + cache_position=cache_position, + attention_mask=attention_mask, + ) + hidden_states = nemotron_h_outputs[0] + + # TODO: Check zamba_modeling.py: https://github.com/huggingface/transformers/blob/d7188ba600e36d3fd191b12e19f1b3bb81a8404f/src/transformers/models/zamba/modeling_zamba.py#L1284C1-L1286C2 + # logits = self.lm_head(hidden_states.to(self.lm_head.weight.dtype)).float() + logits = self.lm_head(hidden_states.to(self.lm_head.weight.dtype)).float() + + loss = None + if labels is not None: + # move labels to correct device to enable model parallelism + labels = labels.to(logits.device) + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss() + loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1)) + + if not return_dict: + output = (logits,) + nemotron_h_outputs[1:] + return ((loss,) + output) if loss is not None else output + + return NemotronHCausalLMOutput( + loss=loss, + logits=logits, + cache_params=nemotron_h_outputs.cache_params, + hidden_states=nemotron_h_outputs.hidden_states, + attentions=nemotron_h_outputs.attentions, + ) From bc25e7492b9bd503ec548418c3fa87eba6ed7c87 Mon Sep 17 00:00:00 2001 From: oleksost Date: Thu, 21 Aug 2025 12:15:34 +0000 Subject: [PATCH 10/18] mamba2 nemotron h tp --- fast_llm/layers/common/normalization.py | 26 + fast_llm/layers/ssm/config.py | 65 +- fast_llm/layers/ssm/mamba2.py | 763 +++++++----------- fast_llm/models/ssm/conversion.py | 3 + .../ssm/external/eval/apriel_eval_wrapper.py | 73 ++ tests/test_ssms.py | 32 +- 6 files changed, 477 insertions(+), 485 deletions(-) diff --git a/fast_llm/layers/common/normalization.py b/fast_llm/layers/common/normalization.py index bccc1d62..7b959e34 100644 --- a/fast_llm/layers/common/normalization.py +++ b/fast_llm/layers/common/normalization.py @@ -8,6 +8,13 @@ from fast_llm.tensor import ParameterMeta, accumulate_gradient, init_ones_, init_zeros_ from fast_llm.utils import Assert +try: + from mamba_ssm.ops.triton.layernorm_gated import rmsnorm_fn as mamba_rmsnorm_fn + + _mamba_ssm_available = True +except ImportError: + _mamba_ssm_available = False + try: import fused_layer_norm_cuda # noqa @@ -288,3 +295,22 @@ def _forward_fused(self, input_: torch.Tensor) -> torch.Tensor: def _forward_torch(self, input_: torch.Tensor) -> torch.Tensor: return torch.rms_norm(input_.to(self.weight.dtype), self.normalized_shape, self.weight, self._eps) + + +class MambaRMSNormGated(RMSNorm): + def __init__(self, hidden_dim: TensorDim, group_size: int, eps=1e-5, lr_scale: float | None = None): + assert 
_mamba_ssm_available
+        super().__init__(hidden_dim, eps=eps, lr_scale=lr_scale)
+        self.group_size = group_size
+        self._forward = mamba_rmsnorm_fn
+
+    def forward(self, input_: torch.Tensor, gate=None):
+        return mamba_rmsnorm_fn(
+            x=input_,
+            weight=self.weight,
+            bias=None,  # No bias
+            z=gate,
+            eps=self._eps,
+            group_size=self.group_size,
+            norm_before_gate=False,
+        )
diff --git a/fast_llm/layers/ssm/config.py b/fast_llm/layers/ssm/config.py
index e2d0862f..32dc27c1 100644
--- a/fast_llm/layers/ssm/config.py
+++ b/fast_llm/layers/ssm/config.py
@@ -49,7 +49,10 @@ class SSMDimNames:
     # Composite dimensions
     composite_heads = "ssm_composite_heads"
     composite_heads_and_head_dim = "ssm_composite_heads_and_head_dim"
+    composite_heads_and_head_dim_nontp = "ssm_composite_heads_and_head_dim_nontp"
+    composite_heads_and_state_dim = "ssm_composite_heads_and_state_dim"
     composite_head_groups_and_state = "ssm_composite_head_groups_and_state"
+    composite_head_groups_and_head = "ssm_composite_head_groups_and_head"
 
     # Concatenated dimensions
     concatenated_convolution = "ssm_concatenated_convolution"
@@ -232,10 +235,18 @@ class SSMConfig(LLMBlockConfig):
         valid=check_field(Assert.gt, 0),
     )
 
-    # Nemotron H
-    n_groups: int = Field(
-        default=8,
-        desc="Number of groups for Nemotron H",
+    # Nemotron H Mamba2 (the real Mamba2, actually)
+    # Here, instead of setting d_inner, we set the head dim. and the number of heads.
+    # Note: we do not implement n_groups for Mamba2 because, since we do MiL init, we do not want to share B and C parameters across all heads.
+    # Instead, we mimic the GQA behaviour (x -> v, B -> k, C -> q), where x and B are shared across the heads of a group. So this is the same as having n_groups = n_heads?
+    # n_groups: int = Field(
+    #     default=8,
+    #     desc="Number of groups for Mamba2. Allows sharing B and C parameters across heads.",
+    #     hint=FieldHint.architecture,
+    # )
+    head_dim: int = Field(
+        default=64,
+        desc="Head dimension for Nemotron H",
         hint=FieldHint.architecture,
     )
 
@@ -257,8 +268,9 @@ def setup_tensor_space(self, tensor_space: TensorSpace, block_type: SSMBlockType
             num_heads = div(self.d_inner, self.state_size)
             num_head_groups = div(self.d_xb, self.state_size)
         elif block_type == SSMBlockType.nemotron_h_mamba2:
-            num_heads = div(self.d_inner, self.state_size)
-            num_head_groups = self.n_groups
+            # head dim and state size are not the same
+            num_heads = div(self.d_inner, self.head_dim)
+            num_head_groups = div(self.d_xb, self.head_dim)
         elif block_type == SSMBlockType.mamba2_discrete:
             # TODO: Use different variables?
             num_heads = self.n_v_heads
@@ -269,6 +281,8 @@ def setup_tensor_space(self, tensor_space: TensorSpace, block_type: SSMBlockType
         tensor_space.add_tensor_dim(state := TensorDim(SSMDimNames.state, self.state_size))
         if block_type == SSMBlockType.mamba2_discrete:
            tensor_space.add_tensor_dim(head_dim := TensorDim(SSMDimNames.head_dim, div(self.d_inner, num_heads)))
+        elif block_type == SSMBlockType.nemotron_h_mamba2:
+            tensor_space.add_tensor_dim(head_dim := TensorDim(SSMDimNames.head_dim, self.head_dim))
         else:
             head_dim = state
 
@@ -277,14 +291,16 @@ def setup_tensor_space(self, tensor_space: TensorSpace, block_type: SSMBlockType
         tensor_space.add_tensor_dim(
             heads := CompositeTensorDim(SSMDimNames.composite_heads, (head_groups, group_heads))
         )
+        # full d_inner or intermediate_size (e.g. 
for z gate, also the d_inner size for C in mamba2) tensor_space.add_tensor_dim( heads_and_head_dim := CompositeTensorDim( SSMDimNames.composite_heads_and_head_dim, (head_groups, group_heads, head_dim) ) ) + # d_xb tensor_space.add_tensor_dim( - head_groups_and_state := CompositeTensorDim( - SSMDimNames.composite_head_groups_and_state, (head_groups, state) + head_groups_and_head := CompositeTensorDim( + SSMDimNames.composite_head_groups_and_head, (head_groups, head_dim) ) ) tensor_space.add_tensor_dim(TensorDim(SSMDimNames.convolution_kernel, self.conv_kernel_dimension)) @@ -308,12 +324,41 @@ def setup_tensor_space(self, tensor_space: TensorSpace, block_type: SSMBlockType tensor_space.add_tensor_dim( ConcatenatedTensorDim( SSMDimNames.concatenated_inner_projection, - (heads_and_head_dim, head_groups_and_state, head_groups_and_state, heads_and_head_dim), + (heads_and_head_dim, head_groups_and_head, head_groups_and_head, heads_and_head_dim), + ) + ) + elif block_type == SSMBlockType.nemotron_h_mamba2: + # for the norm + tensor_space.add_tensor_dim( + TensorDim( + SSMDimNames.composite_heads_and_head_dim_nontp, head_groups.size * group_heads.size * head_dim.size + ) + ) + # state and head dim are not the same + # C: for each head, size of state + tensor_space.add_tensor_dim( + heads_and_state_dim := CompositeTensorDim( + SSMDimNames.composite_heads_and_state_dim, (head_groups, group_heads, state) ) ) + # B: for each head group, size of state + tensor_space.add_tensor_dim( + head_groups_and_state := CompositeTensorDim( + SSMDimNames.composite_head_groups_and_state, (head_groups, state) + ) + ) + # here we apply depthwise conv. layer to xBC, so the dim. is x (d_xb) x B (d_bb) x C + tensor_space.add_tensor_dim( + conv1d_dim := ConcatenatedTensorDim( + SSMDimNames.conv1d_dim, (heads_and_state_dim, head_groups_and_head, head_groups_and_state) + ) + ) + + # inner projection dimention: also includes z (gate), which has size d_inner (heads_and_head_dim) tensor_space.add_tensor_dim( ConcatenatedTensorDim( - SSMDimNames.conv1d_dim, (heads_and_head_dim, head_groups_and_state, head_groups_and_state) + SSMDimNames.concatenated_inner_projection, + (conv1d_dim, heads_and_head_dim), ) ) elif block_type == SSMBlockType.mamba2_discrete: diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index c044cbc8..9df56388 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -8,9 +8,9 @@ from fast_llm.engine.config_utils.tensor_space import DefaultDimNames, TensorDim, TensorSpace from fast_llm.functional.config import ActivationType from fast_llm.layers.common.linear import InputParallelLinear, Linear, OutputParallelLinear -from fast_llm.layers.common.normalization import RMSNorm +from fast_llm.layers.common.normalization import MambaRMSNormGated from fast_llm.layers.ssm.config import SSMConfig, SSMDimNames, SSMKwargs -from fast_llm.layers.ssm.mamba_layer import init_dtprojbias +from fast_llm.layers.ssm.mamba_layer import init_A, init_dtprojbias from fast_llm.layers.transformer.config import TransformerConfig, TransformerDimNames, TransformerKwargs from fast_llm.layers.transformer.transformer import Mixer from fast_llm.tensor import LambdaInitializer, ParameterMeta, init_kaiming_, init_ones_, init_uniform_centered_ @@ -45,449 +45,252 @@ logger = logging.getLogger(__name__) -# class Mamba2(Mixer): -# """ -# This code is adapted from https://github.com/jxiw/M1/blob/537a1ca5407a786a99dc6c721873493cf8750d5e/mamba/hybrid_mamba_layer.py -# """ - -# _mixer_name: 
typing.ClassVar[str] = "mamba_2" - -# _XZ_DIMS = ( -# TransformerDimNames.batch, -# SSMDimNames.composite_heads_and_head_dim, -# TransformerDimNames.sequence_q, -# ) -# _BC_DIMS = ( -# TransformerDimNames.batch, -# SSMDimNames.composite_heads, -# SSMDimNames.state, -# TransformerDimNames.sequence_q, -# ) - -# def __init__( -# self, -# config: SSMConfig, -# tensor_space: TensorSpace, -# block_index: int, -# transformer_config: TransformerConfig, -# ): -# super().__init__(tensor_space, block_index, debug_level=transformer_config.debug_transformer) -# self._config: SSMConfig = config -# Assert.eq(self._config.activation_type, ActivationType.silu) -# layer_lr_scale: float | None = config.per_layer_lr_scale[block_index] if config.per_layer_lr_scale else None -# lr_scale: float | tuple[float | None, ...] | None = get_lr_scale(self._config.mamba_lr_scale, layer_lr_scale) - -# inner_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim] -# xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_state] -# hidden_dim: TensorDim = tensor_space[TransformerDimNames.hidden] -# dt_rank_dim = tensor_space[SSMDimNames.dt_rank] - -# self._local_heads = tensor_space[SSMDimNames.composite_heads].size -# self._local_head_groups = tensor_space[SSMDimNames.head_groups].size -# self._group_heads = div(self._local_heads, self._local_head_groups) -# self._local_inner_size = inner_dim.size -# self._local_xb_size = xb_dim.size - -# state_size = tensor_space[SSMDimNames.state].size -# div(self._local_inner_size, state_size) - -# conv1d_dim = inner_dim if self._config.repeat_kv_before_conv else xb_dim -# self.conv1d_weight = ParameterMeta.from_dims( -# ( -# conv1d_dim, -# tensor_space[DefaultDimNames.scalar], -# tensor_space[SSMDimNames.convolution_kernel], -# ), -# init_method=init_uniform_centered_((conv1d_dim.global_size * self._config.conv_kernel_dimension) ** -0.5), -# lr_scale=lr_scale, -# ) -# self.conv1d_bias = ParameterMeta.from_dims( -# (conv1d_dim,), -# init_method=init_uniform_centered_(self._config.conv_kernel_dimension**-0.5), -# lr_scale=lr_scale, -# ) -# self.in_proj = OutputParallelLinear( -# hidden_dim, -# tensor_space[SSMDimNames.concatenated_inner_projection], -# bias=config.add_bias_linear, -# weight_init_method=init_kaiming_(transformer_config.hidden_size), -# sequence_parallel=self._sequence_parallel, -# lr_scale=lr_scale, -# ) - -# self.dt_in_proj = Linear( -# hidden_dim, -# dt_rank_dim, -# bias=config.add_bias_linear, -# weight_init_method=init_kaiming_(transformer_config.hidden_size), -# lr_scale=lr_scale, -# ) -# self.dt_proj = OutputParallelLinear( -# dt_rank_dim, -# inner_dim, -# bias=False, -# # Initialize special dt projection to preserve variance at initialization -# weight_init_method=self._config.dt_init.get_init_method( -# self._config.dt_rank**-0.5 * self._config.dt_scale -# ), -# sequence_parallel=self._sequence_parallel, -# lr_scale=lr_scale, -# ) -# # define bias outside the linear layer since it's also used in the selective_scan_fn -# self.dt_proj_bias = ParameterMeta.from_dims( -# (inner_dim,), -# init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), -# lr_scale=lr_scale, -# ) -# self.A_log = ParameterMeta.from_dims( -# (inner_dim, tensor_space[SSMDimNames.state]), -# init_method=init_A(self._config.state_size, self._config.d_inner), -# lr_scale=lr_scale, -# weight_decay=False, -# ) -# self.D = ParameterMeta.from_dims( -# (inner_dim,), -# weight_decay=False, -# init_method=init_ones_, -# lr_scale=lr_scale, -# ) -# 
self.out_proj = InputParallelLinear( -# inner_dim, -# hidden_dim, -# bias=config.add_bias_linear, -# weight_init_method=init_kaiming_(self._config.d_inner), -# sequence_parallel=self._sequence_parallel, -# # TODO: lr_scale? -# ) - -# def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[torch.Tensor, torch.Tensor | None]: -# """ -# Note, we are nto doing "read" sequence-tensor parallel trainign here, since inner_projection is gathered over all GPUS. -# This is also desired, since the currently used mamba kernel does not support STP. -# TODO: use correct kernel from Mamba2! -# """ -# assert _mamba_available -# assert _causal_conv1d_available -# cu_seqlens = kwargs[SSMKwargs.cu_seqlens] -# seq_idx = kwargs[SSMKwargs.seq_idx] -# position_indices = kwargs[SSMKwargs.ssm_position_ids] - -# # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) -# # -> (batch/sequence, sequence/batch, inner_projection) -# inner_projection = self.in_proj(input_) -# dt = self.dt_proj(self.dt_in_proj(input_)) + self.dt_proj_bias -# # Standardize to (batch, sequence, inner_projection) -# if kwargs[TransformerKwargs.sequence_first]: -# inner_projection = inner_projection.transpose(0, 1) -# dt = dt.transpose(0, 1) - -# sequence_length = inner_projection.size(1) - -# z, x, b, c = torch.split( -# inner_projection, -# [self._local_inner_size, self._local_xb_size, self._local_xb_size, self._local_inner_size], -# dim=2, -# ) - -# # z: (batch, sequence, local_heads * state) -> (batch, local_heads * state, sequence) -# z = z.transpose(1, 2) - -# # x: (batch, sequence, local_head_groups * state) -> (batch, local_heads * state, sequence) -# x = x.transpose(1, 2) -# # x: (batch, local_heads * state, sequence) -> (batch, local_head_per_groups, state, sequence) -# if self._config.repeat_kv_before_conv: -# x = ( -# x.unflatten(1, (self._local_head_groups, self._config.state_size)) -# .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) -# .flatten(1, 2) -# ) - -# if cu_seqlens is not None: -# # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 -# x = _causal_conv1d_fn( -# x=x.transpose(1, 2).contiguous().transpose(1, 2), -# weight=self.conv1d_weight.squeeze(1), -# bias=self.conv1d_bias, -# seq_idx=seq_idx, -# activation="silu", -# ) -# else: -# x = _causal_conv1d_fn(x=x, weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu") - -# if not self._config.repeat_kv_before_conv: -# x = ( -# x.unflatten(1, (self._local_head_groups, self._config.state_size)) -# .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) -# .flatten(1, 2) -# ) - -# # b: (batch, sequence, local_head_groups * state) -> (batch, local_heads, state, sequence) -# b = ( -# b.transpose(1, 2) -# .unflatten(1, (self._local_head_groups, self._config.state_size)) -# .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) -# ) - -# # c: (batch, sequence, heads * state) -> (batch, heads, state, sequence) -# c = c.transpose(1, 2).unflatten(1, (self._local_heads, self._config.state_size)) - -# # dt: (batch, sequence, heads * state) -> (batch, heads * state, sequence) -# dt = dt.transpose(1, 2) - -# if self._debug_level: -# self._debug_log(z, "z", self._XZ_DIMS, kwargs) -# self._debug_log(x, "x", self._XZ_DIMS, kwargs) -# self._debug_log(b, "b", self._BC_DIMS, kwargs) -# self._debug_log(c, "c", self._BC_DIMS, kwargs) -# self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) - -# if not 
_mamba_varlen: -# Assert.eq(cu_seqlens, None, msg="This version of Mamba2 does not support cu_seqlens, install verlen mamba") -# y = selective_scan_fn( -# x, -# dt, -# -torch.exp(self.A_log.float()), -# b, -# c, -# self.D.float(), -# z, -# delta_bias=self.dt_proj_bias.float(), -# delta_softplus=True, -# ) -# else: -# position_indices = position_indices if cu_seqlens is not None else None - -# y = selective_scan_fn( -# x, -# dt, -# -torch.exp(self.A_log.float()), -# b, -# c, -# self.D.float(), -# z, -# delta_bias=self.dt_proj_bias.float(), -# delta_softplus=True, -# position_indices=position_indices, -# ) - -# if self._debug_level: -# self._debug_log(y, "y", self._XZ_DIMS, kwargs) - -# # y: (batch, local_heads * state, sequence) -> (batch, sequence, local_heads * state) -# y = y.transpose(1, 2)[:, :sequence_length] -# if kwargs[TransformerKwargs.sequence_first]: -# # TODO: Is contiguous needed? -# y = y.transpose(0, 1).contiguous() -# # (batch/sequence, sequence/batch, local_heads * state) -# # -> (batch/local_sequence, local_sequence/batch, hidden) -# return self.out_proj(y) - - -# class Mamba2M1(Mixer): -# """ -# This code is adapted from https://github.com/jxiw/M1/blob/main/mamba2/hybrid_mamba_layer.py -# """ - -# _mixer_name: typing.ClassVar[str] = "mamba_2" - -# _XZ_DIMS = ( -# TransformerDimNames.batch, -# SSMDimNames.composite_heads_and_head_dim, -# TransformerDimNames.sequence_q, -# ) -# _BC_DIMS = ( -# TransformerDimNames.batch, -# SSMDimNames.composite_heads, -# SSMDimNames.state, -# TransformerDimNames.sequence_q, -# ) - -# def __init__( -# self, -# config: SSMConfig, -# tensor_space: TensorSpace, -# block_index: int, -# transformer_config: TransformerConfig, -# ): -# super().__init__(tensor_space, block_index, debug_level=transformer_config.debug_transformer) -# self._config: SSMConfig = config -# Assert.eq(self._config.activation_type, ActivationType.silu) -# layer_lr_scale: float | None = config.per_layer_lr_scale[block_index] if config.per_layer_lr_scale else None -# lr_scale: float | tuple[float | None, ...] 
| None = get_lr_scale(self._config.mamba_lr_scale, layer_lr_scale) - -# inner_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim] -# xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_state] -# hidden_dim: TensorDim = tensor_space[TransformerDimNames.hidden] -# tensor_space[SSMDimNames.dt_rank] - -# self._local_heads = tensor_space[SSMDimNames.composite_heads].size -# self._local_head_groups = tensor_space[SSMDimNames.head_groups].size -# self._group_heads = div(self._local_heads, self._local_head_groups) -# self._local_inner_size = inner_dim.size -# self._local_xb_size = xb_dim.size - -# conv1d_dim = tensor_space[SSMDimNames.conv1d_dim] -# self.conv1d_weight = ParameterMeta.from_dims( -# ( -# conv1d_dim, -# tensor_space[DefaultDimNames.scalar], -# tensor_space[SSMDimNames.convolution_kernel], -# ), -# init_method=init_uniform_centered_((conv1d_dim.global_size * self._config.conv_kernel_dimension) ** -0.5), -# lr_scale=lr_scale, -# ) -# self.conv1d_bias = ParameterMeta.from_dims( -# (conv1d_dim,), -# init_method=init_uniform_centered_(self._config.conv_kernel_dimension**-0.5), -# lr_scale=lr_scale, -# ) -# self.in_proj = OutputParallelLinear( -# hidden_dim, -# tensor_space[SSMDimNames.concatenated_inner_projection], -# bias=config.add_bias_linear, -# weight_init_method=init_kaiming_(transformer_config.hidden_size), -# sequence_parallel=self._sequence_parallel, -# lr_scale=lr_scale, -# ) - -# self.dt_in_proj = Linear( -# hidden_dim, -# tensor_space[SSMDimNames.composite_heads], -# bias=config.add_bias_linear, -# weight_init_method=init_kaiming_(transformer_config.hidden_size), -# lr_scale=lr_scale, -# ) - -# self.dt_proj_bias = ParameterMeta.from_dims( -# (tensor_space[SSMDimNames.composite_heads],), -# init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), -# lr_scale=lr_scale, -# ) - -# def init_A_uniform(A_init_range: tuple[float, float]=(1, 16)) -> LambdaInitializer: -# def init_(meta: ParameterMeta, tensor: torch.Tensor, generator: torch.Generator) -> None: # noqa -# tensor.uniform_(*A_init_range).log_() -# return LambdaInitializer(init_, requires_global_initialization=True) - -# self.A_log = ParameterMeta.from_dims( -# (tensor_space[SSMDimNames.composite_heads],), -# init_method=init_A_uniform(A_init_range=(1, 16)), -# lr_scale=lr_scale, -# weight_decay=False, -# ) -# self.D = ParameterMeta.from_dims( -# (tensor_space[SSMDimNames.composite_heads],), # can also be nheads x headim -# weight_decay=False, -# init_method=init_ones_, -# lr_scale=lr_scale, -# ) -# self.out_proj = InputParallelLinear( -# inner_dim, -# hidden_dim, -# bias=config.add_bias_linear, -# weight_init_method=init_kaiming_(self._config.d_inner), -# sequence_parallel=self._sequence_parallel, -# lr_scale=lr_scale, -# ) -# self.norm = RMSNorm( -# inner_dim, -# eps=1e-5, -# lr_scale=lr_scale, -# ) - -# def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[torch.Tensor, torch.Tensor | None]: -# """ """ -# assert _mamba_available -# assert _causal_conv1d_available -# cu_seqlens = kwargs[SSMKwargs.cu_seqlens] -# seq_idx = kwargs[SSMKwargs.seq_idx] -# kwargs[SSMKwargs.ssm_position_ids] - -# # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) -# # -> (batch/sequence, sequence/batch, inner_projection) -# inner_projection = self.in_proj(input_) -# dt = self.dt_in_proj(input_) # bs, seq, heads #+ self.dt_proj_bias -# # Standardize to (batch, sequence, inner_projection) -# if 
kwargs[TransformerKwargs.sequence_first]: -# inner_projection = inner_projection.transpose(0, 1) -# dt = dt.transpose(0, 1) - -# sequence_length = inner_projection.size(1) - -# z, xBC = torch.split( -# inner_projection, -# [self._local_inner_size, self._local_xb_size + self._local_xb_size + self._local_inner_size], -# dim=2, -# ) - -# if cu_seqlens is not None: -# # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 -# xBC = _causal_conv1d_fn( -# xBC.transpose(1, 2), -# weight=self.conv1d_weight.squeeze(1), -# bias=self.conv1d_bias, -# seq_idx=seq_idx, -# activation="silu", -# ).transpose(1, 2) -# else: -# xBC = _causal_conv1d_fn( -# x=xBC.transpose(1, 2), weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu" -# ).transpose(1, 2) - -# x, b, c = torch.split(xBC, [self._local_xb_size, self._local_xb_size, self._local_inner_size], dim=-1) -# x = einops.rearrange(x, "b l (xb_group dstate) -> b xb_group l dstate", dstate=self._config.state_size) -# b = einops.rearrange(b, "b l (xb_group dstate) -> b xb_group l dstate", dstate=self._config.state_size) -# batch, num_key_value_heads, slen, head_dim = x.shape -# x = x[:, :, None, :, :].expand(batch, num_key_value_heads, self._group_heads, slen, head_dim) -# x = x.reshape(batch, num_key_value_heads * self._group_heads, slen, head_dim) -# b = b[:, :, None, :, :].expand(batch, num_key_value_heads, self._group_heads, slen, head_dim) -# b = b.reshape(batch, num_key_value_heads * self._group_heads, slen, head_dim) - -# if self._debug_level: -# self._debug_log(z, "z", self._XZ_DIMS, kwargs) -# self._debug_log(x, "x", self._XZ_DIMS, kwargs) -# self._debug_log(b, "b", self._BC_DIMS, kwargs) -# self._debug_log(c, "c", self._BC_DIMS, kwargs) -# self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) - -# dt_limit_kwargs = {} -# # c is b x seq x heads * state -# y = mamba_chunk_scan_combined( -# # rearrange(x, "b l (h p) -> b l h p", p=self.headdim), -# einops.rearrange(x, "b g l p -> b l g p"), -# dt, -# -torch.exp(self.A_log.float()), -# # rearrange(B, "b l (g n) -> b l g n", g=self.ngroups), -# einops.rearrange(b, "b g l n -> b l g n"), -# einops.rearrange(c, "b l (g n) -> b l g n", g=self._local_heads), -# chunk_size=self._config.chunk_size, -# D=self.D, -# z=None, -# dt_bias=self.dt_proj_bias, -# dt_softplus=True, -# seq_idx=seq_idx, -# cu_seqlens=cu_seqlens, -# **dt_limit_kwargs, -# return_final_states=False, -# return_varlen_states=False, -# ) - -# if self._debug_level: -# self._debug_log(y, "y", self._XZ_DIMS, kwargs) - -# # y: (batch, local_heads * state, sequence) -> (batch, sequence, local_heads * state) -# y = y.transpose(1, 2)[:, :sequence_length] -# if kwargs[TransformerKwargs.sequence_first]: -# # TODO: Is contiguous needed? 
-# y = y.transpose(0, 1).contiguous() -# # (batch/sequence, sequence/batch, local_heads * state) -# # -> (batch/local_sequence, local_sequence/batch, hidden) -# return self.out_proj(y) +class Mamba2(Mixer): + """ + This code is adapted from https://github.com/jxiw/M1/blob/537a1ca5407a786a99dc6c721873493cf8750d5e/mamba/hybrid_mamba_layer.py + """ + + _mixer_name: typing.ClassVar[str] = "mamba_2" + + _XZ_DIMS = ( + TransformerDimNames.batch, + SSMDimNames.composite_heads_and_head_dim, + TransformerDimNames.sequence_q, + ) + _BC_DIMS = ( + TransformerDimNames.batch, + SSMDimNames.composite_heads, + SSMDimNames.state, + TransformerDimNames.sequence_q, + ) + + def __init__( + self, + config: SSMConfig, + tensor_space: TensorSpace, + block_index: int, + transformer_config: TransformerConfig, + ): + super().__init__(tensor_space, block_index, debug_level=transformer_config.debug_transformer) + self._config: SSMConfig = config + Assert.eq(self._config.activation_type, ActivationType.silu) + layer_lr_scale: float | None = config.per_layer_lr_scale[block_index] if config.per_layer_lr_scale else None + lr_scale: float | tuple[float | None, ...] | None = get_lr_scale(self._config.mamba_lr_scale, layer_lr_scale) + + inner_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim] + xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_head] + hidden_dim: TensorDim = tensor_space[TransformerDimNames.hidden] + dt_rank_dim = tensor_space[SSMDimNames.dt_rank] + + self._local_heads = tensor_space[SSMDimNames.composite_heads].size + self._local_head_groups = tensor_space[SSMDimNames.head_groups].size + self._group_heads = div(self._local_heads, self._local_head_groups) + self._local_inner_size = inner_dim.size + self._local_xb_size = xb_dim.size + + state_size = tensor_space[SSMDimNames.state].size + div(self._local_inner_size, state_size) + + conv1d_dim = inner_dim if self._config.repeat_kv_before_conv else xb_dim + self.conv1d_weight = ParameterMeta.from_dims( + ( + conv1d_dim, + tensor_space[DefaultDimNames.scalar], + tensor_space[SSMDimNames.convolution_kernel], + ), + init_method=init_uniform_centered_((conv1d_dim.global_size * self._config.conv_kernel_dimension) ** -0.5), + lr_scale=lr_scale, + ) + self.conv1d_bias = ParameterMeta.from_dims( + (conv1d_dim,), + init_method=init_uniform_centered_(self._config.conv_kernel_dimension**-0.5), + lr_scale=lr_scale, + ) + self.in_proj = OutputParallelLinear( + hidden_dim, + tensor_space[SSMDimNames.concatenated_inner_projection], + bias=config.add_bias_linear, + weight_init_method=init_kaiming_(transformer_config.hidden_size), + sequence_parallel=self._sequence_parallel, + lr_scale=lr_scale, + ) + + self.dt_in_proj = Linear( + hidden_dim, + dt_rank_dim, + bias=config.add_bias_linear, + weight_init_method=init_kaiming_(transformer_config.hidden_size), + lr_scale=lr_scale, + ) + self.dt_proj = OutputParallelLinear( + dt_rank_dim, + inner_dim, + bias=False, + # Initialize special dt projection to preserve variance at initialization + weight_init_method=self._config.dt_init.get_init_method( + self._config.dt_rank**-0.5 * self._config.dt_scale + ), + sequence_parallel=self._sequence_parallel, + lr_scale=lr_scale, + ) + # define bias outside the linear layer since it's also used in the selective_scan_fn + self.dt_proj_bias = ParameterMeta.from_dims( + (inner_dim,), + init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), + lr_scale=lr_scale, + ) + self.A_log = ParameterMeta.from_dims( + (inner_dim, 
tensor_space[SSMDimNames.state]), + init_method=init_A(self._config.state_size, self._config.d_inner), + lr_scale=lr_scale, + weight_decay=False, + ) + self.D = ParameterMeta.from_dims( + (inner_dim,), + weight_decay=False, + init_method=init_ones_, + lr_scale=lr_scale, + ) + self.out_proj = InputParallelLinear( + inner_dim, + hidden_dim, + bias=config.add_bias_linear, + weight_init_method=init_kaiming_(self._config.d_inner), + sequence_parallel=self._sequence_parallel, + # TODO: lr_scale? + ) + + def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[torch.Tensor, torch.Tensor | None]: + """ + Note, we are nto doing "read" sequence-tensor parallel trainign here, since inner_projection is gathered over all GPUS. + This is also desired, since the currently used mamba kernel does not support STP. + TODO: use correct kernel from Mamba2! + """ + assert _mamba_available + assert _causal_conv1d_available + cu_seqlens = kwargs[SSMKwargs.cu_seqlens] + seq_idx = kwargs[SSMKwargs.seq_idx] + position_indices = kwargs[SSMKwargs.ssm_position_ids] + + # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) + # -> (batch/sequence, sequence/batch, inner_projection) + inner_projection = self.in_proj(input_) + dt = self.dt_proj(self.dt_in_proj(input_)) + self.dt_proj_bias + # Standardize to (batch, sequence, inner_projection) + if kwargs[TransformerKwargs.sequence_first]: + inner_projection = inner_projection.transpose(0, 1) + dt = dt.transpose(0, 1) + + sequence_length = inner_projection.size(1) + # is this like Mamba1, the conv is only on the x? + z, x, b, c = torch.split( + inner_projection, + [self._local_inner_size, self._local_xb_size, self._local_xb_size, self._local_inner_size], + dim=2, + ) + + # z: (batch, sequence, local_heads * state) -> (batch, local_heads * state, sequence) + z = z.transpose(1, 2) + + # x: (batch, sequence, local_head_groups * state) -> (batch, local_heads * state, sequence) + x = x.transpose(1, 2) + # x: (batch, local_heads * state, sequence) -> (batch, local_head_per_groups, state, sequence) + if self._config.repeat_kv_before_conv: + x = ( + x.unflatten(1, (self._local_head_groups, self._config.state_size)) + .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) + .flatten(1, 2) + ) + + if cu_seqlens is not None: + # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 + x = _causal_conv1d_fn( + x=x.transpose(1, 2).contiguous().transpose(1, 2), + weight=self.conv1d_weight.squeeze(1), + bias=self.conv1d_bias, + seq_idx=seq_idx, + activation="silu", + ) + else: + x = _causal_conv1d_fn(x=x, weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu") + + if not self._config.repeat_kv_before_conv: + x = ( + x.unflatten(1, (self._local_head_groups, self._config.state_size)) + .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) + .flatten(1, 2) + ) + + # b: (batch, sequence, local_head_groups * state) -> (batch, local_heads, state, sequence) + b = ( + b.transpose(1, 2) + .unflatten(1, (self._local_head_groups, self._config.state_size)) + .repeat_interleave(self._group_heads, 1, output_size=self._local_heads) + ) + + # c: (batch, sequence, heads * state) -> (batch, heads, state, sequence) + c = c.transpose(1, 2).unflatten(1, (self._local_heads, self._config.state_size)) + + # dt: (batch, sequence, heads * state) -> (batch, heads * state, sequence) + dt = dt.transpose(1, 2) + + if self._debug_level: + 
self._debug_log(z, "z", self._XZ_DIMS, kwargs)
+            self._debug_log(x, "x", self._XZ_DIMS, kwargs)
+            self._debug_log(b, "b", self._BC_DIMS, kwargs)
+            self._debug_log(c, "c", self._BC_DIMS, kwargs)
+            self._debug_log(dt, "dt", self._XZ_DIMS, kwargs)
+
+        if not _mamba_varlen:
+            Assert.eq(cu_seqlens, None, msg="This version of Mamba2 does not support cu_seqlens, install varlen mamba")
+            y = selective_scan_fn(
+                x,
+                dt,
+                -torch.exp(self.A_log.float()),
+                b,
+                c,
+                self.D.float(),
+                z,
+                delta_bias=self.dt_proj_bias.float(),
+                delta_softplus=True,
+            )
+        else:
+            position_indices = position_indices if cu_seqlens is not None else None
+
+            y = selective_scan_fn(
+                x,
+                dt,
+                -torch.exp(self.A_log.float()),
+                b,
+                c,
+                self.D.float(),
+                z,
+                delta_bias=self.dt_proj_bias.float(),
+                delta_softplus=True,
+                position_indices=position_indices,
+            )
+
+        if self._debug_level:
+            self._debug_log(y, "y", self._XZ_DIMS, kwargs)
+
+        # y: (batch, local_heads * state, sequence) -> (batch, sequence, local_heads * state)
+        y = y.transpose(1, 2)[:, :sequence_length]
+        if kwargs[TransformerKwargs.sequence_first]:
+            # TODO: Is contiguous needed?
+            y = y.transpose(0, 1).contiguous()
+        # (batch/sequence, sequence/batch, local_heads * state)
+        # -> (batch/local_sequence, local_sequence/batch, hidden)
+        return self.out_proj(y)
 
 
 class NemotronHMamba2(Mixer):
     """
-    This code is adapted from https://github.com/jxiw/M1/blob/main/mamba2/hybrid_mamba_layer.py
+    This is the actual Mamba2, called NemotronHMamba2 for historical reasons.
+    It decouples d_state and head_dim.
+    Head dimension -- a larger head dimension means we project the hidden state into a larger space (more channel mixing).
+    Larger state size -- more temporal memory.
+
+    This code is adapted from https://huggingface.co/nvidia/Nemotron-H-8B-Base-8K/blob/main/modeling_nemotron_h.py
     """
 
     _mixer_name: typing.ClassVar[str] = "mamba_2"
@@ -518,17 +321,28 @@ def __init__(
         lr_scale: float | tuple[float | None, ...] 
| None = get_lr_scale(self._config.mamba_lr_scale, layer_lr_scale) inner_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim] - xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_state] + inner_dim_non_tp: TensorDim = tensor_space[SSMDimNames.composite_heads_and_head_dim_nontp] + c_dim: TensorDim = tensor_space[SSMDimNames.composite_heads_and_state_dim] + xb_dim = tensor_space[SSMDimNames.composite_head_groups_and_head] + bb_dim = tensor_space[SSMDimNames.composite_head_groups_and_state] hidden_dim: TensorDim = tensor_space[TransformerDimNames.hidden] - tensor_space[SSMDimNames.dt_rank] + self._head_dim_size: TensorDim = tensor_space[SSMDimNames.head_dim].size self._local_heads = tensor_space[SSMDimNames.composite_heads].size self._local_head_groups = tensor_space[SSMDimNames.head_groups].size self._group_heads = div(self._local_heads, self._local_head_groups) + Assert.eq(self._local_heads, self._local_head_groups * self._group_heads) + self._local_inner_size = inner_dim.size - self._local_xb_size = xb_dim.size + self._local_c_size = c_dim.size - conv1d_dim = tensor_space[SSMDimNames.conv1d_dim] + Assert.eq(self._local_inner_size, self._head_dim_size * self._local_heads) + self._local_xb_size = xb_dim.size # x has head dim and is for each head group + self._local_bb_size = bb_dim.size # b has state dim and is for each head group + Assert.eq(self._local_xb_size, self._head_dim_size * self._local_head_groups) + Assert.eq(self._local_bb_size, self._config.state_size * self._local_head_groups) + + conv1d_dim = tensor_space[SSMDimNames.conv1d_dim] # applied to xBC, so d_xb + d_bb + c_dim self.conv1d_weight = ParameterMeta.from_dims( ( conv1d_dim, @@ -552,11 +366,13 @@ def __init__( lr_scale=lr_scale, ) - self.dt_in_proj = Linear( + # project single number per head + self.dt_in_proj = OutputParallelLinear( hidden_dim, tensor_space[SSMDimNames.composite_heads], bias=config.add_bias_linear, weight_init_method=init_kaiming_(transformer_config.hidden_size), + sequence_parallel=self._sequence_parallel, lr_scale=lr_scale, ) @@ -592,8 +408,9 @@ def init_(meta: ParameterMeta, tensor: torch.Tensor, generator: torch.Generator) sequence_parallel=self._sequence_parallel, lr_scale=lr_scale, ) - self.norm = RMSNorm( - inner_dim, + self.norm = MambaRMSNormGated( + inner_dim_non_tp, + group_size=self._local_inner_size, eps=1e-5, lr_scale=lr_scale, ) @@ -604,7 +421,6 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ assert _causal_conv1d_available cu_seqlens = kwargs[SSMKwargs.cu_seqlens] seq_idx = kwargs[SSMKwargs.seq_idx] - kwargs[SSMKwargs.ssm_position_ids] # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) # -> (batch/sequence, sequence/batch, inner_projection) @@ -614,17 +430,16 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ if kwargs[TransformerKwargs.sequence_first]: inner_projection = inner_projection.transpose(0, 1) dt = dt.transpose(0, 1) - + # note: self.in_proj gathers full sequence length here sequence_length = inner_projection.size(1) z, xBC = torch.split( inner_projection, - [self._local_inner_size, self._local_xb_size + self._local_xb_size + self._local_inner_size], + [self._local_inner_size, self._local_xb_size + self._local_bb_size + self._local_c_size], dim=2, ) if cu_seqlens is not None: - # from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/verl/models/mamba/hybrid_wrapper.py#L152 xBC = _causal_conv1d_fn( xBC.transpose(1, 2), 
weight=self.conv1d_weight.squeeze(1), @@ -637,14 +452,21 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ x=xBC.transpose(1, 2), weight=self.conv1d_weight.squeeze(1), bias=self.conv1d_bias, activation="silu" ).transpose(1, 2) - x, b, c = torch.split(xBC, [self._local_xb_size, self._local_xb_size, self._local_inner_size], dim=-1) - x = einops.rearrange(x, "b l (xb_group dstate) -> b xb_group l dstate", dstate=self._config.state_size) - b = einops.rearrange(b, "b l (xb_group dstate) -> b xb_group l dstate", dstate=self._config.state_size) + x, b, c = torch.split(xBC, [self._local_xb_size, self._local_bb_size, self._local_c_size], dim=-1) + # simulate GQA by repeating heads in x,b, x -> v, B -> k, C -> q + x = einops.rearrange( + x, "b l (local_head_groups head_dim) -> b local_head_groups l head_dim", head_dim=self._head_dim_size + ) # x is b x local_head_groups x l x head_dim + b = einops.rearrange( + b, + "b l (local_head_groups state_size) -> b local_head_groups l state_size", + state_size=self._config.state_size, + ) # b is b x local_head_groups x l x state_size batch, num_key_value_heads, slen, head_dim = x.shape x = x[:, :, None, :, :].expand(batch, num_key_value_heads, self._group_heads, slen, head_dim) x = x.reshape(batch, num_key_value_heads * self._group_heads, slen, head_dim) - b = b[:, :, None, :, :].expand(batch, num_key_value_heads, self._group_heads, slen, head_dim) - b = b.reshape(batch, num_key_value_heads * self._group_heads, slen, head_dim) + b = b[:, :, None, :, :].expand(batch, num_key_value_heads, self._group_heads, slen, self._config.state_size) + b = b.reshape(batch, num_key_value_heads * self._group_heads, slen, self._config.state_size) if self._debug_level: self._debug_log(z, "z", self._XZ_DIMS, kwargs) @@ -653,23 +475,26 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ self._debug_log(c, "c", self._BC_DIMS, kwargs) self._debug_log(dt, "dt", self._XZ_DIMS, kwargs) - dt_limit_kwargs = {} - # c is b x seq x heads * state + dt_limit_kwargs = ( + {} + ) # can be used to set time-step limit as in https://huggingface.co/nvidia/Nemotron-H-8B-Base-8K/blob/main/modeling_nemotron_h.py#L424 + # c is b x seq x (heads * state) + # b is b x heads x seq x state) + # x is b x heads x seq x head_dim + # note, we could used mamba_split_conv1d_scan_combined directly for training, however because of the GQA, we need to use the chunked version. 
y = mamba_chunk_scan_combined( - # rearrange(x, "b l (h p) -> b l h p", p=self.headdim), einops.rearrange(x, "b g l p -> b l g p"), dt, - -torch.exp(self.A_log.float()), - # rearrange(B, "b l (g n) -> b l g n", g=self.ngroups), - einops.rearrange(b, "b g l n -> b l g n"), - einops.rearrange(c, "b l (g n) -> b l g n", g=self._local_heads), + A=-torch.exp(self.A_log.float()), + B=einops.rearrange(b, "b g l n -> b l g n"), + C=einops.rearrange(c, "b l (g n) -> b l g n", g=self._local_heads), chunk_size=self._config.chunk_size, D=self.D, z=None, dt_bias=self.dt_proj_bias, dt_softplus=True, - seq_idx=seq_idx, - cu_seqlens=cu_seqlens, + seq_idx=seq_idx, # assume this is used for packing + cu_seqlens=cu_seqlens, # assume this is used for packing, but maybe not needed at training **dt_limit_kwargs, return_final_states=False, return_varlen_states=False, @@ -679,7 +504,11 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ self._debug_log(y, "y", self._XZ_DIMS, kwargs) # y: (batch, local_heads * state, sequence) -> (batch, sequence, local_heads * state) - y = y.transpose(1, 2)[:, :sequence_length] + y = y.view(batch, sequence_length, -1) + + # gate norm + y = self.norm(y, gate=z) + if kwargs[TransformerKwargs.sequence_first]: # TODO: Is contiguous needed? y = y.transpose(0, 1).contiguous() diff --git a/fast_llm/models/ssm/conversion.py b/fast_llm/models/ssm/conversion.py index 64afbea0..457c42b9 100644 --- a/fast_llm/models/ssm/conversion.py +++ b/fast_llm/models/ssm/conversion.py @@ -296,6 +296,9 @@ def _create_weight_converters( converters += self._get_weight_and_bias_converters( f"layers.{offset+i+1}.mixer.dt_proj", f"{hf_base_prefix}model.layers.{i}.mixer.dt_proj", False ) + converters += self._get_weight_and_bias_converters( + f"layers.{offset+i+1}.mixer.norm", f"{hf_base_prefix}model.layers.{i}.mixer.norm", True + ) # bias is treated separately in Mamba2 and must always exist (https://github.com/jxiw/M1/blob/537a1ca5407a786a99dc6c721873493cf8750d5e/mamba/hybrid_mamba_layer.py) converters.append( WeightConverter( diff --git a/fast_llm/models/ssm/external/eval/apriel_eval_wrapper.py b/fast_llm/models/ssm/external/eval/apriel_eval_wrapper.py index ee2c83e0..ea1d6cd3 100644 --- a/fast_llm/models/ssm/external/eval/apriel_eval_wrapper.py +++ b/fast_llm/models/ssm/external/eval/apriel_eval_wrapper.py @@ -250,3 +250,76 @@ def _model_generate(self, context, max_length, stop, **generation_kwargs): use_cache=True, **generation_kwargs, ) + + +@register_model("nemotron_h") +class NemotronHWrapper(HFLM): + """Wrapper for NemotronH model for compatibility with lm-evaluation-harness.""" + + def __init__(self, pretrained, **kwargs) -> None: + if "backend" in kwargs: + assert kwargs["backend"] == "causal" + + super().__init__( + pretrained=pretrained, + backend=kwargs.pop("backend", "causal"), + **kwargs, + ) + + # Override device detection for distributed settings + self._device = _get_device() + + def _get_config(self, pretrained: str, **kwargs) -> None: + """Get the model configuration.""" + from fast_llm.models.ssm.external.nemotron.config import NemotronHConfig + + self._config = NemotronHConfig.from_pretrained(pretrained, trust_remote_code=True) + + def _create_model(self, pretrained: str, dtype: Optional[Union[str, torch.dtype]] = "float16", **kwargs) -> None: + """Create the model.""" + from fast_llm.models.ssm.external.nemotron.modeling import NemotronHForCausalLM + + # Ensure we're using the correct device + device = _get_device() + self._device = device + + self._model 
= NemotronHForCausalLM.from_pretrained( + pretrained, + # device=device, + torch_dtype=torch.bfloat16 if dtype == "auto" else lm_eval.models.utils.get_dtype(dtype), + config=self._config, + ) + + def _model_generate(self, context, max_length, stop, **generation_kwargs): + # Ensure we're using the correct device + device = _get_device() + + # Ensure context is on the same device as the model + context = context.to(device) + self.model.to(device) + + # Move any tensors in generation_kwargs to the correct device + generation_kwargs = _move_tensors_to_device(generation_kwargs, device) + + stopping_criteria = lm_eval.models.utils.stop_sequences_criteria( + self.tokenizer, + stop, + context.shape[1], + context.shape[0], + ) + + generation_kwargs["temperature"] = generation_kwargs.get("temperature", 0.0) + do_sample = generation_kwargs.get("do_sample", None) + + # The temperature has to be a strictly positive float -- if it is 0.0, use greedy decoding strategies + if generation_kwargs.get("temperature") == 0.0 and do_sample is None: + generation_kwargs["do_sample"] = do_sample = False + if do_sample is False and generation_kwargs.get("temperature") == 0.0: + generation_kwargs.pop("temperature") + return self.model.generate( + input_ids=context, + max_length=max_length, + stopping_criteria=stopping_criteria, + use_cache=True, + **generation_kwargs, + ) diff --git a/tests/test_ssms.py b/tests/test_ssms.py index 2a338f1b..f9c7dc57 100644 --- a/tests/test_ssms.py +++ b/tests/test_ssms.py @@ -5,7 +5,7 @@ import pytest import torch -from mamba2 import Mamba2 +from mamba2 import Mamba2, NemotronHMamba2 from fast_llm.config import NoAutoValidate from fast_llm.engine.checkpoint.config import CheckpointLoadConfig @@ -43,7 +43,7 @@ def get_hybrid_config(hybrid_block_layout=["t", "m2"], prediction_heads=1, defau hidden_size = 512 config = HybridSSMBaseModelConfig( transformer=TransformerConfig(num_layers=len(hybrid_block_layout), hidden_size=hidden_size), - ssm=SSMConfig(d_xb=hidden_size, dt_rank=10, d_inner=hidden_size * 2), + ssm=SSMConfig(d_xb=hidden_size, dt_rank=10, d_inner=hidden_size * 2, state_size=16, head_dim=8), hybrid_block_layout=hybrid_block_layout, prediction_heads=prediction_heads, default_mtp_type=default_mtp_type, @@ -225,22 +225,39 @@ def generate_random_cu_seqlens(seq_len, packages_num=2): # Quick and dirty test for Mamba2 varlen block from https://github.com/jxiw/M1/blob/d92b53faa640f8ebf624d3e9e771fe24648ef014/rl/verl/tests/pack_mamba/test_mamba_layer.py +# test that packed and not packed are producing the same result in terms of outputs and gradients # TODO: integrate in the testing framework @pytest.mark.slow @pytest.mark.skipif(not torch.cuda.is_available(), reason="No CUDA available") @pytest.mark.skipif(not _mamba_available, reason="Mamba2 is not available") -@pytest.mark.skipif(not _mamba_varlen, reason="Mamba2 varlen is not available") -def test_mamba_varlen_block(distributed_config, distributed): +@pytest.mark.parametrize( + "mixer_cls, hybrid_block_layout, tollerance", + [ + pytest.param( + partial(NemotronHMamba2, block_index=0), + ["nm2", "t"], + 1e-3, # not 100% sure why, mamba2 requires lower tollerance (maybe its not really supporting packing) + id="nemotron_hmamba2", + ), + pytest.param( + partial(Mamba2, block_index=0), + ["m2", "t"], + 1e-4, + marks=pytest.mark.skipif(not _mamba_varlen, reason="Mamba2 varlen is not available"), + id="mamba2", + ), + ], +) +def test_mamba_varlen_block(mixer_cls, hybrid_block_layout, tollerance, distributed_config, distributed): """ 
Compare that the output and grads of packed and unpacked Mamba2 varlen block are the same. """ - hybrid_config = get_hybrid_config(hybrid_block_layout=["m2", "t"]) + hybrid_config = get_hybrid_config(hybrid_block_layout=hybrid_block_layout) tensor_space = TensorSpace(distributed_config=distributed_config) tensor_space.setup(distributed) hybrid_config.setup_tensor_space(tensor_space) layer_idx = 0 - mixer_cls = partial(Mamba2, block_index=layer_idx) block_packed = SSMBlock( hybrid_config.transformer, hybrid_config.ssm, @@ -304,14 +321,13 @@ def test_mamba_varlen_block(distributed_config, distributed): output_states_unpacked = block_ref( hidden_states.clone(), {"cu_seqlens": None, "seq_idx": None, "ssm_position_ids": None, "sequence_first": False} ) - tollerance = 1e-4 assert output_states_packed.shape == packed_hidden_states.shape assert output_states_unpacked.shape == hidden_states.shape assert not torch.isnan(hidden_states).any() assert not torch.isinf(hidden_states).any() output_states_unpacked = pack(output_states_unpacked, cu_seqlens, batch_size) - torch.allclose(output_states_packed, output_states_unpacked, atol=tollerance) + assert torch.allclose(output_states_packed, output_states_unpacked, atol=tollerance) loss = output_states_packed.sum() loss.backward() From 7c5fb0abe85f23d3f50ce0eb6fba80846343ea2a Mon Sep 17 00:00:00 2001 From: oleksost Date: Fri, 22 Aug 2025 12:24:42 +0000 Subject: [PATCH 11/18] modeling --- fast_llm/layers/ssm/mamba2.py | 6 +- .../configuration_ssm_hybrid_apriel15b.py | 3 + .../modeling_ssm_hybrid_apriel15b.py | 430 +++++++++++++++++- .../apriel_15b_hybrid/test_modeling.py | 53 +++ 4 files changed, 480 insertions(+), 12 deletions(-) create mode 100644 fast_llm/models/ssm/external/apriel_15b_hybrid/test_modeling.py diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index 9df56388..34cd8c54 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -376,7 +376,7 @@ def __init__( lr_scale=lr_scale, ) - self.dt_proj_bias = ParameterMeta.from_dims( + self.dt_bias = ParameterMeta.from_dims( (tensor_space[SSMDimNames.composite_heads],), init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), lr_scale=lr_scale, @@ -425,7 +425,7 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ # inner_projection : (batch/local_sequence, local_sequence/batch, hidden) # -> (batch/sequence, sequence/batch, inner_projection) inner_projection = self.in_proj(input_) - dt = self.dt_in_proj(input_) # bs, seq, heads #+ self.dt_proj_bias + dt = self.dt_in_proj(input_) # bs, seq, heads # Standardize to (batch, sequence, inner_projection) if kwargs[TransformerKwargs.sequence_first]: inner_projection = inner_projection.transpose(0, 1) @@ -491,7 +491,7 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ chunk_size=self._config.chunk_size, D=self.D, z=None, - dt_bias=self.dt_proj_bias, + dt_bias=self.dt_bias, dt_softplus=True, seq_idx=seq_idx, # assume this is used for packing cu_seqlens=cu_seqlens, # assume this is used for packing, but maybe not needed at training diff --git a/fast_llm/models/ssm/external/apriel_15b_hybrid/configuration_ssm_hybrid_apriel15b.py b/fast_llm/models/ssm/external/apriel_15b_hybrid/configuration_ssm_hybrid_apriel15b.py index 98d2fc28..5f42bf35 100644 --- a/fast_llm/models/ssm/external/apriel_15b_hybrid/configuration_ssm_hybrid_apriel15b.py +++ 
b/fast_llm/models/ssm/external/apriel_15b_hybrid/configuration_ssm_hybrid_apriel15b.py @@ -23,6 +23,9 @@ "dt_scale": 1.0, "dt_init_floor": 1e-4, "conv_bias": True, + # nemotron mamba2 + "head_dim": 128, + "layer_norm_epsilon": 1e-5, } diff --git a/fast_llm/models/ssm/external/apriel_15b_hybrid/modeling_ssm_hybrid_apriel15b.py b/fast_llm/models/ssm/external/apriel_15b_hybrid/modeling_ssm_hybrid_apriel15b.py index 9f4588a2..c6f7de15 100644 --- a/fast_llm/models/ssm/external/apriel_15b_hybrid/modeling_ssm_hybrid_apriel15b.py +++ b/fast_llm/models/ssm/external/apriel_15b_hybrid/modeling_ssm_hybrid_apriel15b.py @@ -8,6 +8,7 @@ from causal_conv1d import causal_conv1d_fn, causal_conv1d_update from einops import rearrange, repeat from mamba_ssm.ops.selective_scan_interface import selective_scan_fn +from mamba_ssm.ops.triton.layernorm_gated import rmsnorm_fn from mamba_ssm.ops.triton.selective_state_update import selective_state_update from mamba_ssm.ops.triton.ssd_combined import mamba_chunk_scan_combined from torch import nn @@ -202,14 +203,14 @@ def update_conv_state( self, layer_idx: int, new_conv_state: torch.Tensor, cache_init: bool = False ) -> torch.Tensor: if cache_init: - self.conv_states[layer_idx] = new_conv_state.to(self.conv_states.device) + self.conv_states[layer_idx] = new_conv_state.to(self.conv_states[0].device) else: self.conv_states[layer_idx] = self.conv_states[layer_idx].roll(shifts=-1, dims=-1) self.conv_states[layer_idx][:, :, -1] = new_conv_state[:, 0, :].to(self.conv_states.device) return self.conv_states[layer_idx] def update_ssm_state(self, layer_idx: int, new_ssm_state: torch.Tensor): - self.ssm_states[layer_idx] = new_ssm_state.to(self.ssm_states.device) + self.ssm_states[layer_idx] = new_ssm_state.to(self.ssm_states[0].device) return self.ssm_states[layer_idx] def reset(self): @@ -217,7 +218,8 @@ def reset(self): self.ssm_states.zero_() -# Copied from https://github.com/huggingface/transformers/blob/main/src/transformers/models/jamba/modeling_jamba.py +# Copied from https://huggingface.co/nvidia/Nemotron-H-8B-Base-8K/blob/main/modeling_nemotron_h.py whichis taken from +# https://github.com/huggingface/transformers/blob/main/src/transformers/models/jamba/modeling_jamba.py class HybridMambaAttentionDynamicCache(DynamicCache): """ A dynamic cache that can handle both the attention cache (which has a seq_len dimension) and the mamba cache @@ -242,7 +244,7 @@ def __init__(self, config: AprielSSMHybridConfig, batch_size, dtype=torch.float1 else config.ssm_cfg["expand"] * config.hidden_size ) ssm_state_size = config.ssm_cfg["d_state"] - conv_kernel_size = config.ssm_cfg["d_conv"] + self.conv_kernel_size = conv_kernel_size = config.ssm_cfg["d_conv"] self.n_qk_heads = config.ssm_cfg["n_qk_heads"] self.num_C_head = intermediate_size // ssm_state_size # mamba2 assert intermediate_size % self.n_qk_heads == 0, "d_inner must be divisible by n_qk_heads" @@ -341,14 +343,14 @@ def update_conv_state( self, layer_idx: int, new_conv_state: torch.Tensor, cache_init: bool = False ) -> torch.Tensor: if cache_init: - self.conv_states[layer_idx] = new_conv_state.to(self.conv_states.device) + self.conv_states[layer_idx] = new_conv_state.to(self.conv_states[0].device) else: self.conv_states[layer_idx] = self.conv_states[layer_idx].roll(shifts=-1, dims=-1) - self.conv_states[layer_idx][:, :, -1] = new_conv_state[:, 0, :].to(self.conv_states.device) + self.conv_states[layer_idx][:, :, -1] = new_conv_state[:, 0, :].to(self.conv_states[0].device) return self.conv_states[layer_idx] def 
update_ssm_state(self, layer_idx: int, new_ssm_state: torch.Tensor):
-        self.ssm_states[layer_idx] = new_ssm_state.to(self.ssm_states.device)
+        self.ssm_states[layer_idx] = new_ssm_state.to(self.ssm_states[0].device)
         return self.ssm_states[layer_idx]
 
     def get_seq_length(self, layer_idx: Optional[int] = 0) -> int:
@@ -783,7 +785,411 @@ def convolutional_step(self, xBC, conv_state):
         return xBC, conv_state
 
+
+def pad_tensor_by_size(input_tensor: torch.Tensor, pad_size: int):
+    """
+    Padding x tensor with `pad_size` on the seq_len dim (dim=1)
+
+    Assumes that we only have tensors of either size 4 or 3
+    """
+    pad_shape = (0, 0, 0, 0, 0, pad_size, 0, 0) if len(input_tensor.shape) == 4 else (0, 0, 0, pad_size, 0, 0)
+
+    return torch.nn.functional.pad(input_tensor, pad_shape, mode="constant", value=0)
+
+
+class MambaRMSNormGated(torch.nn.Module):
+    def __init__(self, hidden_size, group_size, eps=1e-5):
+        super().__init__()
+        self.weight = nn.Parameter(torch.ones(hidden_size))
+        self.variance_epsilon = eps
+        self.group_size = group_size
+
+    # jan28b version
+    def forward(self, hidden_states, gate=None):
+        return rmsnorm_fn(
+            x=hidden_states,
+            weight=self.weight,
+            bias=None,  # No bias
+            z=gate,
+            eps=self.variance_epsilon,
+            group_size=self.group_size,
+            norm_before_gate=False,
+        )
+
+
+class NemotronHMamba2Mixer(nn.Module):
+    """
+    From https://huggingface.co/nvidia/Nemotron-H-8B-Base-8K/blob/main/modeling_nemotron_h.py.
+    Compute ∆, A, B, C, and D the state space parameters and compute the `contextualized_states`.
+    A, D are input independent (see Mamba paper [1] Section 3.5.2 "Interpretation of A" for why A isn't selective)
+    ∆, B, C are input-dependent (this is a key difference between Mamba and the linear time invariant S4,
+    and is why Mamba is called **selective** state spaces)
+
+
+    Note: we assume n_groups = num_heads here; we do not want to share B or C over heads.
+    Why: we mimic GQA here, so sharing B over heads would result in additional complexity which we want to avoid at this point.
+    Note: to reconstruct the architecture of the original Nemotron-H mixer (but with n_groups = num_heads), d_xb needs to be the same as d_inner.
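+
+    Worked example (editor's note, using dimensions that appear elsewhere in this PR: d_inner = 4096,
+    head_dim = 128, d_xb = 1024, d_state = 16): num_heads = 4096 / 128 = 32 and num_xb_heads = 1024 / 128 = 8,
+    so x and B are repeated repeat_groups = 32 / 8 = 4 times to mimic GQA while C stays per-head, and the
+    depthwise convolution runs over conv_dim = d_xb + num_xb_heads * d_state + num_heads * d_state
+    = 1024 + 128 + 512 = 1664 channels.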
+ """ + + def __init__( + self, + d_model, + d_inner, + d_xb=None, + d_state=16, + d_conv=4, + expand=2, + head_dim=128, + layer_norm_epsilon=1e-5, + conv_bias=True, + chunk_size=128, + bias=False, + layer_idx=None, + # device=None, + # dtype=None, + **kwargs, + ): + super().__init__() + self.hidden_size = d_model + self.ssm_state_size = d_state + self.conv_kernel_size = d_conv + self.expand = expand + self.intermediate_size = ( + d_inner if d_inner is not None else d_model * expand + ) # config.mamba_num_heads * config.mamba_head_dim + + self.d_xb = d_xb if d_xb is not None else self.intermediate_size + self.layer_idx = layer_idx + self.use_conv_bias = conv_bias + self.activation = "silu" + self.act = nn.SiLU() + self.head_dim = head_dim + assert self.intermediate_size % self.head_dim == 0, "intermediate_size must be divisible by head_dim" + self.num_heads = self.intermediate_size // self.head_dim + + # for GQA simulation, where we repeat x and B for each group + self.num_xb_heads = self.d_xb // self.head_dim + assert self.num_heads % self.num_xb_heads == 0, "num_heads must be divisible by num_xb_heads" + self.repeat_groups = self.num_heads // self.num_xb_heads + if self.d_xb == self.intermediate_size: + assert self.repeat_groups == 1 + logger.warning( + f"d_xb == intermediate_size, d_xb: {self.d_xb}, intermediate_size: {self.intermediate_size}, repeat_groups: {self.repeat_groups}" + ) + + self.layer_norm_epsilon = layer_norm_epsilon + + logger.warning( + f"Instantiating mamba2 with num_heads: {self.num_heads}, head_dim: {self.head_dim}, \n \ + intermediate_size: {self.intermediate_size}, \n \ + d_xb: {self.d_xb}, \n \ + number_xb_heads: {self.num_xb_heads}, \n \ + repeat_groups: {self.repeat_groups}, \n \ + d_state: {self.ssm_state_size}" + ) + + self.n_groups = ( + self.num_heads + ) # nemotron allows for any num_groups, we use the same as num_heads for now, otherwisxe it becomes too complecated with GQA simulation + self.chunk_size = chunk_size + + self.time_step_limit = (0.0, float("inf")) # hard coded + # conv is over xBC -- d_xb (head_dim), d_bb (state_dim), d_c (state_dim) + # self.conv_dim = self.intermediate_size + 2 * self.n_groups * self.ssm_state_size + self.conv_dim = ( + self.d_xb # self.num_xb_heads x head_dim + + self.num_xb_heads * self.ssm_state_size + + self.num_heads * self.ssm_state_size + ) + self.conv1d = nn.Conv1d( + in_channels=self.conv_dim, + out_channels=self.conv_dim, + bias=self.use_conv_bias, + kernel_size=self.conv_kernel_size, + groups=self.conv_dim, + padding=self.conv_kernel_size - 1, + ) + + # projection of the input hidden states + projection_size = self.intermediate_size + self.conv_dim # + self.num_heads + self.in_proj = nn.Linear( + self.hidden_size, + projection_size, + bias=bias, + ) + self.dt_in_proj = nn.Linear( + self.hidden_size, + self.num_heads, + bias=bias, + ) + # selective projection used to make dt, B and C input dependant + # time step projection (discretization) + # instantiate once and copy inv_dt in init_weights of PretrainedModel + self.dt_bias = nn.Parameter(torch.ones(self.num_heads)) + + # S4D real initialization. These are not discretized! + # The core is to load them, compute the discrete states, then write the updated state. 
Keeps the memory bounded + A = torch.arange(1, self.num_heads + 1) + self.A_log = nn.Parameter(torch.log(A)) + self.A_log._no_weight_decay = True + self.norm = MambaRMSNormGated( + self.intermediate_size, eps=self.layer_norm_epsilon, group_size=self.intermediate_size // self.n_groups + ) + self.D = nn.Parameter(torch.ones(self.num_heads)) + self.D._no_weight_decay = True + + self.out_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=bias) + self.use_bias = bias + + if not is_fast_path_available: + logger.warning_once( + "The fast path is not available because on of `(selective_state_update, causal_conv1d_fn, causal_conv1d_update)`" + " is None. Falling back to the naive implementation. To install follow https://github.com/state-spaces/mamba/#installation and" + " https://github.com/Dao-AILab/causal-conv1d" + ) + + def cuda_kernels_forward( + self, + hidden_states: torch.Tensor, + cache_params: Optional[HybridMambaAttentionDynamicCache] = None, + cache_position: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.Tensor] = None, + ): + outputs = {} + # 1. Gated MLP's linear projection + # Apply_mask_to_padding_states is not used in nemotron, + # because attention_mask is not pased, see https://huggingface.co/nvidia/Nemotron-H-8B-Base-8K/blob/main/modeling_nemotron_h.py#L774 + attention_mask = None # so apply_mask_to_padding_states does nothing + hidden_states = apply_mask_to_padding_states(hidden_states, attention_mask) + projected_states = self.in_proj(hidden_states) + + # Set up dimensions for reshapes later + batch_size, seq_len, _ = hidden_states.shape + # C dim, note we keep number of groups same as number of heads here + head_time_state_size = self.num_heads * self.ssm_state_size + num_xb_heads_time_state_size = self.num_xb_heads * self.ssm_state_size + + # d_mlp = ( + # projected_states.shape[-1] + # - 2 * self.intermediate_size + # - 2 * self.n_groups * self.ssm_state_size + # - self.num_heads + # ) // 2 + + # Single step calculations via cache + if cache_params is not None and cache_position is not None and cache_position[0] > 0: + gate, hidden_states_B_C = projected_states.squeeze(1).split( + [self.intermediate_size, self.conv_dim], dim=-1 + ) + dt = self.dt_in_proj(hidden_states).squeeze(1) + + # 2. Convolution sequence transformation + hidden_states_B_C = causal_conv1d_update( + hidden_states_B_C, + cache_params.conv_states[self.layer_idx], + self.conv1d.weight.squeeze(1), + self.conv1d.bias, + self.activation, + ) + + hidden_states, B, C = torch.split( + hidden_states_B_C, + [self.d_xb, num_xb_heads_time_state_size, head_time_state_size], + dim=-1, + ) + # simulate GQA by repeating heads in x,b, x -> v, B -> k, C -> q + hidden_states = rearrange( + hidden_states, + "b (local_head_groups head_dim) -> b local_head_groups head_dim", + head_dim=self.head_dim, + ) # x is b x local_head_groups x l x head_dim + B = rearrange( + B, + "b (local_head_groups state_size) -> b local_head_groups state_size", + state_size=self.ssm_state_size, + ) # b is b x local_head_groups x l x state_size + batch, num_key_value_heads, head_dim = hidden_states.shape + hidden_states = hidden_states[:, :, None, :].expand( + batch, num_key_value_heads, self.repeat_groups, head_dim + ) + hidden_states = hidden_states.reshape(batch, num_key_value_heads * self.repeat_groups, head_dim) + B = B[:, :, None, :].expand(batch, num_key_value_heads, self.repeat_groups, self.ssm_state_size) + B = B.reshape(batch, num_key_value_heads * self.repeat_groups, self.ssm_state_size) + + # 3. 
SSM transformation z + A = -torch.exp(self.A_log.float()) # (nheads,) + A = A[:, None, ...][:, :, None].expand(-1, self.head_dim, self.ssm_state_size).to(dtype=torch.float32) + dt = dt[:, :, None].expand(-1, -1, self.head_dim) + dt_bias = self.dt_bias[:, None, ...].expand(-1, self.head_dim) + D = self.D[:, None, ...].expand(-1, self.head_dim) + C = C.view(batch_size, self.num_heads, self.ssm_state_size) + # B = B.view(batch_size, self.n_groups, B.shape[1] // self.n_groups) + # C = C.view(batch_size, self.n_groups, C.shape[1] // self.n_groups) + hidden_states_reshaped = hidden_states.view(batch_size, self.num_heads, self.head_dim) + hidden_states = selective_state_update( + cache_params.ssm_states[self.layer_idx], + hidden_states_reshaped, + dt, + A, + B, + C, + D, + z=None, + dt_bias=dt_bias, + dt_softplus=True, + ) + hidden_states = hidden_states.view(batch_size, self.num_heads * self.head_dim) + hidden_states = self.norm(hidden_states, gate) + + # 4. Final linear projection + out = self.out_proj(hidden_states)[:, None, ...] + + # Fused calculations or step by step if no initialized cache is found + else: + A = -torch.exp(self.A_log.float()) # (num_heads) or (intermediate_size, state_size) + dt_limit_kwargs = {} if self.time_step_limit == (0.0, float("inf")) else {"dt_limit": self.time_step_limit} + + # 2-4. Fused kernel for conv1d, SSM, and the final projection + if self.training and cache_params is None: + assert False, "Should not have ended here for inference" + out = mamba_split_conv1d_scan_combined( + projected_states, + self.conv1d.weight.squeeze(1), + self.conv1d.bias, + self.dt_bias, + A, + D=self.D, + chunk_size=self.chunk_size, + seq_idx=None, # was seq_idx + activation=self.activation, + rmsnorm_weight=self.norm.weight, + rmsnorm_eps=self.norm.variance_epsilon, + outproj_weight=self.out_proj.weight, + outproj_bias=self.out_proj.bias, + headdim=self.head_dim, + ngroups=self.n_groups, + norm_before_gate=False, + return_final_states=False, + **dt_limit_kwargs, + ) + + else: + # we are not using mlp here, leaving it here from nemotron modeling + gate, hidden_states_B_C = projected_states.split([self.intermediate_size, self.conv_dim], dim=-1) + dt = self.dt_in_proj(hidden_states) + + # 2. 
Convolution sequence transformation + # Init cache + if cache_params is not None: + hidden_states_B_C_transposed = hidden_states_B_C.transpose(1, 2) + conv_states = nn.functional.pad( + hidden_states_B_C_transposed, + (cache_params.conv_kernel_size - hidden_states_B_C_transposed.shape[-1], 0), + ) + cache_params.update_conv_state( + layer_idx=self.layer_idx, new_conv_state=conv_states, cache_init=True + ) + + if self.activation not in ["silu", "swish"]: + hidden_states_B_C = self.act( + self.conv1d(hidden_states_B_C.transpose(1, 2))[..., :seq_len].transpose(1, 2) + ) + else: + hidden_states_B_C = causal_conv1d_fn( + x=hidden_states_B_C.transpose(1, 2), + weight=self.conv1d.weight.squeeze(1), + bias=self.conv1d.bias, + activation=self.activation, + ).transpose(1, 2) + hidden_states_B_C = apply_mask_to_padding_states( + hidden_states_B_C, attention_mask + ) # this does not seem to do anything in nemotron + hidden_states, B, C = torch.split( + hidden_states_B_C, + [self.d_xb, num_xb_heads_time_state_size, head_time_state_size], + dim=-1, + ) + # simulate GQA by repeating heads in x,b, x -> v, B -> k, C -> q + hidden_states = rearrange( + hidden_states, + "b l (local_head_groups head_dim) -> b local_head_groups l head_dim", + head_dim=self.head_dim, + ) # x is b x local_head_groups x l x head_dim + B = rearrange( + B, + "b l (local_head_groups state_size) -> b local_head_groups l state_size", + state_size=self.ssm_state_size, + ) # b is b x local_head_groups x l x state_size + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + hidden_states = hidden_states[:, :, None, :, :].expand( + batch, num_key_value_heads, self.repeat_groups, slen, head_dim + ) + hidden_states = hidden_states.reshape(batch, num_key_value_heads * self.repeat_groups, slen, head_dim) + B = B[:, :, None, :, :].expand( + batch, num_key_value_heads, self.repeat_groups, slen, self.ssm_state_size + ) + B = B.reshape(batch, num_key_value_heads * self.repeat_groups, slen, self.ssm_state_size) + hidden_states = hidden_states.transpose(1, 2).contiguous() + B = B.transpose(1, 2).contiguous() + + # 3. SSM transformation + scan_output, ssm_state = mamba_chunk_scan_combined( + hidden_states.view(batch_size, seq_len, -1, self.head_dim), # (b, s, h, d) + dt, # (b, s, h) + A, # (h) + B.view(batch_size, seq_len, self.num_heads, -1), # (b, s, n_groups, state) + C.view(batch_size, seq_len, self.num_heads, -1), # (b, s, n_groups, state) + chunk_size=self.chunk_size, + D=self.D, + z=None, + seq_idx=None, + return_final_states=True, + dt_bias=self.dt_bias, + dt_softplus=True, + **dt_limit_kwargs, + ) + + # Init cache + if ssm_state is not None and cache_params is not None: + cache_params.update_ssm_state(layer_idx=self.layer_idx, new_ssm_state=ssm_state) + + scan_output = scan_output.view(batch_size, seq_len, -1) + + # Multiply "gate" branch and apply extra normalization layer + scan_output = self.norm(scan_output, gate) + + # 4. 
Final linear projection + out = self.out_proj(scan_output) + outputs["hidden_states"] = out[:, :seq_len, :] + return outputs + + def torch_forward(self, *args, **kwargs): + assert False, "Should not have ended here for inference, make sure all neccessary kernels are installed" + # see implementation in nemotron modeling https://huggingface.co/nvidia/Nemotron-H-8B-Base-8K/blob/main/modeling_nemotron_h.py + + def forward( + self, + hidden_states, + past_key_value: Optional[HybridMambaAttentionDynamicCache] = None, + cache_position: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.Tensor] = None, + **kwargs, + ): + cache_params = past_key_value + if is_fast_path_available and "cuda" in self.in_proj.weight.device.type: + return self.cuda_kernels_forward(hidden_states, cache_params, cache_position, attention_mask) + dtype = hidden_states.dtype + if attention_mask is not None and attention_mask.shape[1] > 1 and attention_mask.shape[0] > 1: + # tune out hidden states for pad tokens, see https://github.com/state-spaces/mamba/issues/66 + hidden_states = (hidden_states * attention_mask[:, :, None]).to(dtype) + + return self.torch_forward(hidden_states, cache_params, cache_position, attention_mask) + + class Mamba2(nn.Module): + """ + From https://github.com/jxiw/M1/blob/537a1ca5407a786a99dc6c721873493cf8750d5e/mamba/hybrid_mamba_layer.py + """ + def __init__( self, d_model, @@ -1200,6 +1606,10 @@ def forward(self, hidden_states: torch.Tensor, **kwargs): return (hidden_states,) +class AprielSSMNemotronHM2DecoderLayer(AprielSSMDecoderLayer): + _mixer_class = NemotronHMamba2Mixer + + class AprielThinkerSSMHybridModel(MistralModel): """ Transformer decoder consisting of *config.num_hidden_layers* layers. Each layer is a [`AprielDecoderLayer`, `AprielSSMDecoderLayer`] @@ -1213,7 +1623,7 @@ def __init__(self, config: AprielSSMHybridConfig, **kwargs): super().__init__(config_copy, **kwargs) self.config = config blocks = [] - logger.info(f"Loading hyubrid model with the following layout: {config.hybrid_block_layout}") + logger.info(f"Loading hybrid model with the following layout: {config.hybrid_block_layout}") for layer_idx, type in enumerate(config.hybrid_block_layout): if type == "m2d": blocks.append(AprielSSMDecoderLayer(config, layer_idx)) @@ -1223,6 +1633,8 @@ def __init__(self, config: AprielSSMHybridConfig, **kwargs): blocks.append(MistralDecoderLayer(config, layer_idx)) elif type == "i": blocks.append(AprielHybridIdentity(config)) + elif type == "nm2": + blocks.append(AprielSSMNemotronHM2DecoderLayer(config, layer_idx)) else: raise ValueError(f"Invalid block type: {type}") self.layers = nn.ModuleList(blocks) @@ -1246,7 +1658,7 @@ def forward( ) -> BaseModelOutputWithPast: use_cache = use_cache if use_cache is not None else self.config.use_cache if use_cache and past_key_values is None: - # for the case where prepare_inputs_for_generation is not called to create the cache (as in fast-llm test) + # for the case where prepare_inputs_for_generation is not called to create the cache (as in fast-llm test) batch_size = input_ids.shape[0] if input_ids is not None else inputs_embeds.shape[0] past_key_values = HybridMambaAttentionDynamicCache(self.config, batch_size, self.dtype, device=self.device) output = super().forward( diff --git a/fast_llm/models/ssm/external/apriel_15b_hybrid/test_modeling.py b/fast_llm/models/ssm/external/apriel_15b_hybrid/test_modeling.py new file mode 100644 index 00000000..91857d8a --- /dev/null +++ 
b/fast_llm/models/ssm/external/apriel_15b_hybrid/test_modeling.py @@ -0,0 +1,53 @@ +import pytest + +from fast_llm.models.ssm.external.apriel_15b_hybrid.modeling_ssm_hybrid_apriel15b import NemotronHMamba2Mixer +from fast_llm.models.ssm.external.nemotron.config import NemotronHConfig +from fast_llm.models.ssm.external.nemotron.modeling import NemotronHMamba2Mixer as NemotronHMamba2Mixer_original + + +# in apriel's mamba2 mixer we do not used groups for B and C, but we have the d_xb dim, that simulates GQA +# so in order to reconstruct the original nemotron mixer, we need to set d_xb same as d_inner +@pytest.mark.parametrize( + "apriel_ssm_config, nemotron_h_config", + [ + ( + { + "d_state": 16, + "d_xb": 4096, + "expand": 1, + "d_conv": 4, + "d_inner": 4096, + "conv_bias": True, + "bias": False, + "head_dim": 128, # 4096/128 = 32 heads, 1024/128 = 8 KVheads and 4 repeat groups + }, + NemotronHConfig( + hidden_size=4096, + mamba_num_heads=32, + mamba_head_dim=128, + mamba_n_groups=32, + mamba_d_conv=4, + mamba_expand=1, + ssm_state_size=16, + use_bias=False, + mamba_hidden_act="silu", + ), + ) + ], +) +def test_nemotron_h_mamba2_mixers_identical(apriel_ssm_config: dict, nemotron_h_config: dict): + mixer_apriel = NemotronHMamba2Mixer(d_model=4096, **apriel_ssm_config) + mixer_nemotron_h = NemotronHMamba2Mixer_original(nemotron_h_config, 0) + + for k_a, v_a in mixer_apriel.state_dict().items(): + if k_a == "dt_in_proj.weight": + continue + v_b = mixer_nemotron_h.state_dict()[k_a] + if k_a == "in_proj.weight": + assert [v_a.shape[0], v_a.shape[1]] == [v_b.shape[0] - nemotron_h_config.mamba_num_heads, v_b.shape[1]] + else: + assert v_a.shape == v_b.shape + + +if __name__ == "__main__": + pytest.main([__file__]) From 9cef978a88e8f248bbf93929ce2bdd59a8784e8b Mon Sep 17 00:00:00 2001 From: oleksost Date: Mon, 25 Aug 2025 10:09:35 +0000 Subject: [PATCH 12/18] convertion + MIL init --- fast_llm/models/ssm/conversion.py | 4 + fast_llm/models/ssm/external/15B_hybrid.ipynb | 630 +++++++++++++----- .../configuration_ssm_hybrid_apriel15b.py | 2 +- 3 files changed, 470 insertions(+), 166 deletions(-) diff --git a/fast_llm/models/ssm/conversion.py b/fast_llm/models/ssm/conversion.py index 457c42b9..f3c3fcf2 100644 --- a/fast_llm/models/ssm/conversion.py +++ b/fast_llm/models/ssm/conversion.py @@ -792,6 +792,10 @@ def _create_config_converters(cls) -> list[ParamConverter]: fast_llm_names=(("ssm", "d_inner"),), export_names=(("ssm_cfg", "d_inner"),), ), + RenameParamConverter( + fast_llm_names=(("ssm", "head_dim"),), + export_names=(("ssm_cfg", "head_dim"),), + ), IgnoreImportParamConverter(export_names=(("sliding_window",),), ignore_export_value=None), ] diff --git a/fast_llm/models/ssm/external/15B_hybrid.ipynb b/fast_llm/models/ssm/external/15B_hybrid.ipynb index 8d433e5b..b1e945ac 100644 --- a/fast_llm/models/ssm/external/15B_hybrid.ipynb +++ b/fast_llm/models/ssm/external/15B_hybrid.ipynb @@ -745,7 +745,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -846,22 +846,30 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", "path_thinker = \"/mnt/checkpoints/upstream/Apriel-Nemotron-15b-Thinker\"\n", - "n_ssm = 25\n", + "# path_hybrid = \"/mnt/checkpoints/fast_llm_exp/slam_ssm_distill/15b-hyb25distsftvrlm2-bs64-lr5e-06-lrs1-1-1-1-sl16384_ti60000_aprsft/export/apriel_ssm_thinker_hybrid/23000\"\n", + 
"path_hybrid=\"/mnt/checkpoints/fast_llm_exp/slam_ssm_distill/15b-h27distsftvrlm2f145-bs64-lr5e-06-lrs1-1-1-1-sl16384_ti60000_aprsft/export/apriel_ssm_thinker_hybrid/3500\"\n", "\n", + "n_ssm = 25\n", "\n", + "config_hybrid = AprielSSMHybridConfig.from_pretrained(path_hybrid)\n", "config_thinker = AutoConfig.from_pretrained(path_thinker)\n", "# config_thinker.num_hidden_layers = 5\n", - "hybrid_block_layout = [\"t\"] * config_thinker.num_hidden_layers\n", + "# hybrid_block_layout = [\"t\"] * config_thinker.num_hidden_layers\n", "# hybrid_block_layout[3] = \"m2\"\n", + "hybrid_block_layout = config_hybrid.hybrid_block_layout\n", + "# hybrid_block_layout[7] = \"m2\"\n", + "hybrid_block_layout[6] = \"m2\"\n", + "hybrid_block_layout[8] = \"m2\"\n", "\n", - "for i in range(n_ssm):\n", - " hybrid_block_layout[layer_importance[i]] = \"m2\"\n", + "\n", + "# for i in range(n_ssm):\n", + "# hybrid_block_layout[layer_importance[i]] = \"m2\"\n", "\n", "# group_size = 10 # 2nd layer importance is missing\n", "# for i in range(0, len(layer_importance), group_size):\n", @@ -922,7 +930,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -931,12 +939,12 @@ "['t',\n", " 't',\n", " 't',\n", + " 'm2',\n", " 't',\n", + " 'm2',\n", + " 'm2',\n", " 't',\n", - " 't',\n", - " 't',\n", - " 't',\n", - " 't',\n", + " 'm2',\n", " 't',\n", " 't',\n", " 't',\n", @@ -980,7 +988,7 @@ " 'm2']" ] }, - "execution_count": 10, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -991,25 +999,32 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "Loading checkpoint shards: 100%|██████████| 7/7 [00:04<00:00, 1.58it/s]\n" + "Loading checkpoint shards: 29%|██▊ | 2/7 [00:01<00:04, 1.13it/s]\n" ] }, { - "data": { - "text/plain": [ - "_IncompatibleKeys(missing_keys=['model.layers.3.mixer.A_log', 'model.layers.3.mixer.D', 'model.layers.3.mixer.conv1d.weight', 'model.layers.3.mixer.conv1d.bias', 'model.layers.3.mixer.in_proj.weight', 'model.layers.3.mixer.dt_proj.weight', 'model.layers.3.mixer.dt_proj.bias', 'model.layers.3.mixer.out_proj.weight'], unexpected_keys=['model.layers.5.self_attn.q_proj.weight', 'model.layers.5.self_attn.k_proj.weight', 'model.layers.5.self_attn.v_proj.weight', 'model.layers.5.self_attn.o_proj.weight', 'model.layers.5.mlp.gate_proj.weight', 'model.layers.5.mlp.up_proj.weight', 'model.layers.5.mlp.down_proj.weight', 'model.layers.5.input_layernorm.weight', 'model.layers.5.post_attention_layernorm.weight', 'model.layers.6.self_attn.q_proj.weight', 'model.layers.6.self_attn.k_proj.weight', 'model.layers.6.self_attn.v_proj.weight', 'model.layers.6.self_attn.o_proj.weight', 'model.layers.6.mlp.gate_proj.weight', 'model.layers.6.mlp.up_proj.weight', 'model.layers.6.mlp.down_proj.weight', 'model.layers.6.input_layernorm.weight', 'model.layers.6.post_attention_layernorm.weight', 'model.layers.7.self_attn.q_proj.weight', 'model.layers.7.self_attn.k_proj.weight', 'model.layers.7.self_attn.v_proj.weight', 'model.layers.7.self_attn.o_proj.weight', 'model.layers.7.mlp.gate_proj.weight', 'model.layers.7.mlp.up_proj.weight', 'model.layers.7.mlp.down_proj.weight', 'model.layers.7.input_layernorm.weight', 'model.layers.7.post_attention_layernorm.weight', 'model.layers.8.self_attn.q_proj.weight', 'model.layers.8.self_attn.k_proj.weight', 'model.layers.8.self_attn.v_proj.weight', 'model.layers.8.self_attn.o_proj.weight', 
'model.layers.8.mlp.gate_proj.weight', 'model.layers.8.mlp.up_proj.weight', 'model.layers.8.mlp.down_proj.weight', 'model.layers.8.input_layernorm.weight', 'model.layers.8.post_attention_layernorm.weight', 'model.layers.9.self_attn.q_proj.weight', 'model.layers.9.self_attn.k_proj.weight', 'model.layers.9.self_attn.v_proj.weight', 'model.layers.9.self_attn.o_proj.weight', 'model.layers.9.mlp.gate_proj.weight', 'model.layers.9.mlp.up_proj.weight', 'model.layers.9.mlp.down_proj.weight', 'model.layers.9.input_layernorm.weight', 'model.layers.9.post_attention_layernorm.weight', 'model.layers.10.self_attn.q_proj.weight', 'model.layers.10.self_attn.k_proj.weight', 'model.layers.10.self_attn.v_proj.weight', 'model.layers.10.self_attn.o_proj.weight', 'model.layers.10.mlp.gate_proj.weight', 'model.layers.10.mlp.up_proj.weight', 'model.layers.10.mlp.down_proj.weight', 'model.layers.10.input_layernorm.weight', 'model.layers.10.post_attention_layernorm.weight', 'model.layers.11.self_attn.q_proj.weight', 'model.layers.11.self_attn.k_proj.weight', 'model.layers.11.self_attn.v_proj.weight', 'model.layers.11.self_attn.o_proj.weight', 'model.layers.11.mlp.gate_proj.weight', 'model.layers.11.mlp.up_proj.weight', 'model.layers.11.mlp.down_proj.weight', 'model.layers.11.input_layernorm.weight', 'model.layers.11.post_attention_layernorm.weight', 'model.layers.12.self_attn.q_proj.weight', 'model.layers.12.self_attn.k_proj.weight', 'model.layers.12.self_attn.v_proj.weight', 'model.layers.12.self_attn.o_proj.weight', 'model.layers.12.mlp.gate_proj.weight', 'model.layers.12.mlp.up_proj.weight', 'model.layers.12.mlp.down_proj.weight', 'model.layers.12.input_layernorm.weight', 'model.layers.12.post_attention_layernorm.weight', 'model.layers.13.self_attn.q_proj.weight', 'model.layers.13.self_attn.k_proj.weight', 'model.layers.13.self_attn.v_proj.weight', 'model.layers.13.self_attn.o_proj.weight', 'model.layers.13.mlp.gate_proj.weight', 'model.layers.13.mlp.up_proj.weight', 'model.layers.13.mlp.down_proj.weight', 'model.layers.13.input_layernorm.weight', 'model.layers.13.post_attention_layernorm.weight', 'model.layers.14.self_attn.q_proj.weight', 'model.layers.14.self_attn.k_proj.weight', 'model.layers.14.self_attn.v_proj.weight', 'model.layers.14.self_attn.o_proj.weight', 'model.layers.14.mlp.gate_proj.weight', 'model.layers.14.mlp.up_proj.weight', 'model.layers.14.mlp.down_proj.weight', 'model.layers.14.input_layernorm.weight', 'model.layers.14.post_attention_layernorm.weight', 'model.layers.15.self_attn.q_proj.weight', 'model.layers.15.self_attn.k_proj.weight', 'model.layers.15.self_attn.v_proj.weight', 'model.layers.15.self_attn.o_proj.weight', 'model.layers.15.mlp.gate_proj.weight', 'model.layers.15.mlp.up_proj.weight', 'model.layers.15.mlp.down_proj.weight', 'model.layers.15.input_layernorm.weight', 'model.layers.15.post_attention_layernorm.weight', 'model.layers.16.self_attn.q_proj.weight', 'model.layers.16.self_attn.k_proj.weight', 'model.layers.16.self_attn.v_proj.weight', 'model.layers.16.self_attn.o_proj.weight', 'model.layers.16.mlp.gate_proj.weight', 'model.layers.16.mlp.up_proj.weight', 'model.layers.16.mlp.down_proj.weight', 'model.layers.16.input_layernorm.weight', 'model.layers.16.post_attention_layernorm.weight', 'model.layers.17.self_attn.q_proj.weight', 'model.layers.17.self_attn.k_proj.weight', 'model.layers.17.self_attn.v_proj.weight', 'model.layers.17.self_attn.o_proj.weight', 'model.layers.17.mlp.gate_proj.weight', 'model.layers.17.mlp.up_proj.weight', 'model.layers.17.mlp.down_proj.weight', 
'model.layers.17.input_layernorm.weight', 'model.layers.17.post_attention_layernorm.weight', 'model.layers.18.self_attn.q_proj.weight', 'model.layers.18.self_attn.k_proj.weight', 'model.layers.18.self_attn.v_proj.weight', 'model.layers.18.self_attn.o_proj.weight', 'model.layers.18.mlp.gate_proj.weight', 'model.layers.18.mlp.up_proj.weight', 'model.layers.18.mlp.down_proj.weight', 'model.layers.18.input_layernorm.weight', 'model.layers.18.post_attention_layernorm.weight', 'model.layers.19.self_attn.q_proj.weight', 'model.layers.19.self_attn.k_proj.weight', 'model.layers.19.self_attn.v_proj.weight', 'model.layers.19.self_attn.o_proj.weight', 'model.layers.19.mlp.gate_proj.weight', 'model.layers.19.mlp.up_proj.weight', 'model.layers.19.mlp.down_proj.weight', 'model.layers.19.input_layernorm.weight', 'model.layers.19.post_attention_layernorm.weight', 'model.layers.20.self_attn.q_proj.weight', 'model.layers.20.self_attn.k_proj.weight', 'model.layers.20.self_attn.v_proj.weight', 'model.layers.20.self_attn.o_proj.weight', 'model.layers.20.mlp.gate_proj.weight', 'model.layers.20.mlp.up_proj.weight', 'model.layers.20.mlp.down_proj.weight', 'model.layers.20.input_layernorm.weight', 'model.layers.20.post_attention_layernorm.weight', 'model.layers.21.self_attn.q_proj.weight', 'model.layers.21.self_attn.k_proj.weight', 'model.layers.21.self_attn.v_proj.weight', 'model.layers.21.self_attn.o_proj.weight', 'model.layers.21.mlp.gate_proj.weight', 'model.layers.21.mlp.up_proj.weight', 'model.layers.21.mlp.down_proj.weight', 'model.layers.21.input_layernorm.weight', 'model.layers.21.post_attention_layernorm.weight', 'model.layers.22.self_attn.q_proj.weight', 'model.layers.22.self_attn.k_proj.weight', 'model.layers.22.self_attn.v_proj.weight', 'model.layers.22.self_attn.o_proj.weight', 'model.layers.22.mlp.gate_proj.weight', 'model.layers.22.mlp.up_proj.weight', 'model.layers.22.mlp.down_proj.weight', 'model.layers.22.input_layernorm.weight', 'model.layers.22.post_attention_layernorm.weight', 'model.layers.23.self_attn.q_proj.weight', 'model.layers.23.self_attn.k_proj.weight', 'model.layers.23.self_attn.v_proj.weight', 'model.layers.23.self_attn.o_proj.weight', 'model.layers.23.mlp.gate_proj.weight', 'model.layers.23.mlp.up_proj.weight', 'model.layers.23.mlp.down_proj.weight', 'model.layers.23.input_layernorm.weight', 'model.layers.23.post_attention_layernorm.weight', 'model.layers.24.self_attn.q_proj.weight', 'model.layers.24.self_attn.k_proj.weight', 'model.layers.24.self_attn.v_proj.weight', 'model.layers.24.self_attn.o_proj.weight', 'model.layers.24.mlp.gate_proj.weight', 'model.layers.24.mlp.up_proj.weight', 'model.layers.24.mlp.down_proj.weight', 'model.layers.24.input_layernorm.weight', 'model.layers.24.post_attention_layernorm.weight', 'model.layers.25.self_attn.q_proj.weight', 'model.layers.25.self_attn.k_proj.weight', 'model.layers.25.self_attn.v_proj.weight', 'model.layers.25.self_attn.o_proj.weight', 'model.layers.25.mlp.gate_proj.weight', 'model.layers.25.mlp.up_proj.weight', 'model.layers.25.mlp.down_proj.weight', 'model.layers.25.input_layernorm.weight', 'model.layers.25.post_attention_layernorm.weight', 'model.layers.26.self_attn.q_proj.weight', 'model.layers.26.self_attn.k_proj.weight', 'model.layers.26.self_attn.v_proj.weight', 'model.layers.26.self_attn.o_proj.weight', 'model.layers.26.mlp.gate_proj.weight', 'model.layers.26.mlp.up_proj.weight', 'model.layers.26.mlp.down_proj.weight', 'model.layers.26.input_layernorm.weight', 'model.layers.26.post_attention_layernorm.weight', 
'model.layers.27.self_attn.q_proj.weight', 'model.layers.27.self_attn.k_proj.weight', 'model.layers.27.self_attn.v_proj.weight', 'model.layers.27.self_attn.o_proj.weight', 'model.layers.27.mlp.gate_proj.weight', 'model.layers.27.mlp.up_proj.weight', 'model.layers.27.mlp.down_proj.weight', 'model.layers.27.input_layernorm.weight', 'model.layers.27.post_attention_layernorm.weight', 'model.layers.28.self_attn.q_proj.weight', 'model.layers.28.self_attn.k_proj.weight', 'model.layers.28.self_attn.v_proj.weight', 'model.layers.28.self_attn.o_proj.weight', 'model.layers.28.mlp.gate_proj.weight', 'model.layers.28.mlp.up_proj.weight', 'model.layers.28.mlp.down_proj.weight', 'model.layers.28.input_layernorm.weight', 'model.layers.28.post_attention_layernorm.weight', 'model.layers.29.self_attn.q_proj.weight', 'model.layers.29.self_attn.k_proj.weight', 'model.layers.29.self_attn.v_proj.weight', 'model.layers.29.self_attn.o_proj.weight', 'model.layers.29.mlp.gate_proj.weight', 'model.layers.29.mlp.up_proj.weight', 'model.layers.29.mlp.down_proj.weight', 'model.layers.29.input_layernorm.weight', 'model.layers.29.post_attention_layernorm.weight', 'model.layers.30.self_attn.q_proj.weight', 'model.layers.30.self_attn.k_proj.weight', 'model.layers.30.self_attn.v_proj.weight', 'model.layers.30.self_attn.o_proj.weight', 'model.layers.30.mlp.gate_proj.weight', 'model.layers.30.mlp.up_proj.weight', 'model.layers.30.mlp.down_proj.weight', 'model.layers.30.input_layernorm.weight', 'model.layers.30.post_attention_layernorm.weight', 'model.layers.31.self_attn.q_proj.weight', 'model.layers.31.self_attn.k_proj.weight', 'model.layers.31.self_attn.v_proj.weight', 'model.layers.31.self_attn.o_proj.weight', 'model.layers.31.mlp.gate_proj.weight', 'model.layers.31.mlp.up_proj.weight', 'model.layers.31.mlp.down_proj.weight', 'model.layers.31.input_layernorm.weight', 'model.layers.31.post_attention_layernorm.weight', 'model.layers.32.self_attn.q_proj.weight', 'model.layers.32.self_attn.k_proj.weight', 'model.layers.32.self_attn.v_proj.weight', 'model.layers.32.self_attn.o_proj.weight', 'model.layers.32.mlp.gate_proj.weight', 'model.layers.32.mlp.up_proj.weight', 'model.layers.32.mlp.down_proj.weight', 'model.layers.32.input_layernorm.weight', 'model.layers.32.post_attention_layernorm.weight', 'model.layers.33.self_attn.q_proj.weight', 'model.layers.33.self_attn.k_proj.weight', 'model.layers.33.self_attn.v_proj.weight', 'model.layers.33.self_attn.o_proj.weight', 'model.layers.33.mlp.gate_proj.weight', 'model.layers.33.mlp.up_proj.weight', 'model.layers.33.mlp.down_proj.weight', 'model.layers.33.input_layernorm.weight', 'model.layers.33.post_attention_layernorm.weight', 'model.layers.34.self_attn.q_proj.weight', 'model.layers.34.self_attn.k_proj.weight', 'model.layers.34.self_attn.v_proj.weight', 'model.layers.34.self_attn.o_proj.weight', 'model.layers.34.mlp.gate_proj.weight', 'model.layers.34.mlp.up_proj.weight', 'model.layers.34.mlp.down_proj.weight', 'model.layers.34.input_layernorm.weight', 'model.layers.34.post_attention_layernorm.weight', 'model.layers.35.self_attn.q_proj.weight', 'model.layers.35.self_attn.k_proj.weight', 'model.layers.35.self_attn.v_proj.weight', 'model.layers.35.self_attn.o_proj.weight', 'model.layers.35.mlp.gate_proj.weight', 'model.layers.35.mlp.up_proj.weight', 'model.layers.35.mlp.down_proj.weight', 'model.layers.35.input_layernorm.weight', 'model.layers.35.post_attention_layernorm.weight', 'model.layers.36.self_attn.q_proj.weight', 'model.layers.36.self_attn.k_proj.weight', 
'model.layers.36.self_attn.v_proj.weight', 'model.layers.36.self_attn.o_proj.weight', 'model.layers.36.mlp.gate_proj.weight', 'model.layers.36.mlp.up_proj.weight', 'model.layers.36.mlp.down_proj.weight', 'model.layers.36.input_layernorm.weight', 'model.layers.36.post_attention_layernorm.weight', 'model.layers.37.self_attn.q_proj.weight', 'model.layers.37.self_attn.k_proj.weight', 'model.layers.37.self_attn.v_proj.weight', 'model.layers.37.self_attn.o_proj.weight', 'model.layers.37.mlp.gate_proj.weight', 'model.layers.37.mlp.up_proj.weight', 'model.layers.37.mlp.down_proj.weight', 'model.layers.37.input_layernorm.weight', 'model.layers.37.post_attention_layernorm.weight', 'model.layers.38.self_attn.q_proj.weight', 'model.layers.38.self_attn.k_proj.weight', 'model.layers.38.self_attn.v_proj.weight', 'model.layers.38.self_attn.o_proj.weight', 'model.layers.38.mlp.gate_proj.weight', 'model.layers.38.mlp.up_proj.weight', 'model.layers.38.mlp.down_proj.weight', 'model.layers.38.input_layernorm.weight', 'model.layers.38.post_attention_layernorm.weight', 'model.layers.39.self_attn.q_proj.weight', 'model.layers.39.self_attn.k_proj.weight', 'model.layers.39.self_attn.v_proj.weight', 'model.layers.39.self_attn.o_proj.weight', 'model.layers.39.mlp.gate_proj.weight', 'model.layers.39.mlp.up_proj.weight', 'model.layers.39.mlp.down_proj.weight', 'model.layers.39.input_layernorm.weight', 'model.layers.39.post_attention_layernorm.weight', 'model.layers.40.self_attn.q_proj.weight', 'model.layers.40.self_attn.k_proj.weight', 'model.layers.40.self_attn.v_proj.weight', 'model.layers.40.self_attn.o_proj.weight', 'model.layers.40.mlp.gate_proj.weight', 'model.layers.40.mlp.up_proj.weight', 'model.layers.40.mlp.down_proj.weight', 'model.layers.40.input_layernorm.weight', 'model.layers.40.post_attention_layernorm.weight', 'model.layers.41.self_attn.q_proj.weight', 'model.layers.41.self_attn.k_proj.weight', 'model.layers.41.self_attn.v_proj.weight', 'model.layers.41.self_attn.o_proj.weight', 'model.layers.41.mlp.gate_proj.weight', 'model.layers.41.mlp.up_proj.weight', 'model.layers.41.mlp.down_proj.weight', 'model.layers.41.input_layernorm.weight', 'model.layers.41.post_attention_layernorm.weight', 'model.layers.42.self_attn.q_proj.weight', 'model.layers.42.self_attn.k_proj.weight', 'model.layers.42.self_attn.v_proj.weight', 'model.layers.42.self_attn.o_proj.weight', 'model.layers.42.mlp.gate_proj.weight', 'model.layers.42.mlp.up_proj.weight', 'model.layers.42.mlp.down_proj.weight', 'model.layers.42.input_layernorm.weight', 'model.layers.42.post_attention_layernorm.weight', 'model.layers.43.self_attn.q_proj.weight', 'model.layers.43.self_attn.k_proj.weight', 'model.layers.43.self_attn.v_proj.weight', 'model.layers.43.self_attn.o_proj.weight', 'model.layers.43.mlp.gate_proj.weight', 'model.layers.43.mlp.up_proj.weight', 'model.layers.43.mlp.down_proj.weight', 'model.layers.43.input_layernorm.weight', 'model.layers.43.post_attention_layernorm.weight', 'model.layers.44.self_attn.q_proj.weight', 'model.layers.44.self_attn.k_proj.weight', 'model.layers.44.self_attn.v_proj.weight', 'model.layers.44.self_attn.o_proj.weight', 'model.layers.44.mlp.gate_proj.weight', 'model.layers.44.mlp.up_proj.weight', 'model.layers.44.mlp.down_proj.weight', 'model.layers.44.input_layernorm.weight', 'model.layers.44.post_attention_layernorm.weight', 'model.layers.45.self_attn.q_proj.weight', 'model.layers.45.self_attn.k_proj.weight', 'model.layers.45.self_attn.v_proj.weight', 'model.layers.45.self_attn.o_proj.weight', 
'model.layers.45.mlp.gate_proj.weight', 'model.layers.45.mlp.up_proj.weight', 'model.layers.45.mlp.down_proj.weight', 'model.layers.45.input_layernorm.weight', 'model.layers.45.post_attention_layernorm.weight', 'model.layers.46.self_attn.q_proj.weight', 'model.layers.46.self_attn.k_proj.weight', 'model.layers.46.self_attn.v_proj.weight', 'model.layers.46.self_attn.o_proj.weight', 'model.layers.46.mlp.gate_proj.weight', 'model.layers.46.mlp.up_proj.weight', 'model.layers.46.mlp.down_proj.weight', 'model.layers.46.input_layernorm.weight', 'model.layers.46.post_attention_layernorm.weight', 'model.layers.47.self_attn.q_proj.weight', 'model.layers.47.self_attn.k_proj.weight', 'model.layers.47.self_attn.v_proj.weight', 'model.layers.47.self_attn.o_proj.weight', 'model.layers.47.mlp.gate_proj.weight', 'model.layers.47.mlp.up_proj.weight', 'model.layers.47.mlp.down_proj.weight', 'model.layers.47.input_layernorm.weight', 'model.layers.47.post_attention_layernorm.weight', 'model.layers.48.self_attn.q_proj.weight', 'model.layers.48.self_attn.k_proj.weight', 'model.layers.48.self_attn.v_proj.weight', 'model.layers.48.self_attn.o_proj.weight', 'model.layers.48.mlp.gate_proj.weight', 'model.layers.48.mlp.up_proj.weight', 'model.layers.48.mlp.down_proj.weight', 'model.layers.48.input_layernorm.weight', 'model.layers.48.post_attention_layernorm.weight', 'model.layers.49.self_attn.q_proj.weight', 'model.layers.49.self_attn.k_proj.weight', 'model.layers.49.self_attn.v_proj.weight', 'model.layers.49.self_attn.o_proj.weight', 'model.layers.49.mlp.gate_proj.weight', 'model.layers.49.mlp.up_proj.weight', 'model.layers.49.mlp.down_proj.weight', 'model.layers.49.input_layernorm.weight', 'model.layers.49.post_attention_layernorm.weight', 'model.layers.3.self_attn.q_proj.weight', 'model.layers.3.self_attn.k_proj.weight', 'model.layers.3.self_attn.v_proj.weight', 'model.layers.3.self_attn.o_proj.weight'])" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[6], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# Load state dict into hybrid model from Thinker\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m model_base \u001b[38;5;241m=\u001b[39m \u001b[43mMistralForCausalLM\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfrom_pretrained\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpath_thinker\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 3\u001b[0m model_hybrid\u001b[38;5;241m.\u001b[39mload_state_dict(model_base\u001b[38;5;241m.\u001b[39mstate_dict(), strict\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m)\n", + "File \u001b[0;32m~/.conda/envs/fast_llm/lib/python3.12/site-packages/transformers/modeling_utils.py:311\u001b[0m, in \u001b[0;36mrestore_default_torch_dtype.._wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 309\u001b[0m old_dtype \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mget_default_dtype()\n\u001b[1;32m 310\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 311\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 312\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[1;32m 313\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_default_dtype(old_dtype)\n", + "File \u001b[0;32m~/.conda/envs/fast_llm/lib/python3.12/site-packages/transformers/modeling_utils.py:4839\u001b[0m, in \u001b[0;36mPreTrainedModel.from_pretrained\u001b[0;34m(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, use_safetensors, weights_only, *model_args, **kwargs)\u001b[0m\n\u001b[1;32m 4829\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m dtype_orig \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 4830\u001b[0m torch\u001b[38;5;241m.\u001b[39mset_default_dtype(dtype_orig)\n\u001b[1;32m 4832\u001b[0m (\n\u001b[1;32m 4833\u001b[0m model,\n\u001b[1;32m 4834\u001b[0m missing_keys,\n\u001b[1;32m 4835\u001b[0m unexpected_keys,\n\u001b[1;32m 4836\u001b[0m mismatched_keys,\n\u001b[1;32m 4837\u001b[0m offload_index,\n\u001b[1;32m 4838\u001b[0m error_msgs,\n\u001b[0;32m-> 4839\u001b[0m ) \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mcls\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_load_pretrained_model\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 4840\u001b[0m \u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4841\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4842\u001b[0m \u001b[43m \u001b[49m\u001b[43mcheckpoint_files\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4843\u001b[0m \u001b[43m \u001b[49m\u001b[43mpretrained_model_name_or_path\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4844\u001b[0m \u001b[43m \u001b[49m\u001b[43mignore_mismatched_sizes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mignore_mismatched_sizes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4845\u001b[0m \u001b[43m \u001b[49m\u001b[43msharded_metadata\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msharded_metadata\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4846\u001b[0m \u001b[43m \u001b[49m\u001b[43mdevice_map\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdevice_map\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4847\u001b[0m \u001b[43m \u001b[49m\u001b[43mdisk_offload_folder\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moffload_folder\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4848\u001b[0m \u001b[43m \u001b[49m\u001b[43moffload_state_dict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moffload_state_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4849\u001b[0m \u001b[43m \u001b[49m\u001b[43mdtype\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtorch_dtype\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4850\u001b[0m \u001b[43m \u001b[49m\u001b[43mhf_quantizer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhf_quantizer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4851\u001b[0m \u001b[43m \u001b[49m\u001b[43mkeep_in_fp32_regex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mkeep_in_fp32_regex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4852\u001b[0m \u001b[43m \u001b[49m\u001b[43mdevice_mesh\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdevice_mesh\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4853\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mkey_mapping\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mkey_mapping\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4854\u001b[0m \u001b[43m \u001b[49m\u001b[43mweights_only\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweights_only\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 4855\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 4857\u001b[0m \u001b[38;5;66;03m# record tp degree the model sharded to\u001b[39;00m\n\u001b[1;32m 4858\u001b[0m model\u001b[38;5;241m.\u001b[39m_tp_size \u001b[38;5;241m=\u001b[39m tp_size\n", + "File \u001b[0;32m~/.conda/envs/fast_llm/lib/python3.12/site-packages/transformers/modeling_utils.py:5302\u001b[0m, in \u001b[0;36mPreTrainedModel._load_pretrained_model\u001b[0;34m(cls, model, state_dict, checkpoint_files, pretrained_model_name_or_path, ignore_mismatched_sizes, sharded_metadata, device_map, disk_offload_folder, offload_state_dict, dtype, hf_quantizer, keep_in_fp32_regex, device_mesh, key_mapping, weights_only)\u001b[0m\n\u001b[1;32m 5299\u001b[0m args_list \u001b[38;5;241m=\u001b[39m logging\u001b[38;5;241m.\u001b[39mtqdm(args_list, desc\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mLoading checkpoint shards\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 5301\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m args \u001b[38;5;129;01min\u001b[39;00m args_list:\n\u001b[0;32m-> 5302\u001b[0m _error_msgs, disk_offload_index, cpu_offload_index \u001b[38;5;241m=\u001b[39m \u001b[43mload_shard_file\u001b[49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 5303\u001b[0m error_msgs \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m _error_msgs\n\u001b[1;32m 5305\u001b[0m \u001b[38;5;66;03m# Adjust offloaded weights name and save if needed\u001b[39;00m\n", + "File \u001b[0;32m~/.conda/envs/fast_llm/lib/python3.12/site-packages/transformers/modeling_utils.py:933\u001b[0m, in \u001b[0;36mload_shard_file\u001b[0;34m(args)\u001b[0m\n\u001b[1;32m 931\u001b[0m \u001b[38;5;66;03m# Skip it with fsdp on ranks other than 0\u001b[39;00m\n\u001b[1;32m 932\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (is_fsdp_enabled() \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m is_local_dist_rank_0() \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m is_quantized):\n\u001b[0;32m--> 933\u001b[0m disk_offload_index, cpu_offload_index \u001b[38;5;241m=\u001b[39m \u001b[43m_load_state_dict_into_meta_model\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 934\u001b[0m \u001b[43m \u001b[49m\u001b[43mmodel_to_load\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 935\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 936\u001b[0m \u001b[43m \u001b[49m\u001b[43mshard_file\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 937\u001b[0m \u001b[43m \u001b[49m\u001b[43mexpected_keys\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 938\u001b[0m \u001b[43m \u001b[49m\u001b[43mreverse_key_renaming_mapping\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 939\u001b[0m \u001b[43m \u001b[49m\u001b[43mdevice_map\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdevice_map\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 940\u001b[0m \u001b[43m \u001b[49m\u001b[43mdisk_offload_folder\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdisk_offload_folder\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 941\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mdisk_offload_index\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdisk_offload_index\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 942\u001b[0m \u001b[43m \u001b[49m\u001b[43mcpu_offload_folder\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcpu_offload_folder\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 943\u001b[0m \u001b[43m \u001b[49m\u001b[43mcpu_offload_index\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcpu_offload_index\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 944\u001b[0m \u001b[43m \u001b[49m\u001b[43mhf_quantizer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhf_quantizer\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 945\u001b[0m \u001b[43m \u001b[49m\u001b[43mis_safetensors\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mis_offloaded_safetensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 946\u001b[0m \u001b[43m \u001b[49m\u001b[43mkeep_in_fp32_regex\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mkeep_in_fp32_regex\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 947\u001b[0m \u001b[43m \u001b[49m\u001b[43munexpected_keys\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43munexpected_keys\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 948\u001b[0m \u001b[43m \u001b[49m\u001b[43mdevice_mesh\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdevice_mesh\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 949\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 951\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m error_msgs, disk_offload_index, cpu_offload_index\n", + "File \u001b[0;32m~/.conda/envs/fast_llm/lib/python3.12/site-packages/torch/utils/_contextlib.py:116\u001b[0m, in \u001b[0;36mcontext_decorator..decorate_context\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 113\u001b[0m \u001b[38;5;129m@functools\u001b[39m\u001b[38;5;241m.\u001b[39mwraps(func)\n\u001b[1;32m 114\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mdecorate_context\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 115\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m ctx_factory():\n\u001b[0;32m--> 116\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/.conda/envs/fast_llm/lib/python3.12/site-packages/transformers/modeling_utils.py:810\u001b[0m, in \u001b[0;36m_load_state_dict_into_meta_model\u001b[0;34m(model, state_dict, shard_file, expected_keys, reverse_renaming_mapping, device_map, disk_offload_folder, disk_offload_index, cpu_offload_folder, cpu_offload_index, hf_quantizer, is_safetensors, keep_in_fp32_regex, unexpected_keys, device_mesh)\u001b[0m\n\u001b[1;32m 808\u001b[0m param \u001b[38;5;241m=\u001b[39m param[\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m]\n\u001b[1;32m 809\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m casting_dtype \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m--> 810\u001b[0m param \u001b[38;5;241m=\u001b[39m \u001b[43mparam\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcasting_dtype\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 811\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
to_contiguous:\n\u001b[1;32m 812\u001b[0m param \u001b[38;5;241m=\u001b[39m param\u001b[38;5;241m.\u001b[39mcontiguous()\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] } ], "source": [ @@ -1050,7 +1065,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 14, "metadata": {}, "outputs": [], "source": [ @@ -1125,14 +1140,14 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "Loading checkpoint shards: 100%|██████████| 7/7 [00:05<00:00, 1.22it/s]\n" + "Loading checkpoint shards: 100%|██████████| 7/7 [00:07<00:00, 1.01s/it]\n" ] }, { @@ -1146,17 +1161,21 @@ "Converting layer %d... 2\n", "Skipping transformer layer 2...\n", "Converting layer %d... 3\n", - "Skipping transformer layer 3...\n", + "Converting layer 3...\n", + "Init Mamba using Attention\n", "Converting layer %d... 4\n", "Skipping transformer layer 4...\n", "Converting layer %d... 5\n", - "Skipping transformer layer 5...\n", + "Converting layer 5...\n", + "Init Mamba using Attention\n", "Converting layer %d... 6\n", - "Skipping transformer layer 6...\n", + "Converting layer 6...\n", + "Init Mamba using Attention\n", "Converting layer %d... 7\n", "Skipping transformer layer 7...\n", "Converting layer %d... 8\n", - "Skipping transformer layer 8...\n", + "Converting layer 8...\n", + "Init Mamba using Attention\n", "Converting layer %d... 9\n", "Skipping transformer layer 9...\n", "Converting layer %d... 10\n", @@ -1277,7 +1296,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ @@ -1286,7 +1305,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 10, "metadata": {}, "outputs": [ { @@ -1306,12 +1325,12 @@ " \"t\",\n", " \"t\",\n", " \"t\",\n", + " \"m2\",\n", " \"t\",\n", + " \"m2\",\n", + " \"m2\",\n", " \"t\",\n", - " \"t\",\n", - " \"t\",\n", - " \"t\",\n", - " \"t\",\n", + " \"m2\",\n", " \"t\",\n", " \"t\",\n", " \"t\",\n", @@ -1380,6 +1399,8 @@ " \"dt_rank\": \"auto\",\n", " \"dt_scale\": 1.0,\n", " \"expand\": 1,\n", + " \"head_dim\": 128,\n", + " \"layer_norm_epsilon\": 1e-05,\n", " \"n_qk_heads\": 32,\n", " \"n_v_heads\": 32\n", " },\n", @@ -1391,7 +1412,7 @@ "}" ] }, - "execution_count": 14, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } @@ -1402,7 +1423,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ @@ -1411,26 +1432,40 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 12, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "Loading checkpoint shards: 100%|██████████| 3/3 [00:00<00:00, 427.77it/s]\n" + "Loading checkpoint shards: 100%|██████████| 4/4 [00:00<00:00, 37.80it/s]" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['model.layers.6.mixer.A_log', 'model.layers.6.mixer.D', 'model.layers.6.mixer.conv1d.weight', 'model.layers.6.mixer.conv1d.bias', 'model.layers.6.mixer.in_proj.weight', 'model.layers.6.mixer.dt_in_proj.weight', 'model.layers.6.mixer.dt_proj.weight', 'model.layers.6.mixer.dt_proj.bias', 'model.layers.6.mixer.out_proj.weight', 'model.layers.8.mixer.A_log', 'model.layers.8.mixer.D', 'model.layers.8.mixer.conv1d.weight', 'model.layers.8.mixer.conv1d.bias', 'model.layers.8.mixer.in_proj.weight', 
'model.layers.8.mixer.dt_in_proj.weight', 'model.layers.8.mixer.dt_proj.weight', 'model.layers.8.mixer.dt_proj.bias', 'model.layers.8.mixer.out_proj.weight']\n", + "['model.layers.6.self_attn.q_proj.weight', 'model.layers.6.self_attn.k_proj.weight', 'model.layers.6.self_attn.v_proj.weight', 'model.layers.6.self_attn.o_proj.weight', 'model.layers.8.self_attn.q_proj.weight', 'model.layers.8.self_attn.k_proj.weight', 'model.layers.8.self_attn.v_proj.weight', 'model.layers.8.self_attn.o_proj.weight']\n" ] } ], "source": [ "# load state dict from existing pretrained SSM?\n", - "path_25hyb = \"/mnt/checkpoints/ssm/apriel_ssm_thinker5l_hybrid_1ssm_init_rand_debug_tpformat\" #\"/mnt/checkpoints/fast_llm_exp/slam_ssm_distill/15b-oshyb25lmil-bs768-lr0.0003-lrs0-0-0-0-sl4096_ti5000_lm6/export/apriel_ssm_thinker_hybrid/5000_new\"\n", + "path_25hyb = path_hybrid #\"/mnt/checkpoints/ssm/apriel_ssm_thinker5l_hybrid_1ssm_init_rand_debug_tpformat\" #\"/mnt/checkpoints/fast_llm_exp/slam_ssm_distill/15b-oshyb25lmil-bs768-lr0.0003-lrs0-0-0-0-sl4096_ti5000_lm6/export/apriel_ssm_thinker_hybrid/5000_new\"\n", "model = AprielThinkerSSMHybridForCausalLM.from_pretrained(path_25hyb)\n", "state_dict = model.state_dict()\n", - "\n", - "# missing, unexpected = transformer.load_state_dict(state_dict, strict=False)\n", - "# print(missing)\n", - "# print(unexpected)\n", + "missing, unexpected = transformer.load_state_dict(state_dict, strict=False)\n", + "print(missing)\n", + "print(unexpected)\n", "\n", "\n", "\n", @@ -1450,7 +1485,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1458,7 +1493,10 @@ "# mamba2, state 16, expand 1, i.e. same as M1, but with discrete mamba2 and MIL\n", "# transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_1ssm_leastimportant_m2_16hexp1_init_mil\") # 1 ssm\n", "# transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_25ssm_leastimportant_m2_16hexp1_init_mil\") # 25 ssm\n", - "transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_25ssm_leastimportant_m2_16hexp1_init_mil_tpformat\") # 25 ssm\n", + "# transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_25ssm_leastimportant_m2_16hexp1_init_mil_tpformat\") # 25 ssm\n", + "# transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_27ssm_leastimportant_m2_16hexp1_init_hyb25distsftvrlm223k_mil\") # 25 ssm\n", + "# transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_30ssm_leastimportant_m2_16hexp1_init_hyb27distsftvrlm2_3p5ksteps_mil\") # 30 ssm\n", + "transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_29ssm_leastimportant_m2_16hexp1_init_hyb27distsftvrlm2_3p5ksteps_mil\") # 29 ssm\n", "\n", "\n", "# transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_40ssm_leastimportant_m2_16hexp1_init_mil_uniform_from_25h5000lm6\") # 40 ssm" @@ -1482,25 +1520,266 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Data mixing" + "# Nemotron-h mamba layer" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/toolkit/.local/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], + "source": [ + "import gc\n", + "\n", + "import click\n", + "import torch\n", + "from transformers import AutoConfig, AutoModelForCausalLM\n", + "from transformers import MistralForCausalLM\n", + "\n", + "from fast_llm.models.ssm.external.apriel_15b_hybrid.configuration_ssm_hybrid_apriel15b import AprielSSMHybridConfig\n", + "from fast_llm.models.ssm.external.apriel_15b_hybrid.modeling_ssm_hybrid_apriel15b import AprielThinkerSSMHybridForCausalLM, AprielSSMM2DecoderLayer, AprielSSMDecoderLayer, AprielSSMNemotronHM2DecoderLayer, NemotronHMamba2Mixer\n", + "from fast_llm.models.ssm.external.nemotron.modeling import NemotronHMamba2Mixer as NemotronHMamba2Mixer_original\n", + "from fast_llm.models.ssm.external.nemotron.config import NemotronHConfig\n", + "from transformers.models.mistral.modeling_mistral import MistralDecoderLayer\n", + "\n", + "# enable file reload \n", + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "path_hybrid = \"/mnt/checkpoints/fast_llm_exp/slam_ssm_distill/15b-hyb25distsftvrlm2-bs64-lr5e-06-lrs1-1-1-1-sl16384_ti60000_aprsft/export/apriel_ssm_thinker_hybrid/23000\"\n", + "path_thinker = \"/mnt/checkpoints/upstream/Apriel-Nemotron-15b-Thinker\"\n", + "\n", + "config_thinker = AutoConfig.from_pretrained(path_thinker)\n", + "\n", + "# config_hybrid = AprielSSMHybridConfig.from_pretrained(path_hybrid)\n", + "config_thinker.num_hidden_layers = 5\n", + "hybrid_block_layout = [\"t\"] * config_thinker.num_hidden_layers\n", + "# debug\n", + "hybrid_block_layout[2] = \"nm2\"\n", + "hybrid_block_layout[3] = \"nm2\"\n", + "\n", + "# 25/50\n", + "# hybrid_block_layout = [\"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"t\", \"nm2\", \"t\", \"t\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"t\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"t\", \"nm2\" ]\n", + "\n", + "\n", + "ssm_config = {\n", + " \"d_state\": 16,\n", + " \"d_xb\": 1024,\n", + " \"expand\": 1,\n", + " \"d_conv\": 4,\n", + " \"d_inner\": 4096,\n", + " \"conv_bias\": True,\n", + " \"bias\": False,\n", + " \"head_dim\": 16, # 4096/16 = 32 heads, 1024/128 = 8 KVheads and 4 repeat groups\n", + "}\n", + "config_thinker.hybrid_block_layout = hybrid_block_layout\n", + "# config_thinker.ssm_cfg = ssm_config\n", + "# model = AprielThinkerSSMHybridForCausalLM(config_hybrid)\n", + "# mixer = NemotronHMamba2Mixer(d_model=4096, **ssm_config)\n", + "\n", + "\n", + "\n", + "config_hybrid = AprielSSMHybridConfig(\n", + " **config_thinker.to_dict(),\n", + " ssm_cfg=ssm_config\n", + ")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['t', 't', 'nm2', 'nm2', 't']" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "config_hybrid.hybrid_block_layout" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "def convert_layers(transformer, mamba_config, hybrid_block_layout, init_with_kqvo, attn_bias, torch_dtype):\n", + " config = transformer.config\n", + " embed_dim = 
config.hidden_size\n", + " num_heads = config.num_attention_heads\n", + " num_heads_kv = config.num_key_value_heads\n", + " head_dim = embed_dim // num_heads\n", + " q_dim = head_dim * num_heads\n", + " kv_dim = head_dim * num_heads_kv\n", + "\n", + " for layer_idx, type in enumerate(hybrid_block_layout):\n", + " print(\"Converting layer %d...\", layer_idx)\n", + " # Fetch the layer module for easier access\n", + " layer_module = transformer.model.layers._modules[f\"{layer_idx}\"]\n", + " if type == \"t\":\n", + " print(\"Skipping transformer layer %d...\" % layer_idx)\n", + " elif type == \"nm2\":\n", + " print(\"Converting layer %d...\" % layer_idx)\n", + " # Use MambaDecoderLayer for the remaining layers\n", + " mamba_encoder = AprielSSMNemotronHM2DecoderLayer(\n", + " mamba_config,\n", + " layer_idx,\n", + " device=\"cpu\",\n", + " dtype=torch_dtype,\n", + " )\n", + " \n", + " mamba_encoder.mlp.load_state_dict(layer_module.mlp.state_dict())\n", + " mamba_encoder.input_layernorm.load_state_dict(layer_module.input_layernorm.state_dict())\n", + " mamba_encoder.post_attention_layernorm.load_state_dict(layer_module.post_attention_layernorm.state_dict())\n", + " mamba_encoder.mixer.out_proj.load_state_dict(layer_module.self_attn.o_proj.state_dict())\n", + "\n", + " num_xb_heads = mamba_config.ssm_cfg[\"d_xb\"] // mamba_config.ssm_cfg[\"head_dim\"]\n", + " num_heads = mamba_config.ssm_cfg[\"d_inner\"] // mamba_config.ssm_cfg[\"head_dim\"]\n", + "\n", + " if init_with_kqvo:\n", + " # Copy weights: [z, x, B, C, dt], x -> v, B -> k, C -> q\n", + " mamba_encoder.mixer.in_proj.weight.data[\n", + " mamba_config.ssm_cfg[\"d_inner\"] : mamba_config.ssm_cfg[\"d_inner\"] + mamba_config.ssm_cfg[\"d_xb\"], :\n", + " ].copy_(layer_module.self_attn.v_proj.weight.data)\n", + " mamba_encoder.mixer.in_proj.weight.data[\n", + " mamba_config.ssm_cfg[\"d_inner\"] + mamba_config.ssm_cfg[\"d_xb\"] : mamba_config.ssm_cfg[\"d_inner\"] + mamba_config.ssm_cfg[\"d_xb\"] + (num_xb_heads * mamba_config.ssm_cfg[\"d_state\"]), :\n", + " ].copy_(layer_module.self_attn.k_proj.weight.data)\n", + " mamba_encoder.mixer.in_proj.weight.data[\n", + " mamba_config.ssm_cfg[\"d_inner\"] + mamba_config.ssm_cfg[\"d_xb\"] + (num_xb_heads * mamba_config.ssm_cfg[\"d_state\"]) : mamba_config.ssm_cfg[\"d_inner\"] + mamba_config.ssm_cfg[\"d_xb\"] + (num_xb_heads * mamba_config.ssm_cfg[\"d_state\"]) + (num_heads * mamba_config.ssm_cfg[\"d_state\"]), :\n", + " ].copy_(layer_module.self_attn.q_proj.weight.data)\n", + "\n", + " print(\"Init Mamba using Attention\")\n", + "\n", + " transformer.model.layers[layer_idx] = mamba_encoder\n", + "\n", + " # elif type == \"m2d\":\n", + " # print(\"Converting layer %d...\" % layer_idx)\n", + " # mamba_encoder = AprielSSMDecoderLayer(\n", + " # mamba_config,\n", + " # layer_idx,\n", + " # device=\"cpu\",\n", + " # dtype=torch_dtype,\n", + " # )\n", + " # mamba_encoder.mlp.load_state_dict(layer_module.mlp.state_dict())\n", + " # mamba_encoder.input_layernorm.load_state_dict(layer_module.input_layernorm.state_dict())\n", + " # mamba_encoder.post_attention_layernorm.load_state_dict(layer_module.post_attention_layernorm.state_dict())\n", + " # mamba_encoder.mixer.out_proj.load_state_dict(layer_module.self_attn.o_proj.state_dict())\n", + "\n", + " # if init_with_kqvo:\n", + " \n", + "\n", + "\n", + " \n", + " else:\n", + " raise ValueError(f\"Invalid layer type: {type}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", 
+ "text": [ + "Loading checkpoint shards: 100%|██████████| 7/7 [00:04<00:00, 1.57it/s]\n", + "Instantiating mamba2 with num_heads: 256, head_dim: 16, \n", + " intermediate_size: 4096, \n", + " d_xb: 1024, \n", + " number_xb_heads: 64, \n", + " repeat_groups: 4, \n", + " d_state: 16\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Converting layer %d... 0\n", + "Skipping transformer layer 0...\n", + "Converting layer %d... 1\n", + "Skipping transformer layer 1...\n", + "Converting layer %d... 2\n", + "Converting layer 2...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Instantiating mamba2 with num_heads: 256, head_dim: 16, \n", + " intermediate_size: 4096, \n", + " d_xb: 1024, \n", + " number_xb_heads: 64, \n", + " repeat_groups: 4, \n", + " d_state: 16\n" + ] + }, { "name": "stdout", "output_type": "stream", "text": [ - "torch.Size([])\n", - "KL (global, F.kl_div) = 0.738795\n", - "KL (sum of shards, manual) = 0.738795\n" + "Init Mamba using Attention\n", + "Converting layer %d... 3\n", + "Converting layer 3...\n", + "Init Mamba using Attention\n", + "Converting layer %d... 4\n", + "Skipping transformer layer 4...\n" ] } ], - "source": [] + "source": [ + "transformer = AutoModelForCausalLM.from_pretrained(path_thinker)\n", + "init_with_kqvo = True\n", + "torch_dtype = torch.bfloat16\n", + "attn_bias = True\n", + "convert_layers(transformer, config_hybrid, hybrid_block_layout, init_with_kqvo, attn_bias, torch_dtype)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "transformer.config = config_hybrid" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_nmtrhnm2_5l_debug\")\n", + "# transformer.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_25ssm_leastimportant_nm2_16hexp1_init_mil\") # 25 ssm" + ] }, { "cell_type": "code", @@ -1509,12 +1788,87 @@ "outputs": [], "source": [] }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "NemotronHMamba2Mixer(\n", + " (act): SiLU()\n", + " (conv1d): Conv1d(5120, 5120, kernel_size=(4,), stride=(1,), padding=(3,), groups=5120)\n", + " (in_proj): Linear(in_features=4096, out_features=9216, bias=False)\n", + " (dt_in_proj): Linear(in_features=4096, out_features=32, bias=False)\n", + " (norm): MambaRMSNormGated()\n", + " (out_proj): Linear(in_features=4096, out_features=4096, bias=False)\n", + ")" + ] + }, + "execution_count": 48, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "mixer" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": {}, + "outputs": [], + "source": [ + "nconfig = NemotronHConfig(\n", + " hidden_size=4096,\n", + " mamba_num_heads = 32,\n", + " mamba_head_dim = 128,\n", + " mamba_n_groups = 32,\n", + " mamba_d_conv = 4,\n", + " mamba_expand = 1,\n", + " ssm_state_size = 16,\n", + " mamba_hidden_act = \"silu\"\n", + "\n", + ")\n", + "mixer_nemotron_h = NemotronHMamba2Mixer_original(nconfig, 0)" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "NemotronHMamba2Mixer(\n", + " (act): SiLU()\n", + " (conv1d): Conv1d(5120, 5120, kernel_size=(4,), stride=(1,), padding=(3,), groups=5120)\n", + " (in_proj): Linear(in_features=4096, out_features=9248, bias=False)\n", + 
" (norm): MambaRMSNormGated()\n", + " (out_proj): Linear(in_features=4096, out_features=4096, bias=False)\n", + ")" + ] + }, + "execution_count": 52, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "mixer_nemotron_h" + ] + }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "# model.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_nmtrhm2_5l_debug\")" + ] }, { "cell_type": "code", @@ -1534,140 +1888,86 @@ "cell_type": "code", "execution_count": null, "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "from safetensors.torch import load_file\n", + "import glob\n", + "safe_tensors = glob.glob(path_hybrid + \"/model*.safetensors\")\n", + "\n", + "combined_state_dict = {}\n", + "for safe_tensor in safe_tensors:\n", + " state_dict = load_file(safe_tensor)\n", + " combined_state_dict.update(state_dict)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============================================================\n", - "WRONG APPROACH: Naive Discretization\n", - "============================================================\n", - "Step 1: x = 7.000\n", - "Step 2: x = 5.500\n", - "Step 3: x = 4.750\n", - "Step 4: x = 4.375\n", - "Step 5: x = 4.188\n", - "\n", - "============================================================\n", - "CORRECT APPROACH: Solving the Differential Equation\n", - "============================================================\n", - "\n", - "We need to solve: dx/dt = Ax + Bu\n", - "This is a first-order linear ODE with constant coefficients.\n", - "\n", - "Step 1: Homogeneous solution (u=0)\n", - " dx/dt = Ax\n", - " Solution: x_h(t) = e^(At) * x(0)\n", - "\n", - "Step 2: Particular solution (variation of parameters)\n", - " Full solution: x(t) = e^(At)*x(0) + ∫[0,t] e^(A(t-τ))*B*u(τ) dτ\n", - "\n", - "Step 3: Apply ZOH (u is constant over [0,Δ])\n", - " x(Δ) = e^(AΔ)*x(0) + (∫[0,Δ] e^(As) ds)*B*u\n", - " x(Δ) = e^(AΔ)*x(0) + A^(-1)*(e^(AΔ) - 1)*B*u\n", - "\n", - "Discretized system:\n", - "A_d = e^(AΔ) = e^(-0.5*1.0) = 0.607\n", - "B_d = (e^(AΔ)-1)/A * B = 1.574\n", - "Step 1: x = 7.639\n", - "Step 2: x = 6.207\n", - "Step 3: x = 5.339\n", - "Step 4: x = 4.812\n", - "Step 5: x = 4.493\n" - ] - }, - { - "ename": "TypeError", - "evalue": "unsupported format string passed to numpy.ndarray.__format__", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[9], line 111\u001b[0m\n\u001b[1;32m 109\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(\u001b[38;5;241m1\u001b[39m, \u001b[38;5;28mlen\u001b[39m(t_discrete)):\n\u001b[1;32m 110\u001b[0m error_naive \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mabs\u001b[39m(x_naive_history[i] \u001b[38;5;241m-\u001b[39m x_continuous[\u001b[38;5;28mint\u001b[39m(i\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m1000\u001b[39m\u001b[38;5;241m/\u001b[39m\u001b[38;5;241m5\u001b[39m)])\n\u001b[0;32m--> 111\u001b[0m ax1\u001b[38;5;241m.\u001b[39mannotate(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mError: 
{error_naive:.2f}', \n    112                  xy=(i, x_naive_history[i]), \n    113                  xytext=(i+0.1, x_naive_history[i]-1),\n    114                  fontsize=8, color='red', alpha=0.7)\n    116 # Plot 2: The continuous evolution between samples\n    117 ax2 = axes[0, 1]\n",
    "TypeError: unsupported format string passed to numpy.ndarray.__format__"
   ]
  },
  {
   "data": {
    "image/png": "<base64-encoded matplotlib figure omitted>"
26exZ6a67pH//29VVlVpoqPThh9L69dJf/mLfafho04z7tX/IZnXN+kjhV0Vqf+Z+9f+6v1pPbq252+bKjR/MCAAAAABeh6AI8ARVqkhffy397W9SXp7Up480YYKrqyqTNm2kpUulWbOk+vXNfUZuJX0//nGdTtyp233fUkhAiLYc2aJ7Ztyj2E9i9d2e71xaMwAAAABUFARFgKeoVEmaNk0aNMj8evBg6eWXJQ+ccWOxSH/9q7R1qzRmjHTVVeb+k8cq69vhz6vyR7t1Z9AwValURT8f/Fk3fX6Tuv27m9b+tta1hQMAAACAlyMoAjyJj480fryZrkhSYqK58E9urmvrKqOAAGnoUHP9osceM9+eJP22u5q+iX9NdefsUo+wp1XJp5IW7VqkDlM6qNesXtr++3bXFg4AAAAAXoqgCPA0FouZrnz0kZmsfPyx9MAD0rlzrq6szCIipClTpF9+ke688+L+bWvC9b+n3tO1K9PUvU4fWWTRV5u/UotJLfTE/57Qr1m/uq5oAAAAAPBCBEWAp3r8cWnmTPOxYnPmmItdZ2W5uqor0rKl9L//Sd99J3XocHH/igVXa/5jn+v2fRt0c50eyjPyNGXdFDV+r7FeXPyifj/zu+uKBgAAAAAvQlAEeLJ775WSkqTAQGnZMvNxYhkZrq7qiv3lL9LPP0vTp0tXX31x/4LPrtEPT83TPcd/Ukx4F53LPae3VrylBu820Os/vK5TOadcVjMAAAAAeAOCIsDT3XijGRLVqmU+e/7666U9e1xd1RXz8ZF69TIXvP7nP6Xq1c39589Lc97prNRB3+veswvUskYbZWVn6ZXvXlGjdxtp4qqJysnLcW3xAAAAAOChCIoAbxAdLS1fbj5vfudOqVMnc8EfL2C1mg9627VLGjbs4hPSss9ZNPuN27Xv5XW61/aFrg5uqIzTGXrm22fUbGIz/fuXfyvPlufS2gEAAADA0xAUAd6icWMzLGrVSkpPl264QfrpJ1dXVW6qVZNee03avVsaPNgMkCTp1EkfzX71IZ14fat6+n2gsKvCtefEHvWZ00ftPmynb7Z/I8MwXFo7AAAAAHgKgiLAm0RGSj/8IHXuLGVmSrfcYq4O7UVCQ6Xx482JU3//u+TnZ+4/frSS5r7ypIwJu9Sj8lhVs1bTxsMb1ePLHrr+s+v1474fXVs4AAAAAHgAgiLA24SESIsWSd27S+fOSffcI02b5uqqyl2dOtLkydK2bVKfPpLFYu4/fLCK/vfSS7JO3q2brUNU2a+yVhxYoRum3qDuX3RXanqqS+sGAAAAAHdGUAR4oypVpDlzpL59pbw86ZFHpLffdnVVDtGwofT559KmTdJf/3pxf8a+ECUPTdRVn+xUrN+T8vPx04IdC9Tuw3Z6+L8Pa+exna4rGgAAAADcFEER4K0qVZI++0x67jnz6+efl156SfLS9XpatJBmzZLWrTMnUV1wdE+kUl75QIHTtqqNz0OSpC83fanmk5rrqW+e0m8nf3NRxQAAAADgfgiKAG/m4yONGye98Yb59ZtvSo8+KuXmurYuB2rXTpo9W9qwQbr//ou3pB3f1UgbRnyh4C/Xq4nlDuXacjV57WQ1ereRhi4ZquNnj7u2cAAAAABwAwRFQEXw4ovSJ5+YwdFnn5n3aJ09e/H7S5ao5g03SEuWuK7Gcta6tfTVV9LGjdJDD10MjDLT2mp7wnxVnfm96hiddDb3rMYuH6sG7zbQGz+9oTPnz7i2cAAAAABwIYIioKL4v/8zp9pYrdK8edJtt5lPRjMMWYYNk9+OHbIMG+Z1t6a1bCl98YW0ZYu56LWP/afeqc036NdRP8l/1jzVyG2lE+dOaEjyEDV6t5Emr5ms83nnXVs4AAAAALgAQRFQkdx9t7RwoRQUJP3wg9S1qzR9uixr1kiS+euiRS4u0jGaNTMXvd62zVzb289PkizK2dRDv7+eKsvcf+mq8/V16NQhPTX/KTWf1FxfbvxSNsPm4soBAAAAwHkIioCKpmtX6fvvpbAwcyGfRx6RYZ9mY/j6SsOHe92soks1bmzefbdrlzRwoPmAOBm+MlL/ptNj06QF78n/fKh2Hd+lh2c/rOgPo/Xtjm9leHGbAAAAAMAFBEVARdS2rbR8uRQeLuXkyGIzZ81Y8vKk1au9dlbRperWlSZMkPbvl0aOlKpXl5TnL616Wjlv7ZKSX5Pv+SBtyNigO764Q12ndtXy/ctdXDUAAAAAOBZBEVBRNWhgzir6Ix8f6ZVXvHpW0aVq1JASEszA6J13pKgoSTlVpR+HKW/8bmn587LkBujH/T/q+s+u111f3qWNGRtdXTYAAAAAOARBEVBRLVpk3nr2RzabtGaN9Mwz0pmK8wSwq66Snn3WvCVt2jRzEWydrSEtfkvGuzuktY9LNl/9b/v/1GZyG/WZ00e7j+92ddkAAAAAUK4IioCKyDDMtYh8fYs/ZtIk8/6sUaOko0edV5uLVaok9e0r/fKL+XC4rl0lZdWR/veRNGmLtOkBGTL071/+rabvNdMzC55RxqkMV5cNAAAAAOWCoAioiBYtMtciysu7/HG//24u4FO3rjRggDndpoLw8ZF69JCWLZPWrpX69JEqZTWRZs2QPlwj7bxVucZ5TVw9UfX/2VAvL3lFmecyC73Oudxz+teGf+m+mffp3nn36r6Z9+lfG/6lc7nnnP+mAAAAAOBPEBQBFU1JZhNJ5vcbNJDatZPOnpXef19q0kR64AEzZKpAoqOlzz+X9u6VXn5Zqp7dXvr3QmlasvRrR52znVbi8tcV+UYDvbpknM6ePytJmpc2T5FvR6rv3L76Ou1rpRxK0ddpX6vv3L6KfDtS/0v7n2vfGAAAAAD8AUERUNGUdDZRXp60e7c0ZoyUnCzddpu5ftHMmVLHjtJf/iLNn2/uqyAiI6XXX5cOHJAmT5aa+t8kfbxSmj5HOtJcZ3RMCctfUM1XG+uBqQPUc3pPnTh3QpJkM2wFfj1x7oTunn635qXNc9XbAQAAAIBCCIqAiuTCbCKfEv7V9/GRRoyQbrxR+vZbc/HrPn0kPz/p+++lO++UWreWpk6VcnIcWro7qVJF+vvfpS1bpPnzLYqr01N6f6M09zPpRF2d8Tuomfvel2EYMlT00+Mu7H9k7iPchgYAAADAbRAUARVJTo75HPiSzgKy2czpMxdCoNatzXuwdu+WnntOCgyUNm+W+veXrr5aevNNKbPwOj3eysdHuuMOafFiaesWXz3T5REFTtsubehtHmC5/PmGDB0/d1yztsxyfLEAAAAAUAIERUBFYrWat52tXVtgs61eraMLF8pWxPe0erV53qWioqRx48zQ6Y03pIgI6bffpJdeMr/3wgvSr7+65j26SLNm0rvvSr/tt6ptx7OSUbIfrz4WH83ZNsfB1QEAAABAyfi5ugAAThYVZW6XstmUe/iwFBpa8tvSJKlaNenFF6WBA6UvvjDDoy1bzF8nTJAeflh6/nnpmmvK8x24tapVpeDw36V9JZu1ZTNsOnbmmIOrAgAAAICSYUYRgCtntZq3n23cKH3zjd
S1q5Sba96m1rq1eX/Wd9+ZayRVADWq1JCPpYQ/Xg1p+c6NevTj8dp5dK9D6wIAAACAP0NQBKD8+PhI3btLy5ZJP/8s3Xefue/bb6WbbpKuvVaaMcMMkbxYz6Y9859u9qcs0vlKv+vTg8+p8aSrFTGyvZ7/+nVtO7rNsUUCAAAAQBEIigA4RseO0syZ0vbt0j/+IVWubK559OCDUpMm0sSJ0unTrq7SIe5veb9CAkJk+dPVrC2yZAdLSW9Le7tKNh+lW9bp7dRX1HxSc9VJbKEhi17R+kPrZVSQ2VgAAAAAXIugCIBjNWwoTZok7dsnJSRINWpIe/ZIzzwj1a0rjRghHT7s6irLVYBfgKb1nCZJxYZFFllksUiz+/xL37wSr7uPL5PPPw9J86ZIO26T8irpYM5WvZHyuqI/ilbtNxsofuFzWnFgRclnKwEAAABAKREUAXCOWrWkkSPNJ6VNmiQ1aCAdOyaNHi3Vqyc9+aS0Y4erqyw3PZr20NwH56paQDVJyl+z6MKv1QKq6esHv1bP5j3Uvbs0d670a1qoxtz3mBqkfCu9dVj677+lrfdI5yvr0Lm9+ufK8er8aWdFvFVH/5j/DyXvTtb5vPMueocAnGnSpEmqX7++AgICFBMTo1WrVhV77JQpU9SlSxeFhIQoJCREcXFxlz0eAADgUhbDy+9nyMrKUnBwsDIzMxUUFFRur2uz2XT48GGFhobKpzRPiUKp0M7O4ZJ2zsuTZs+W3npLWr3a3GexSPfcI73wgnTddc6pw8HO5Z7TrC2zNHvrbKVnpis8OFz3Nr9X97W4TwF+AUWeY7OZyzxNmybNmiWdOX9aarRQav5fqck3UkBW/rHVA6rrrmZ36a/N/6q4BnHFvmZFwc8M53BUOzvqM9vTzZgxQ3379tXkyZMVExOjCRMmaObMmUpLS1NoaGih43v37q3OnTurU6dOCggI0BtvvKE5c+Zo8+bNql27domuSV8AAOAZHPGZTVBURvxjxDloZ+dwaTsbhvTDD2ZgNH/+xf3XXy+9+KK5OLYX9H1Z2/jkSem//zVDo2XLJPlmS1cvlZrPlprNla46mn9sVf+q6t64u/7a/K+6vfHtqupftdzfh7vjZ4ZzEBQ5V0xMjK699lpNnDhRktn+UVFReuaZZzRkyJA/PT8vL08hISGaOHGi+vbtW6Jr0hcAAHgGR3xmM4oG4FoWi9S1q/TNN9KmTVL//lKlStJPP0l33SW1bCl98omUne3qSl0iMFB65BHpu+/MpZ1GjbCqoXG79L8p0tuHpKnfST8/I2XV1qmcU5qxeYYemPWAar5ZU3dPv1ufb/hcx88ed/XbAFBGOTk5Wrt2reLi4vL3+fj4KC4uTikpKSV6jTNnzuj8+fOqXr16scdkZ2crKyurwAYAAComgiIA7qNlS+nTT81E5MUXpaAgads26bHHpPr1pbFjpRMnXF2ly9Svb679vWOH9OOP0mP/56egY3+Rvn1X+ud+acpK6acXpWMNlZ2XrXlp89Rvbj+FjgvVrf+6VR+u+VDpp9Jd/TYAlMLRo0eVl5ensLCwAvvDwsKUnl6yv88vvfSSIiMjC4RNf5SYmKjg4OD8LSoq6orqBgAAnougCID7qV1beuMN6cABadw48+v0dGnoUCkqSoqPNxfFrqAsFvPOvClTpEOHpC+/lO7q4aNKh2OkJW9I7+6QPtggLUuQMlop15arxbsX68n5Tyry7Uh1+ayLJqycoH0n9rn6rQBwsLFjx2r69OmaM2eOAgKKX8Ns6NChyszMzN8OHDjgxCoBAIA7ISgC4L6CgqTnnpN27zYX6WnVSjp1SvrnP6WGDaU+faQNG1xdpUtVqSI9+KD09ddmljZlinTjjRZZDreWlo2UPtgovZcmLR4rHbxWhgz9tP8nDV44WPXfqa8OH3XQmB/HKO1omqvfCoAi1KxZU76+vsrIyCiwPyMjQ+Hh4Zc9d9y4cRo7dqwWLVqk1q1bX/ZYq9WqoKCgAhsAAKiYCIoAuD9/f6lvX+mXX6Rvv5VuuknKzZX+/W+pbVupWzdpyRJzYewKrHp18y69pUvNyVjjx0sdOkj6vYm0/CVpyirpn/ukb9+R9t4gGRatPbRWw5YOU7NJzdTy/ZYavnS4UtNT5eXPOQA8hr+/v9q3b6/k5OT8fTabTcnJyYqNjS32vDfffFOjR49WUlKSOnTo4IxSAQCAlyAoAuA5LBbpttuk5GRpzRqpVy/ziWiLFkm33CK1b2/eh5Wb6+pKXa52bWnwYGn1amn7dmnUKKlpU0mZdaWfn5Wmfi+NOyTN+0ja2U0Wm5+2HNmi1358Te0+bKeG7zbU84ueV8qBFNkMm6vfDlChxcfHa8qUKZo2bZq2bt2qp556SqdPn1b//v0lSX379tXQoUPzj3/jjTc0fPhwffrpp6pfv77S09OVnp6uU6dOueotAAAAD+LWQdHIkSNlsVgKbM2aNXN1WQDcQfv20vTp0s6d0jPPmPdgrV8vPfyw1KiR9M475m1qUOPG5iLYW7ead+q98oo9NDodJq17XPp3kow3j0iz/yVt7SlLXoD2nNijt1PeVqdPO6nO+DoaMH+Alu5ZqlwbIRzgbL169dK4ceM0YsQItW3bVqmpqUpKSspf4Hr//v06dOhQ/vEffPCBcnJydN999ykiIiJ/GzdunKveAgAA8CAWw43vLxg5cqRmzZqlJUuW5O/z8/NTzZo1S/waWVlZCg4OVmZmZrneb2+z2XT48GGFhobKx8et8zaPRjs7h1e08++/S++/L733nnTkiLkvJER66ikzSPqTtTwczd3a2DCkzZulWbOkmTOlLVsu+Wal01KjJKnFf+XT7BvZKp3M/1aNyjV0V9O79Nfmf1VcgzhZ/azOL/4y3K2dvZWj2tlRn9koPfoCAADP4IjPbLcfRfv5+Sk8PDx/K01IBKACqVFDGj5c2rdPmjzZnEZz/Lg0Zoz5XPknnpDSWLD5AovFXBt85EgzMNq8WXr1VemaaySdv0ra+lfpv1/INvaI9J/50rr/k8+5Gvr97O/6LPUz3fnlnar1Vi099N+HNGvLLJ3KYfYWAAAA4A3cPijasWOHIiMj1aBBA/Xu3Vv7K/AjsQGUQOXK0t//bt5nNXu2dN11Una2+Tiw5s2lnj2l5ctdXaXbadHCzNl++UXatk16/XUpOlpSnlXacYc07xPZ3kyXpi6VVg2Q5VSkTuac1PRN03X/zPtV661a6jm9p/614V86fva4q98OAAAAgDJy61vPvv32W506dUpNmzbVoUOHNGrUKB08eFCbNm1SYGBgkedkZ2crOzs7/+usrCxFRUXp+PHj5X7r2ZEjR1SrVi1ub3Ag2tk5vLqdDUNavlyWceNk+d//Lu7u1EnGc89Jd91lLojtYJ7axgcOSP/7n/T11xYtWybl5lrMb1hsUu1VUvPZUov/SiG788/x8/HTjfVv1L3N79XdTe5WWNUwp9Xrqe3saRzVzllZWQoJCeF2JzfArWcAAHgGR3xmu3VQ9EcnTpxQvXr1NH78eD366KNFHjNy5EiNGjWq0P7t2
7cXGy6Vhc1mU2ZmpoKDg/nHiAPRzs5RUdrZd/t2XfXhh6o8a5YsOTmSpNyGDXX673/X2fvvlwICHHZtb2jjzEyLli61auFCq5YuterkyQvvw5DCfrkYGoVuzj/HIos6hndU9wbddXv921UnsI5Da/SGdvYEjmrnkydPqkmTJoQTboCgCAAAz1DhgyJJuvbaaxUXF6fExMQiv8+MIu9COztHhWvnQ4dkmThRmjxZlhMnJElGWJiMp5+WnnxSql693C/pbW2ckyMtWybNm2fRvHnSwYOWi9+skSY1nyM1/69Ue02B8zpEdNC9ze/VPc3uUZMaTcq9Lm9rZ3fFjCLvR1AEAIBnqPBB0alTp1S3bl2NHDlSzz77bInO4alnno12do4K284nT0offyz985/mPVaSdNVV0qOPSoMHm4tglxNvbmPDkNatk775RlqwQFq92twnSQreLzWbY840qvuTZLn4kdOyVkv9tflfdW/ze9U6rLUsFkvRFygFb25nd8JTz7wffQEAgGeocE89e/755/X9999r7969WrFihe655x75+vrqoYcecnVpALxBYKAZCO3aJf3731KbNtLp09K770qNGkkPPyytX+/qKt2exSK1by8lJEg//yylp0uffy49+KAU4lNX+nmg9NkP0rhD0v8+lHbeKuX5afORzXr1h1fV9sO2uvqfjfXi4he18teVshk2V78lAAAAoMJy6xlFDz74oH744Qf9/vvvqlWrlq6//nq9/vrratiwYYlfgxlFno12dg7a2c4wpCVLpLfekhYvvrj/5pulF1+UbrnFTEXKoKK2cW6uGR4tWGBuqan2bwQcl5p8Y840arhQqnQu/5walWrrry3v0YOt71WXel3k5+P3p9c5l3tOMzfP1Jxtc5Sema7w4HDd0+we3d/yfgX4OW7tqYqKGUXej74AAMAzVPhbz8qCoMiz0c7OQTsXYf16adw4acYMKS/P3NemjfT881KvXlKlSqV6OdrYdPCglJQkzZ8vJSdLWVmS/E9JjZLMNY2afCNZT+UfX0U1dXPtu/X49ffq1kY3y+pnLfSa89Lm6ZG5j+j4uePysfjIZtjyfw0JCNG0ntPUo2kPJ75L70dQ5P3oCwAAPANBURkQFHk22tk5aOfL2LdPmjBBmjLFvC1NkqKipEGDpMcfN29fKwHauLDz56VVq8zJW4sXmzOP8iznpKuTzZlGTb+WqhzLP94vL0htq9yp3tH36rGut6mq9SrNS5unntN7SpIMFf44s8icATb3wbm6q+ldTnlfFQFBkfejLwAA8AwERWVAUOTZaGfnoJ1L4Phx6YMPzPWLMjLMfcHB0lNPSc8+K0VEXPZ02vjPZWaaT1JbtMgMjnbsypXq/WDONGo+Rwo8dPHg3ABFnrlFR4OSdV5niwyJLrDIomoB1fTbc79xG1o5ISjyfvQFAACeocItZg0AbiMkRHr5ZWnvXnN2UdOmZrIxdqz5dLRHH5W2bi3+/CVLVPOGG8w1kFCk4GDp7rulSZOk7dulPbv89NHQm3T/VZMU8tmv0scrpBXPScevlvzO6beg/ylHZy4bEknmTKPj545r1pZZTnonAAAAgOciKAKA0ggIkB57TNqyRZo7V+rcWcrJkT79VGrRQurRQ/rxx0ueDy/JMGQZNkx+O3bIMmxYwe+hWPXrm3f3ffWVdOSwj9bMjdW4W8fpjp27VOXz9dKRZvqTjCifj8VHc7bNcWi9AAAAgDcgKAKAsvDxMae//PSTtGKFdM895hPRvvlGuuEGKTZW+u9/zYWwFy2SZc0aSTJ/XbTIxcV7Hl9fqX176bnnpPnfWJSZ1lbRTcOkEj6EzmbYNP+XFbo3cZI+TkrR8VOnHVswAAAA4KH+/JnDAIDLi42VZs8275d6+21p2jRzZeb77pMaNpRyc2X4+sqSl2f+Ony4dOutZrCEMvHzk+qH1VDqcfPpZiWRXSldc3Ke1pyfpcdTfFTlbDM1qBytzldH666O0ercoK2CA4IdXDkAAADg3giKAKC8NGkiffih9Oqr0sSJ5mI7u3ZJujjxxZKXJ61ebc4q6tbNdbV6gZ5Ne2r21tklP2Hr3ZJvrhSxTgo8pDNXbdEmbdGmff/Wh/vMQ4LzGql5tWh1bdJONzaLVvvIaNWsUtMxbwAAAABwQzz1rIx4gpFz0M7OQTs7yKlTUsuW0v79hb9Xtar05JPS9ddLnTpJtWo5vz4Pdy73nCLfjtSJcydK9NSzJbf/ptUpAfrxR2nZ2kM6mLfeDI0ubNX2FXl+sKLUqka0/tI0WtfVi1Z0RLQiqkbIUoFnhPHUM+9HXwAA4Bkc8ZnNjCIAcJTly4sOiSQzRBo3ztwkqXFjc2HsTp3MX5s1M9dBQrEC/AI0rec03T39bllkKTIsstjnck3rOU3RTQMU3Vr6+98lKUIHDkRo+fI79OOP0k9LpV92/i6F/yE8qrFDmTqg5b8f0PIVX0srzNcN8glTm7BodWlozjpqF95O9avVr9DhEQAAALwDM4rKiBkYzkE7Owft7ACGIcXESOvWmQta/5HFItWoYc4k2rq18PdDQsy1jy6ERx07SlWqOL5uDzQvbZ4emfuIjp87Lh+LuWbRhV9DAkI0rec09Wja409f58QJc13ylSvN7eefpazsLCk8tWCAVGuL5FN4XaSqviFqG95O19U1Zx1FR0SrcY3G8rF4398pZhR5P/oCAADP4IjPbIKiMuIf1s5BOzsH7ewACxdKt93258clJZkhUEqKmVIsX24mFGfPFjzOz09q29YMji6ER7VrO6R0T3Qu95xmbZml2VtnKz0zXeHB4bq3+b26r8V9CvALKNNr2mzStm1md1wIjzZtkmy+Z6TQjQVnHoVtlHzPF3qNyj5V1TqsrWKiLoZHzWs1l5+PZ0/oJSjyfvQFAACegaCoDAiKPBvt7By0czn7s9lEF/j6StHRZhJx6S1L589LGzaYodGF8OjgwcLn16tX8Ha1a64xX7MCc/Sf5ZMnpTVrLgZHK1dKhw9L8s2Ram0uGB6Fb5AqnS30Gv4+AWpVq7U61olWu4h2io6IVqvQVmUOtFyBoMj70RcAAHgGgqIyICjybLSzc9DO5ayks4kuSEq6/BPQDEM6cMAMjC6ERxs2mFNeLlW1qnTddRfDo+uukyrYP/Cc/WfZMKR9+8zAaO1aM0Rat07KypLkkyvVSLMHR5fcumY9Weh1fC1+alq9pTpGRau9feZRm7A2usr/Koe/h7IgKPJ+9AUAAJ6BoKgMCIo8G+3sHLRzObowm2jt2sJBTlF8fKT27QvPKvozJ0+a51yYcZSSYu67lMVizjK69Ha1+vVLdx0P4w5/lm02aedOMzS6sK1bJ50+Lclik0J2F5x5FLFOqvJ7odexyKKrA5sppl60OkSa4VHb8LaqFlDN6e/pjwiKvB99AQCAZyAoKgOCIs9GOzsH7VyOsrPNW8IyMkp+Tni4tHevZLWW/bp5edLmzQVvV9uzp/BxEREXb1Xr3Nlc98jfv+zXdTPu+mc5L0/avr1geLR+/YWlqAwp+EDh8CjwUJGvVbty
Q11bp506XrLuUa2rajn1/RAUeT/6AgAAz0BQVAYERZ6NdnYO2rmcHTggHTlSYJfNZtOxY8dUvXr1wm0cGirVqVP+dRw6dDE0WrHCnNZy/g8LLgcEmItpXwiPOnWSqlcv/1qcxJP+LOfmmotlb9ggpaZe3I4etR9Q9dDFW9YuPHUtZG+Rr1Xdr47a1IrW9Q2jdW0dMzyKDIyUxUGzxwiKvB99AQCAZyAoKgOCIs9GOzsH7ex4btHGZ8+aU1kuXevo2LHCxzVrVvB2tSZNPOZ2Nbdo5ytgGGa+d2lwtGGDtGOH+T1VPnYxNLqw1dxe5GtdpVA1CYzWdXWjdWOzaHWoHa361epfUXh0LvecZm6eqTnb5uQ/Xe6eZvfo/pb3l8ti3IQT7oO+AADAMxAUlQFBkWejnZ2DdnY8t2xjw5DS0grerpaWVvi4GjUK3q7Wvr1UubLz6y0Bt2zncnDqlLRxY8HwaONG6cwZSdYsKWxDwfCo1hbJp/AaWf551RRVqZ3ahEara5NoxbWMVtOajeXr8+dPy5uXNk+PzH1Ex88dl4/FRzbDlv9rSECIpvWcph5Ne1zR+ySccB/0BQAAnoGgqAwIijwb7ewctLPjeUwbHz1qLox9ITxavVo6d67gMZUqmWHRpberhYe7pt4/8Jh2Lgc2m7m01aZNBbdt26TzOiOFbrwkPFpvfu2XU+h1fHKvUvWctmpYJVrtI6N1U/No3dquuQKvqpR/zLy0eeo5vackyVDhYYNF5iyluQ/O1V1N7yrzeyKccB/0BQAAnoGgqAwIijwb7ewctLPjeWwb5+SYqy5fuF1t+fKiF+pu0OBiaNS5s9SypflENyfz2HYuR+fPm7eqXRoebd4s7didI6PmloIzj8JTpUpnC79IrlXWE60VZotWw6BWSqn8srKNU0WGRBdYZFG1gGr67bnfynwbGuGE+6AvAADwDARFZUBQ5NloZ+egnR3Pa9rYMMynqV24VW35cjOJ+ONHSXCwdN11F8OjmBipalWHl+c17ewAZ8+as40unXm0NS1Pu06kyRZ2aXi0XgrIKvN1/nXPv/S31n8r07mEE+6DvgAAwDM44jPbr1xeBQBQMVgs5uyhBg2kv9nDgMxMaeXKi+HRypXmvoULzU2SfH2lNm0KrnUUFeW691EBVa4stWtnbhf5KienhXbubKFt2/6mbdukLVtt2rB/t3acXqfskHVS28+kqw5LJVgD28fioznb5pQ5KAIAAIDrERQBAK5McLDUrZu5SeZz3zduLPh0tf37pXXrzG3iRPO4OnUK3q7Wpo3kd4UfS0uWqObTT5vXuPXWK3utCsLfX2rRwtxMPpIayTAa6eDBB9R95kr9knW4RK9lM2w6dqaIJ+kBAADAYxAUAQDKl5/fxakrTz9t7vv114szjlasMNc9+vVXacYMc5OkKlXMW9QuhEexsVK1aiW/rmHIMmyY/HbskDFsmHTLLeYMKJSJxWJmeY1q19Cmk+bTzf6Mj8VH1atUd0J1AAAAcBSCIgCA49WpIz3wgLlJ0unT0qpVF8OjlBTpxAnpu+/MTTKTipYtCz5drWHD4sOfRYtkWbPGPHXNGmnRoouznFBmPZv21Oyts0t0rM2w6Z5m9zi4IgAAADgSQREAwPmuukq68UZzk8xnvW/denHG0fLl0s6dF1de/ugj87iwMDMwuhAeRUdLVqu5mPbw4TJ8fWXJyzN/HT7cvP2MWUVX5P6W92tg0kCdOHeiRE89u6/FfU6sDgAAAOWNoAgA4Ho+PubsoZYtpSeeMPdlZJgzjS6ER2vWmPvmzDE3yQyJOnSQIiOl1avz11u25OVJq1czq6gcBPgFaFrPabp7+t2yyFJkWGSxt/y0ntMU4Bfg7BIBAABQjgiKAADuKSxM6tnT3CTp3DlzMexLF8k+csT8fVEsFqlfP2noUKluXXOLipJq1WKWUSn1aNpDcx+cq0fmPqLj547Lx2KuWXTh12oB1TSt5zT1aNrD1aUCAADgChEUAQA8Q0DAxdvOXnjBvN1s505p8mRp/PjCxxuGOQNp0KCC+61WMzC6dLsQIl34fVCQU96SJ7mr6V367bnfNGvLLM3eOlvpmekKDw7Xvc3v1X0t7mMmEQAAgJcgKAIAeCaLRWrUSPrxR8nXV8rLK/qYatXM4379VUpPl7KzzYBp587iXzsoqHCAdOnXdeqYwVUFE+AXoL+1/psebvWwDh8+rNDQUPn4+Li6LAAAAJQjgiIAgOdatMhci6g4hiEdPy6NHm2uVZSTIx08KB04cHHbv7/g748fl7KypM2bza04oaGXn5UUEWEGWAAAAIAHISgCAHgm+5POip1NdIGvr3ncrbdK/v7S1VebW3FOny4cIP3x6zNnpMOHzW3t2uKvGxlZOEC6NFyqWZP1kgAAAOBWCIoAAJ7pz2YTXVDaJ6BddZXUrJm5FcUwpGPHip+RdOCAOWspN/fi/uIEBBQ/I+nC7wMD/7xmZ1uyRDWfflqaONEM4AAAAOA1CIoAAJ7nwmwiHx/JZvvz4318Ls4qutIZPBaLVKOGubVtW/QxeXnmQtpFhUgXfp+RYT7JbccOcytOcHDxIVLdulLt2uYC3c5iGLIMGya/HTtkDBsm3XILs6IAAAC8CEERAMDz5OSYYUtJQiLJPO7AAfM8Z4QqF247i4yUrruu6GOys82ZR5e7xe3ECSkzU9q40dyKExZ2+VlJ4eHlt17SokWyrFkjSeavJZ2pBQAAAI9AUAQA8DxWq3k72ZEjBXbbbDYdO3ZM1atXL/w0rtBQ5868+TNWq9SggbkV5+TJogOkS78+d86cnZSRIdkDnEL8/MyZR5e7za169T+fGWSfyWX4+sqSl2f+Wl4ztQAAAOAWCIoAAJ7pQtBxKZtNuYcPm6GQNzy2PTBQatHC3IpiGNLvv1/+FrfffjPXS9q3z9yKU7ly4cW2//j18uXS6tW6EAlZSrv+EwAAANweQREAAJ7KYjGfnFazphQdXfQxublSenrxC28fOGA+ve3sWSktzdyKU9Tta5c+VY5ZRQAAAB6PoAgAAG/m5yfVqWNusbFFH3PunPTrr5d/kltWlrlI9x8xqwgAAMCrEBQBAFDRBQRIjRqZW1EMQ2rfXtqwoegFxJlVBAAA4DW8YAEHAADgUIsWSevXF/+UuUtnFQEAAMCjERQBAIDi2Z90VuT6RJe6MKvIMJxTFwAAAByCoAgAABRv0SJztlBR6xNdillFAAAAXoGgCAAAFO3CbCKfEg4XfHyYVQQAAODhPCooGjt2rCwWiwYNGuTqUgAA8H45OeZTz4pbm+iPbDbzCWk5OY6tCwAAAA7jMU89W716tT788EO1bt3a1aUAAFAxWK3m7WRHjhTYbbPZdOzYMVWvXl0+f5xtFBpqngcAAACP5BFB0alTp9S7d29NmTJFr732mqvLAQCg4oiKMrdL2WzKPXzYDIVKelsaAAAAPIJHjO4GDBig7t27Ky4uztWlAAAAAAAAeC23n1E0ffp0rVu3TqtXry7R8dnZ2crOzs7/OisrS5I5Td5W0jUWSsBms8kwjHJ9TRR
GOzsH7ex4tLFz0M7O4ah2pt8AAABcz62DogMHDmjgwIFavHixAgICSnROYmKiRo0aVWj/kSNHdO7cuXKrzWazKTMzU4ZhFF6fAeWGdnYO2tnxaGPnoJ2dw1HtfPLkyXJ7LQAAAJSNxTDc9xm2c+fO1T333CNfX9/8fXl5ebJYLPLx8VF2dnaB70lFzyiKiorS8ePHFRQUVG612Ww2HTlyRLVq1eIfIw5EOzsH7ex4tLFz0M7O4ah2zsrKUkhIiDIzM8v1M9sbTJo0SW+99ZbS09PVpk0bvffee+rYsWOxx8+cOVPDhw/X3r171bhxY73xxhu64447Sny9rKwsBQcH0xcAALg5R3xmu/WMoptvvlkbN24ssK9///5q1qyZXnrppUIhkSRZrVZZi3jaio+PT7n/o+FCYMU/RhyLdnYO2tnxaGPnoJ2dwxHtTJ8VbcaMGYqPj9fkyZMVExOjCRMmqFu3bkpLS1NoaGih41esWKGHHnpIiYmJuvPOO/XFF1+oZ8+eWrdunVq1auWCdwAAADyJW4/IAgMD1apVqwLbVVddpRo1ajDQAQAAFcL48eP1+OOPq3///mrRooUmT56sKlWq6NNPPy3y+HfeeUe33XabXnjhBTVv3lyjR49WdHS0Jk6c6OTKAQCAJ3LrGUXl4cKddRcWtS4vNptNJ0+eVEBAAP8D6kC0s3PQzo5HGzsH7ewcjmrnC5/VbnxXvNPl5ORo7dq1Gjp0aP4+Hx8fxcXFKSUlpchzUlJSFB8fX2Bft27dNHfu3GKv88db9zMzMyWV//gJAACUL0eMnzwuKFq2bFmpjr+wMGZUVJQDqgEAAOXt5MmTCg4OdnUZbuHo0aPKy8tTWFhYgf1hYWHatm1bkeekp6cXeXx6enqx1ynuYSCMnwAA8Ay///57uY2fPC4oKq3IyEgdOHBAgYGBslgs5fa6FxbJPnDgAIs8OhDt7By0s+PRxs5BOzuHo9rZMAydPHlSkZGR5faaKJmhQ4cWmIV04sQJ1atXT/v37ye0cyF+prkP+sJ90BfugX5wH5mZmapbt66qV69ebq/p9UGRj4+P6tSp47DXDwoK4i+GE9DOzkE7Ox5t7By0s3M4op0JJQqqWbOmfH19lZGRUWB/RkaGwsPDizwnPDy8VMdLxT8MJDg4mL9LboCfae6DvnAf9IV7oB/cR7k+YKTcXgkAAADlyt/fX+3bt1dycnL+PpvNpuTkZMXGxhZ5TmxsbIHjJWnx4sXFHg8AAHApr59RBAAA4Mni4+PVr18/dejQQR07dtSECRN0+vRp9e/fX5LUt29f1a5dW4mJiZKkgQMHqmvXrnr77bfVvXt3TZ8+XWvWrNFHH33kyrcBAAA8BEFRGVmtViUkJBQ5TRvlh3Z2DtrZ8Whj56CdnYN2dq5evXrpyJEjGjFihNLT09W2bVslJSXlL1i9f//+AtPNO3XqpC+++EKvvPKKXn75ZTVu3Fhz585Vq1atSnxN+tg90A/ug75wH/SFe6Af3Icj+sJi8AxaAAAAAAAAiDWKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgqIymzRpkurXr6+AgADFxMRo1apVri7Jq/zwww/q0aOHIiMjZbFYNHfuXFeX5HUSExN17bXXKjAwUKGhoerZs6fS0tJcXZbX+eCDD9S6dWsFBQUpKChIsbGx+vbbb11dllcbO3asLBaLBg0a5OpSvMrIkSNlsVgKbM2aNXN1WbgCpR3LzJw5U82aNVNAQICuueYaLViwwEmVerfS9MOUKVPUpUsXhYSEKCQkRHFxcYxBy1FZx/fTp0+XxWJRz549HVtgBVLavjhx4oQGDBigiIgIWa1WNWnShJ9R5aC0/TBhwgQ1bdpUlStXVlRUlAYPHqxz5845qVrvVJZ/Fy9btkzR0dGyWq1q1KiRpk6dWurrEhSVwYwZMxQfH6+EhAStW7dObdq0Ubdu3XT48GFXl+Y1Tp8+rTZt2mjSpEmuLsVrff/99xowYIBWrlypxYsX6/z587r11lt1+vRpV5fmVerUqaOxY8dq7dq1WrNmjW666Sbdfffd2rx5s6tL80qrV6/Whx9+qNatW7u6FK/UsmVLHTp0KH/76aefXF0Syqi0Y5kVK1booYce0qOPPqr169erZ8+e6tmzpzZt2uTkyr1Lafth2bJleuihh/Tdd98pJSVFUVFRuvXWW3Xw4EEnV+59yjq+37t3r55//nl16dLFSZV6v9L2RU5Ojm655Rbt3btXs2bNUlpamqZMmaLatWs7uXLvUtp++OKLLzRkyBAlJCRo69at+uSTTzRjxgy9/PLLTq7cu5T238V79uxR9+7ddeONNyo1NVWDBg3SY489poULF5buwgZKrWPHjsaAAQPyv87LyzMiIyONxMREF1blvSQZc+bMcXUZXu/w4cOGJOP77793dSleLyQkxPj4449dXYbXOXnypNG4cWNj8eLFRteuXY2BAwe6uiSvkpCQYLRp08bVZaCclHYs88ADDxjdu3cvsC8mJsb4+9//7tA6vd2Vjilzc3ONwMBAY9q0aY4qscIoS1/k5uYanTp1Mj7++GOjX79+xt133+2ESr1fafvigw8+MBo0aGDk5OQ4q8QKobT9MGDAAOOmm24qsC8+Pt7o3LmzQ+usSEry7+IXX3zRaNmyZYF9vXr1Mrp161aqazGjqJRycnK0du1axcXF5e/z8fFRXFycUlJSXFgZcGUyMzMlSdWrV3dxJd4rLy9P06dP1+nTpxUbG+vqcrzOgAED1L179wI/n1G+duzYocjISDVo0EC9e/fW/v37XV0SyqAsY5mUlJRCf7e6devG2OcKlMeY8syZMzp//jyf3VeorH3x6quvKjQ0VI8++qgzyqwQytIX8+bNU2xsrAYMGKCwsDC1atVKY8aMUV5enrPK9jpl6YdOnTpp7dq1+ben7d69WwsWLNAdd9zhlJphKq/Pa7/yLKoiOHr0qPLy8hQWFlZgf1hYmLZt2+aiqoArY7PZNGjQIHXu3FmtWrVydTleZ+PGjYqNjdW5c+dUtWpVzZkzRy1atHB1WV5l+vTpWrdunVavXu3qUrxWTEyMpk6dqqZNm+rQoUMaNWqUunTpok2bNikwMNDV5aEUyjKWSU9PL/L49PR0h9Xp7cpjTPnSSy8pMjKSgPwKlaUvfvrpJ33yySdKTU11QoUVR1n6Yvfu3Vq6dKl69+6tBQsWaOfOnfrHP/6h8+fPKyEhwRlle52y9MPDDz+so0eP6vrrr5dhGMrNzdWTTz7JrWdOVtzndVZWls6ePavKlSuX6HUIigBowIAB2rRpE+uNOEjTpk2VmpqqzMxMzZo1S/369dP3339PWFRODhw4oIEDB2rx4sUKCAhwdTle6/bbb8//fevWrRUTE6N69erpq6++4n/TARcYO3aspk+frmXLlvGzz8lOnjypPn36aMqUKapZs6ary6nwbDabQkND9dFHH8nX11ft27fXwYMH9dZbbxEUOdGyZcs0ZswYvf/++4qJidHOnTs1cOBAjR49WsOHD3d1eS
glgqJSqlmzpnx9fZWRkVFgf0ZGhsLDw11UFVB2Tz/9tL755hv98MMPqlOnjqvL8Ur+/v5q1KiRJKl9+/ZavXq13nnnHX344Ycursw7rF27VocPH1Z0dHT+vry8PP3www+aOHGisrOz5evr68IKvVO1atXUpEkT7dy509WloJTKMpYJDw9n7FPOrmRMOW7cOI0dO1ZLlixh8f5yUNq+2LVrl/bu3asePXrk77PZbJIkPz8/paWlqWHDho4t2kuV5e9FRESEKlWqVOCzvnnz5kpPT1dOTo78/f0dWrM3Kks/DB8+XH369NFjjz0mSbrmmmt0+vRpPfHEExo2bJh8fFj1xhmK+7wOCgoq8WwiiaeelZq/v7/at2+v5OTk/H02m03JycmsOQKPYhiGnn76ac2ZM0dLly7V1Vdf7eqSKgybzabs7GxXl+E1br75Zm3cuFGpqan5W4cOHdS7d2+lpqYSEjnIqVOntGvXLkVERLi6FJRSWcYysbGxBY6XpMWLFzP2uQJlHVO++eabGj16tJKSktShQwdnlOr1StsXzZo1K/S5c9ddd+U/ZSgqKsqZ5XuVsvy96Ny5s3bu3Jkf1knS9u3bFRERQUhURmXphzNnzhQKgy6Mwcx1mOEM5fZ5Xaqlr2EYhmFMnz7dsFqtxtSpU40tW7YYTzzxhFGtWjUjPT3d1aV5jZMnTxrr16831q9fb0gyxo8fb6xfv97Yt2+fq0vzGk899ZQRHBxsLFu2zDh06FD+dubMGVeX5lWGDBlifP/998aePXuMX375xRgyZIhhsViMRYsWubo0r8ZTz8rfc889ZyxbtszYs2ePsXz5ciMuLs6oWbOmcfjwYVeXhjL4s7FMnz59jCFDhuQfv3z5csPPz88YN26csXXrViMhIcGoVKmSsXHjRle9Ba9Q2n4YO3as4e/vb8yaNavAZ/fJkydd9Ra8Rmn74o946ln5KW1f7N+/3wgMDDSefvppIy0tzfjmm2+M0NBQ47XXXnPVW/AKpe2HhIQEIzAw0Pjyyy+N3bt3G4sWLTIaNmxoPPDAA656C17hz/5dPGTIEKNPnz75x+/evduoUqWK8cILLxhbt241Jk2aZPj6+hpJSUmlui5BURm99957Rt26dQ1/f3+jY8eOxsqVK11dklf57rvvDEmFtn79+rm6NK9RVPtKMj777DNXl+ZV/u///s+oV6+e4e/vb9SqVcu4+eabCYmcgKCo/PXq1cuIiIgw/P39jdq1axu9evUydu7c6eqycAUuN5bp2rVroc/cr776ymjSpInh7+9vtGzZ0pg/f76TK/ZOpemHevXqFfnZnZCQ4PzCvVBp/05ciqCofJW2L1asWGHExMQYVqvVaNCggfH6668bubm5Tq7a+5SmH86fP2+MHDnSaNiwoREQEGBERUUZ//jHP4zjx487v3Av8mf/Lu7Xr5/RtWvXQue0bdvW8Pf3Nxo0aFCmf99ZDIN5YAAAAAAAAGCNIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAJwiUceeUQ9e/Z02fX79OmjMWPGuOz65WHq1KmqVq1aiY5NSkpS27ZtZbPZHFsUAAAAAJQQQRFQQVgslstuI0eO1DvvvKOpU6e6pL4NGzZowYIFevbZZ11yfVe47bbbVKlSJf3nP/9xdSkAAAAAIEnyc3UBAJzj0KFD+b+fMWOGRowYobS0tPx9VatWVdWqVV1RmiTpvffe0/333+/SGlzhkUce0bvvvqs+ffq4uhQAAAAAYEYRUFGEh4fnb8HBwbJYLAX2Va1atdCtZ3/5y1/0zDPPaNCgQQoJCVFYWJimTJmi06dPq3///goMDFSjRo307bffFrjWpk2bdPvtt6tq1aoKCwtTnz59dPTo0WJry8vL06xZs9SjR48C+99//301btxYAQEBCgsL03333Zf/PZvNpsTERF199dWqXLmy2rRpo1mzZhU4f/PmzbrzzjsVFBSkwMBAdenSRbt27co//9VXX1WdOnVktVrVtm1bJSUl5Z+7d+9eWSwWzZ49WzfeeKOqVKmiNm3aKCUlpcA1pk6dqrp166pKlSq655579Pvvvxf4/oYNG3TjjTcqMDBQQUFBat++vdasWZP//R49emjNmjX5dQEAAACAKxEUAbisadOmqWbNmlq1apWeeeYZPfXUU7r//vvVqVMnrVu3Trfeeqv69OmjM2fOSJJOnDihm266Se3atdOaNWuUlJSkjIwMPfDAA8Ve45dfflFmZqY6dOiQv2/NmjV69tln9eqrryotLU1JSUm64YYb8r+fmJiozz//XJMnT9bmzZs1ePBg/e1vf9P3338vSTp48KBuuOEGWa1WLV26VGvXrtX//d//KTc3V5L0zjvv6O2339a4ceP0yy+/qFu3brrrrru0Y8eOArUNGzZMzz//vFJTU9WkSRM99NBD+a/x888/69FHH9XTTz+t1NRU3XjjjXrttdcKnN+7d2/VqVNHq1ev1tq1azVkyBBVqlQp//t169ZVWFiYfvzxx7J0DwAAAACUK4thGIariwDgXFOnTtWgQYN04sSJAvsfeeQRnThxQnPnzpVkzijKy8vLDzHy8vIUHByse++9V59//rkkKT09XREREUpJSdF1112n1157TT/++KMWLlyY/7q//vqroqKilJaWpiZNmhSqZ+7cubrvvvt0/vx5WSwWSdLs2bPVv39//frrrwoMDCxwfHZ2tqpXr64lS5YoNjY2f/9jjz2mM2fO6IsvvtDLL7+s6dOnKy0trUAwc0Ht2rU1YMAAvfzyy/n7OnbsqGuvvVaTJk3S3r17dfXVV+vjjz/Wo48+KknasmWLWrZsqa1bt6pZs2Z6+OGHlZmZqfnz5+e/xoMPPqikpKT8tg0KCtJ7772nfv36Fdsf0dHRuvvuu5WQkFDsMQAAAADgDMwoAnBZrVu3zv+9r6+vatSooWuuuSZ/X1hYmCTp8OHDksxbrb777rv8NY+qVq2qZs2aSVKxt1edPXtWVqs1PySSpFtuuUX16tVTgwYN1KdPH/3nP//Jn7W0c+dOnTlzRrfcckuB63z++ef510hNTVWXLl2KDImysrL022+/qXPnzgX2d+7cWVu3bi32/UdERBR4r1u3blVMTEyB4y8NriQpPj5ejz32mOLi4jR27Ngi26By5cr57w0AAAAAXInFrAFc1h+DFovFUmDfhXDnwiPeT506pR49euiNN94o9FoXgpY/qlmzps6cOaOcnBz5+/tLkgIDA7Vu3TotW7ZMixYt0ogRIzRy5EitXr1ap06dkiTNnz9ftWvXLvBaVqtVkhm+lIfLvdeSGDlypB5++GHNnz9f3377rRISEjR9+nTdc889+cccO3ZMtWrVKpd6AQAAAOBKMKMIQLmKjo7W5s2bVb9+fTVq1KjAdtVVVxV5Ttu2bSWZt3Zdys/PT3FxcXrzzTf1yy+/aO/evVq6dKlatGghq9Wq/fv3F7pGVFSUJHMm0I8//qjz5
88Xul5QUJAiIyO1fPnyAvuXL1+uFi1alPi9Nm/eXD///HOBfStXrix0XJMmTTR48GAtWrRI9957rz777LP87507d067du1Su3btSnxdAAAAAHAUgiIA5WrAgAE6duyYHnroIa1evVq7du3SwoUL1b9/f+Xl5RV5Tq1atRQdHa2ffvopf98333yjd999V6mpqdq3b58+//xz2Ww2NW3aVIGBgXr++ec1ePBgTZs2Tbt27dK6dev03nvvadq0aZKkp59+WllZWXrwwQe1Zs0a7dixQ//617+UlpYmSXrhhRf0xhtvaMaMGUpLS9OQIUOUmpqqgQMHlvi9Pvvss0pKStK4ceO0Y8cOTZw4scCT086ePaunn35ay5Yt0759+7R8+XKtXr1azZs3zz9m5cqVslqthW5ZAwAAAABXICgCUK4uzNTJy8vTrbfeqmuuuUaDBg1StWrV5ONT/I+cxx57TP/5z3/yv65WrZpmz56tm266Sc2bN9fkyZP15ZdfqmXLlpKk0aNHa/jw4UpMTFTz5s112223af78+br66qslSTVq1NDSpUt16tQpde3aVe3bt9eUKVPybyV79tlnFR8fr+eee07XXHONkpKSNG/ePDVu3LjE7/W6667TlClT9M4776hNmzZatGiRXnnllfzv+/r66vfff1ffvn3VpEkTPfDAA7r99ts1atSo/GO+/PJL9e7dW1WqVCnxdQEAAADAUXjqGQC3cPbsWTVt2lQzZsyoMLNrjh49qqZNm2rNmjX5ARcAAAAAuBIzigC4hcqVK+vzzz/X0aNHXV2K0+zdu1fvv/8+IREAAAAAt8GMIgAAAAAAAEhiRhEAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIcnJQ9MMPP6hHjx6KjIyUxWLR3Llz//ScZcuWKTo6WlarVY0aNdLUqVMdXicAAIC7YPwEAACcyalB0enTp9WmTRtNmjSpRMfv2bNH3bt314033qjU1FQNGjRIjz32mBYuXOjgSgEAANwD4ycAAOBMFsMwDJdc2GLRnDlz1LNnz2KPeemllzR//nxt2rQpf9+DDz6oEydOKCkpyQlVAgAAuA/GTwAAwNH8XF3A5aSkpCguLq7Avm7dumnQoEHFnpOdna3s7Oz8r202m44dO6YaNWrIYrE4qlQAAHCFDMPQyZMnFRkZKR8fllEsK8ZPAABUHI4YP7l1UJSenq6wsLAC+8LCwpSVlaWzZ8+qcuXKhc5JTEzUqFGjnFUiAAAoZwcOHFCdOnVcXYbHYvwEAEDFU57jJ7cOispi6NChio+Pz/86MzNTdevW1YEDBxQUFOTCygAAwOVkZWUpKipKgYGBri6lwmH8BACAZ3LE+Mmtg6Lw8HBlZGQU2JeRkaGgoKAi/zdMkqxWq6xWa6H9QUFBDHQAAPAA3Op0ZRg/AQBQ8ZTn+MmtFwCIjY1VcnJygX2LFy9WbGysiyoCAABwb4yfAADAlXBqUHTq1CmlpqYqNTVVkvn41tTUVO3fv1+SOe25b9+++cc/+eST2r17t1588UVt27ZN77//vr766isNHjzYmWUDAAC4DOMnAADgTE4NitasWaN27dqpXbt2kqT4+Hi1a9dOI0aMkCQdOnQof9AjSVdffbXmz5+vxYsXq02bNnr77bf18ccfq1u3bs4sGwAAwGUYPwEAAGeyGIZhuLoIR8rKylJwcLAyMzO5xx4AADfGZ7b7oC8AAPAMjvjMdus1igAAAAAAAOA8BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsHN6UDRp0iTVr19fAQEBiomJ0apVqy57/IQJE9S0aVNVrlxZUVFRGjx4sM6dO+ekagEAAFyP8RMAAHAWpwZFM2bMUHx8vBISErRu3Tq1adNG3bp10+HDh4s8/osvvtCQIUOUkJCgrVu36pNPPtGMGTP08ssvO7NsAAAAl2H8BAAAnMmpQdH48eP1+OOPq3///mrRooUmT56sKlWq6NNPPy3y+BUrVqhz5856+OGHVb9+fd1666166KGH/vR/0QAAALwF4ycAAOBMTguKcnJytHbtWsXFxV28uI+P4uLilJKSUuQ5nTp10tq1a/MHNrt379aCBQt0xx13FHud7OxsZWVlFdgAAAA8EeMnAADgbH7OutDRo0eVl5ensLCwAvvDwsK0bdu2Is95+OGHdfToUV1//fUyDEO5ubl68sknLzt1OjExUaNGjSrX2gEAAFyB8RMAAHA2t37q2bJlyzRmzBi9//77WrdunWbPnq358+dr9OjRxZ4zdOhQZWZm5m8HDhxwYsUAAACuxfgJAABcCafNKKpZs6Z8fX2VkZFRYH9GRobCw8OLPGf48OHq06ePHnvsMUnSNddco9OnT+uJJ57QsGHD5ONTOOeyWq2yWq3l/wYAAACcjPETAABwNqfNKPL391f79u2VnJycv89msyk5OVmxsbFFnnPmzJlCgxlfX19JkmEYjisWAADADTB+AgAAzua0GUWSFB8fr379+qlDhw7q2LGjJkyYoNOnT6t///6SpL59+6p27dpKTEyUJPXo0UPjx49Xu3btFBMTo507d2r48OHq0aNH/oAHAADAmzF+AgAAzuTUoKhXr146cuSIRowYofT0
dLVt21ZJSUn5CzTu37+/wP+AvfLKK7JYLHrllVd08OBB1apVSz169NDrr7/uzLIBAABchvETAABwJovh5XOQs7KyFBwcrMzMTAUFBbm6HAAAUAw+s90HfQEAgGdwxGe2Wz/1DAAAAAAAAM5DUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO6cHRZMmTVL9+vUVEBCgmJgYrVq16rLHnzhxQgMGDFBERISsVquaNGmiBQsWOKlaAAAA12P8BAAAnMXPmRebMWOG4uPjNXnyZMXExGjChAnq1q2b0tLSFBoaWuj4nJwc3XLLLQoNDdWsWbNUu3Zt7du3T9WqVXNm2QAAAC7D+AkAADiTxTAMw1kXi4mJ0bXXXquJEydKkmw2m6KiovTMM89oyJAhhY6fPHmy3nrrLW3btk2VKlUq0zWzsrIUHByszMxMBQUFXVH9AADAcfjMLhrjJwAAUBxHfGY77daznJwcrV27VnFxcRcv7uOjuLg4paSkFHnOvHnzFBsbqwEDBigsLEytWrXSmDFjlJeXV+x1srOzlZWVVWADAADwRIyfAACAszktKDp69Kjy8vIUFhZWYH9YWJjS09OLPGf37t2aNWuW8vLytGDBAg0fPlxvv/22XnvttWKvk5iYqODg4PwtKiqqXN8HAACAszB+AgAAzubWTz2z2WwKDQ3VRx99pPbt26tXr14aNmyYJk+eXOw5Q4cOVWZmZv524MABJ1YMAADgWoyfAADAlXDaYtY1a9aUr6+vMjIyCuzPyMhQeHh4kedERESoUqVK8vX1zd/XvHlzpaenKycnR/7+/oXOsVqtslqt5Vs8AACACzB+AgAAzua0GUX+/v5q3769kpOT8/fZbDYlJycrNja2yHM6d+6snTt3ymaz5e/bvn27IiIiihzkAAAAeBPGTwAAwNmceutZfHy8pkyZomnTpmnr1q166qmndPr0afXv31+S1LdvXw0dOjT/+KeeekrHjh3TwIEDtX37ds2fP19jxozRgAEDnFk2AACAyzB+AgAAzuS0W88kqVevXjpy5IhGjBih9PR0tW3bVklJSfkLNO7fv18+Phezq6ioKC1cuFCDBw9W69atVbt2bQ0cOFAvvfSSM8sGAABwGcZPAADAmSyGYRiuLsKRsrKyFBwcrMzMTAUFBbm6HAAAUAw+s90HfQEAgGdwxGe2Wz/1DAAAAAAAAM5DUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO6cHRZMmTVL9+vUVEBCgmJgYrVq1qkTnTZ8+XRaLRT179nRsgQAAAG6G8RMAAHAWpwZFM2bMUHx8vBISErRu3Tq1adNG3bp10+HDhy973t69e/X888+rS5cuTqoUAADAPTB+AgAAzuTUoGj8+PF6/PHH1b9/f7Vo0UKTJ09WlSpV9OmnnxZ7Tl5ennr37q1Ro0apQYMGTqwWAADA9Rg/AQAAZ3JaUJSTk6O1a9cqLi7u4sV9fBQXF6eUlJRiz3v11VcVGhqqRx99tETXyc7OVlZWVoENAADAEzF+AgAAzua0oOjo0aPKy8tTWFhYgf1hYWFKT08v8pyffvpJn3zyiaZMmVLi6yQmJio4ODh/i4qKuqK6AQAAXIXxEwAAcDa3ferZyZMn1adPH02ZMkU1a9Ys8XlDhw5VZmZm/nbgwAEHVgkAAOA+GD8BAIAr5eesC9WsWVO+vr7KyMgosD8jI0Ph4eGFjt+1a5f27t2rHj165O+z2WySJD8/P6Wlpalhw4aFzrNarbJareVcPQAAgPMxfgIAAM7mtBlF/v7+at++vZKTk/P32Ww2JScnKzY2ttDxzZo108aNG5Wampq/3XXXXbrxxhuVmprKlGgAAOD1GD8BAABnc9qMIkmKj49Xv3791KFDB3Xs2FETJkzQ6dOn1b9/f0lS3759Vbt2bSUmJiogIECtWrUqcH61atUkqdB+AAAAb8X4CQAAOJNTg6JevXrpyJEjGjFihNLT09W2bVslJSXlL9C4f/9++fi47bJJAAAATsf4CQAAOJPFMAzD1UU4UlZWloKDg5WZmamgoCBXlwMAAIrBZ7b7oC8AAPAMjvjM5r+fAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAA
AJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJLgiKJk2apPr16ysgIEAxMTFatWpVscdOmTJFXbp0UUhIiEJCQhQXF3fZ4wEAALwR4ycAAOAsTg2KZsyYofj4eCUkJGjdunVq06aNunXrpsOHDxd5/LJly/TQQw/pu+++U0pKiqKionTrrbfq4MGDziwbAADAZRg/AQAAZ7IYhmE462IxMTG69tprNXHiREmSzWZTVFSUnnnmGQ0ZMuRPz8/Ly1NISIgmTpyovn37luiaWVlZCg4OVmZmpoKCgq6ofgAA4Dh8ZheN8RMAACiOIz6znTajKCcnR2vXrlVcXNzFi/v4KC4uTikpKSV6jTNnzuj8+fOqXr16scdkZ2crKyurwAYAAOCJGD8BAABnc1pQdPToUeXl5SksLKzA/rCwMKWnp5foNV566SVFRkYWGCz9UWJiooKDg/O3qKioK6obAADAVRg/AQAAZ/OYp56NHTtW06dP15w5cxQQEFDscUOHDlVmZmb+duDAASdWCQAA4D4YPwEAgNLyc9aFatasKV9fX2VkZBTYn5GRofDw8MueO27cOI0dO1ZLlixR69atL3us1WqV1Wq94noBAABcjfETAABwNqfNKPL391f79u2VnJycv89msyk5OVmxsbHFnvfmm29q9OjRSkpKUocOHZxRKgAAgFtg/AQAAJzNaTOKJCk+Pl79+vVThw4d1LFjR02YMEGnT59W//79JUl9+/ZV7dq1lZiYKEl64403NGLECH3xxReqX79+/r34VatWVdWqVZ1ZOgAAgEswfgIAAM7k1KCoV69eOnLkiEaMGKH09HS1bdtWSUlJ+Qs07t+/Xz4+Fyc5ffDBB8rJydF9991X4HUSEhI0cuRIZ5YOAADgEoyfAACAM1kMwzBcXYQjZWVlKTg4WJmZmQoKCnJ1OQAAoBh8ZrsP+gIAAM/giM9sj3nqGQAAAAAAAByLoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdgRFAAAAAAAAkERQBAAAAAAAADuCIgAAAAAAAEgiKAIAAAAAAIAdQREAAAAAAAAkERQBAAAAAADAjqAIAAAAAAAAkgiKAAAAAAAAYEdQBAAAAAAAAEkERQAAAAAAALAjKAIAAAAAAIAkgiIAAAAAAADYERQBAAAAAABAEkERAAAAAAAA7AiKAAAAAAAAIImgCAAAAAAAAHYERQAAAAAAAJBEUAQAAAAAAAA7giIAAAAAAABIIigCAAAAAACAHUERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACCJoAgAAAAAAAB2BEUAAAAAAACQRFAEAAAAAAAAO4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACQRFAEAAAAAAMCOoAgAAAAAAACSCIoAAAAAAABgR1AEAAAAAAAASQRFAAAAAAAAsCMoAgAAAAAAgCSCIgAAAAAAANgRFAEAAAAAAEASQREAAAAAAADsCIoAAAAAAAAgiaAIAAAAAAAAdk4PiiZNmqT69esrICBAMTExWrVq1WWPnzlzppo1a6aAgABdc801WrBggZMqBQAAcA+MnwAAgLM4NSiaMWOG4uPjlZCQoHXr1qlNmzbq1q2bDh8+XOTxK1as0EMPPaRHH31U69evV8+ePdWzZ09t2rTJmWUDAAC4DOMnAADgTBbDMAxnXSwmJkbXXnutJk6cKEmy2WyKiorSM888oyFDhhQ6vlevXjp9+rS++eab/H3XXXed2rZtq8mTJ5fomllZWQoODlZmZqaCgoLK540AAIByx2d20Rg/AQCA4jjiM9uvXF6lBHJycrR27VoNHTo0f5+Pj4/i4uKUkpJS5DkpKSmKj48vsK9bt26aO3dusdfJzs5WdnZ2/teZmZmSzMYDAADu68JntRP/D8vtMX4CAACX44jxk9OCoqNHjyovL09hYWEF9oeFhWnbtm1FnpOenl7k8enp6cVeJzExUaNGjSq0PyoqqgxVAwAAZ/v9998VHBzs6jLcAuMnAABQEuU5fnJaUOQsQ4cOLfC/aCdOnFC9evW0f/9+Bp0ulpWVpaioKB04cIBp7C5EP7gP+sI90A/uIzMzU3Xr1lX16tVdXUqFw/jJPfHzyX3QF+6DvnAP9IP7cMT4yWlBUc2aNeXr66uMjIwC+zMyMhQeHl7kOeHh4aU6XpKsVqusVmuh/cHBwfwBdhNBQUH0hRugH9wHfeEe6Af34ePj9Ieyui3GT5D4+eRO6Av3QV+4B/rBfZTn+MlpIzF/f3+1b99eycnJ+ftsNpuSk5MVGxtb5DmxsbEFjpekxYsXF3s8AACAN2H8BAAAnM2pt57Fx8erX79+6tChgzp27KgJEybo9OnT6t+/vySpb9++ql27thITEyVJAwcOVNeuXfX222+re/fumj59utasWaOPPvrImWUDAAC4DOMnAADgTE4Ninr16qUjR45oxIgRSk9PV9u2bZWUlJS/4OL+/fsLTJfq1KmTvvjiC73yyit6+eWX1bhxY82dO1etWrUq8TWtVqsSEhKKnE4N56Iv3AP94D7oC/dAP7gP+qJojJ8qLvrBfdAX7oO+cA/0g/twRF9YDJ5BCwAAAAAAADlxjSIAAAAAAAC4N4IiAAAAAAAASCIoAgAAAAAAgB1BEQAAAAAAACR5SVA0adIk1a9fXwEBAYqJidGqVasue/zMmTPVrFkzBQQE6JprrtGCBQucVKl3K00/TJkyRV26dFFISIhCQkIUFx
f3p/2Gkivt34kLpk+fLovFop49ezq2wAqktH1x4sQJDRgwQBEREbJarWrSpAk/o8pBafthwoQJatq0qSpXrqyoqCgNHjxY586dc1K13uuHH35Qjx49FBkZKYvForlz5/7pOcuWLVN0dLSsVqsaNWqkqVOnOrzOioLxk3tg/OQ+GD+5D8ZP7oHxk+u5bOxkeLjp06cb/v7+xqeffmps3rzZePzxx41q1aoZGRkZRR6/fPlyw9fX13jzzTeNLVu2GK+88opRqVIlY+PGjU6u3LuUth8efvhhY9KkScb69euNrVu3Go888ogRHBxs/Prrr06u3PuUti8u2LNnj1G7dm2jS5cuxt133+2cYr1cafsiOzvb6NChg3HHHXcYP/30k7Fnzx5j2bJlRmpqqpMr9y6l7Yf//Oc/htVqNf7zn/8Ye/bsMRYuXGhEREQYgwcPdnLl3mfBggXGsGHDjNmzZxuSjDlz5lz2+N27dxtVqlQx4uPjjS1bthjvvfee4evrayQlJTmnYC/G+Mk9MH5yH4yf3AfjJ/fA+Mk9uGrs5PFBUceOHY0BAwbkf52Xl2dERkYaiYmJRR7/wAMPGN27dy+wLyYmxvj73//u0Dq9XWn74Y9yc3ONwMBAY9q0aY4qscIoS1/k5uYanTp1Mj7++GOjX79+DHTKSWn74oMPPjAaNGhg5OTkOKvECqG0/TBgwADjpptuKrAvPj7e6Ny5s0PrrGhKMth58cUXjZYtWxbY16tXL6Nbt24OrKxiYPzkHhg/uQ/GT+6D8ZN7YPzkfpw5dvLoW89ycnK0du1axcXF5e/z8fFRXFycUlJSijwnJSWlwPGS1K1bt2KPx58rSz/80ZkzZ3T+/HlVr17dUWVWCGXti1dffVWhoaF69NFHnVFmhVCWvpg3b55iY2M1YMAAhYWFqVWrVhozZozy8vKcVbbXKUs/dOrUSWvXrs2fXr17924tWLBAd9xxh1NqxkV8ZjsG4yf3wPjJfTB+ch+Mn9wD4yfPVV6f137lWZSzHT16VHl5eQoLCyuwPywsTNu2bSvynPT09CKPT09Pd1id3q4s/fBHL730kiIjIwv9oUbplKUvfvrpJ33yySdKTU11QoUVR1n6Yvfu3Vq6dKl69+6tBQsWaOfOnfrHP/6h8+fPKyEhwRlle52y9MPDDz+so0eP6vrrr5dhGMrNzdWTTz6pl19+2Rkl4xLFfWZnZWXp7Nmzqly5sosq82yMn9wD4yf3wfjJfTB+cg+MnzxXeY2dPHpGEbzD2LFjNX36dM2ZM0cBAQGuLqdCOXnypPr06aMpU6aoZs2ari6nwrPZbAoNDdVHH32k9u3bq1evXho2bJgmT57s6tIqlGXLlmnMmDF6//33tW7dOs2ePVvz58/X6NGjXV0aAORj/OQ6jJ/cC+Mn98D4ybt49IyimjVrytfXVxkZGQX2Z2RkKDw8vMhzwsPDS3U8/lxZ+uGCcePGaezYsVqyZIlat27tyDIrhNL2xa5du7R371716NEjf5/NZpMk+fn5KS0tTQ0bNnRs0V6qLH8vIiIiVKlSJfn6+ubva968udLT05WTkyN/f3+H1uyNytIPw4cPV58+ffTYY49Jkq655hqdPn1aTzzxhIYNGyYfH/6PxVmK+8wOCgpiNtEVYPzkHhg/uQ/GT+6D8ZN7YPzkucpr7OTRveXv76/27dsrOTk5f5/NZlNycrJiY2OLPCc2NrbA8ZK0ePHiYo/HnytLP0jSm2++qdGjRyspKUkdOnRwRqler7R90axZM23cuFGpqan521133aUbb7xRqampioqKcmb5XqUsfy86d+6snTt35g82JWn79u2KiIhgkFNGZemHM2fOFBrMXBh8musIwln4zHYMxk/ugfGT+2D85D4YP7kHxk+eq9w+r0u19LUbmj59umG1Wo2pU6caW7ZsMZ544gmjWrVqRnp6umEYhtGnTx9jyJAh+ccvX77c8PPzM8aNG2ds3brVSEhI4PGu5aC0/TB27FjD39/fmDVrlnHo0KH87eTJk656C16jtH3xRzy1o/yUti/2799vBAYGGk8//bSRlpZmfPPNN0ZoaKjx2muvueoteIXS9kNCQoIRGBhofPnll8bu3buNRYsWGQ0bNjQeeOABV70Fr3Hy5Elj/fr1xvr16w1Jxvjx443169cb+/btMwzDMIYMGWL06dMn//gLj3h94YUXjK1btxqTJk0q0yNeURjjJ/fA+Ml9MH5yH4yf3APjJ/fgqrGTxwdFhmEY7733nlG3bl3D39/f6Nixo7Fy5cr873Xt2tXo169fgeO/+uoro0mTJoa/v7/RsmVLY/78+U6u2DuVph/q1atnSCq0JSQkOL9wL1TavxOXYqBTvkrbFytWrDBiYmIMq9VqNGjQwHj99deN3NxcJ1ftfUrTD+fPnzdGjhxpNGzY0AgICDCioqKMf/zjH8bx48edX7iX+e6774r82X+h/fv162d07dq10Dlt27Y1/P39jQYNGhifffaZ0+v2Voyf3APjJ/fB+Ml9MH5yD4yfXM9VYyeLYTAPDAAAAAAAAB6+RhEAAAAAAADKD0ERAAAAAAAAJBEUAQAAAAAAwI6gCAAAAAAAAJIIigAAAAAAAGBHUAQAAAAAAABJBEUAAAAAAACwIygCAAAAAACAJIIiAAAAAAAA2BEUAQAAAAAAQBJBEQAAAAAAAOwIigAAAAAAACBJ+n/XQ9XTa8ET/gAAAABJRU5ErkJggg==", "text/plain": [ - "
" + "tensor([-3.3755, -5.5730, -3.7751, ..., -2.6631, -6.7499, -4.9545])" ] }, + "execution_count": 14, "metadata": {}, - "output_type": "display_data" + "output_type": "execute_result" } ], - "source": [] + "source": [ + "combined_state_dict[\"model.layers.20.mixer.dt_proj.bias\"]" + ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "============================================================\n", - "WRONG APPROACH: Naive Discretization\n", - "============================================================\n", - "Step 1: x = 7.000\n", - "Step 2: x = 5.500\n", - "Step 3: x = 4.750\n", - "Step 4: x = 4.375\n", - "Step 5: x = 4.188\n", - "\n", - "============================================================\n", - "CORRECT APPROACH: Solving the Differential Equation\n", - "============================================================\n", - "\n", - "We need to solve: dx/dt = Ax + Bu\n", - "This is a first-order linear ODE with constant coefficients.\n", - "\n", - "Step 1: Homogeneous solution (u=0)\n", - " dx/dt = Ax\n", - " Solution: x_h(t) = e^(At) * x(0)\n", - "\n", - "Step 2: Particular solution (variation of parameters)\n", - " Full solution: x(t) = e^(At)*x(0) + ∫[0,t] e^(A(t-τ))*B*u(τ) dτ\n", - "\n", - "Step 3: Apply ZOH (u is constant over [0,Δ])\n", - " x(Δ) = e^(AΔ)*x(0) + (∫[0,Δ] e^(As) ds)*B*u\n", - " x(Δ) = e^(AΔ)*x(0) + A^(-1)*(e^(AΔ) - 1)*B*u\n", - "\n", - "Discretized system:\n", - "A_d = e^(AΔ) = e^(-0.5*1.0) = 0.607\n", - "B_d = (e^(AΔ)-1)/A * B = 1.574\n", - "Step 1: x = 7.639\n", - "Step 2: x = 6.207\n", - "Step 3: x = 5.339\n", - "Step 4: x = 4.812\n", - "Step 5: x = 4.493\n" - ] - }, - { - "ename": "IndexError", - "evalue": "index 1000 is out of bounds for axis 0 with size 1000", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[1], line 148\u001b[0m\n\u001b[1;32m 145\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(\u001b[38;5;28mlen\u001b[39m(x_naive_history)):\n\u001b[1;32m 146\u001b[0m \u001b[38;5;66;03m# Get continuous value at discrete time point\u001b[39;00m\n\u001b[1;32m 147\u001b[0m continuous_idx \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mint\u001b[39m(i \u001b[38;5;241m*\u001b[39m \u001b[38;5;28mlen\u001b[39m(t_continuous) \u001b[38;5;241m/\u001b[39m \u001b[38;5;241m5\u001b[39m)\n\u001b[0;32m--> 148\u001b[0m continuous_value \u001b[38;5;241m=\u001b[39m \u001b[43mx_continuous\u001b[49m\u001b[43m[\u001b[49m\u001b[43mcontinuous_idx\u001b[49m\u001b[43m]\u001b[49m\n\u001b[1;32m 150\u001b[0m errors_naive\u001b[38;5;241m.\u001b[39mappend(\u001b[38;5;28mabs\u001b[39m(x_naive_history[i] \u001b[38;5;241m-\u001b[39m continuous_value))\n\u001b[1;32m 151\u001b[0m errors_correct\u001b[38;5;241m.\u001b[39mappend(\u001b[38;5;28mabs\u001b[39m(x_correct_history[i] \u001b[38;5;241m-\u001b[39m continuous_value))\n", - "\u001b[0;31mIndexError\u001b[0m: index 1000 is out of bounds for axis 0 with size 1000" - ] - }, { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABIoAAANECAYAAADfVMS/AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQABAABJREFUeJzs3XV8FEcbwPHfxYlDSICUECRAElyLe3EtWrxQrEgp0hZa3IpTtI5LaXFocShSKFCkuHsJBIgQQvTm/WPfO3K5SwgQQZ4vn/2Qm53dnZ2T3XtuRKeUUgghhBBCCCGEEEKIt55VRhdACCGEEEIIIYQQQrwaJFAkhBBCCCGEEEIIIQAJFAkhhBBCCCGEEEKI/5NAkRBCCCGEEEIIIYQAJFAkhBBCCCGEEEIIIf5PAkVCCCGEEEIIIYQQApBAkRBCCCGEEEIIIYT4PwkUCSGEEEIIIYQQQghAAkVCCCGEEEIIIYQQ4v8kUCREEnQ6HX369MnoYjy3zp07kzt37ufe7tq1a+h0OhYsWJDqZXoZuXPnpmHDhhldDCGEEOKt8Lre/wjxNqpWrRrVqlXL6GKIN5AEisQbZ+XKleh0OtasWWO2rlixYuh0Onbt2mW2LleuXFSoUCE9imji2rVrfPjhh+TLlw8HBweyZ89OlSpVGDFiRLqXJaOcOXOGkSNHcu3atQwth1KKxYsXU6VKFdzd3XF0dKRIkSKMHj2ax48fm+WvVq0aOp0OnU6HlZUVrq6uFCxYkA4dOrBt2zaLx8idO7dxm8RL3bp1ky3f7t27jXmXLFliMU/FihXR6XQULlz4+SsAmDt3rsVg4avyHAkhhLDsVb3/ed7rx8iRI9HpdNy/f9/ievkByTLDD34JF1dXV4oXL87s2bOJj49/of3+/vvvjBw5MnUL+4oJDg7mk08+wd/fn0yZMuHl5UXZsmX5/PPPiYiIyOjiCZEhbDK6AEKktkqVKgGwb98+mjVrZkwPDw/n1KlT2NjYsH//fqpXr25cd/PmTW7evEmbNm3StayXLl2iTJkyZMqUiS5dupA7d27u3LnD0aNHmThxIqNGjUq3svj6+vLkyRNsbW3T7ZgGZ86cYdSoUVSrVu2FWkOlhvj4eNq2bcvKlSupXLkyI0eOxNHRkb179zJq1Ch+/fVXtm/fTrZs2Uy2y5kzJxMmTADg8ePHXLp0idWrV7NkyRJatWrFkiVLzOq0ePHiDBw40KwM3t7eKSqrg4MDy5Yto3379ibp165d46+//sLBweF5Tt3E3LlzyZo1K507dzZJfxWeIyGEEEl7Ve9/5PqRvj744APq168PQFhYGL///jt9+/bl+vXrTJ48+bn39/vvvzNnzpw3Nlj08OFDSpcuTXh4OF26dMHf358HDx7w77//Mm/ePHr16oWzs3NGF1OIdCeBIvHG8fb2Jk+ePOzbt88k/cCBAyilaNmypdk6w2PDTVZ6mT59OhERERw/fhxfX1+Tdffu3UvXsuh0upcKMLzuJk2axMqVKxk0aJDJjVT37t1p1aoVTZs2pXPnzvzxxx8m27m5uZkFbL7++mv69evH3LlzyZ07NxMnTjRZ/84775ht8zzq16/P+vXruX//PlmzZjWmL1u2jGzZspE/f35CQkJeeP/p6fHjxzg5OWV0MYQQ4rX3Ot3/iLRTsmRJk3uMjz/+mHfffZdly5a9UKDoTffTTz9x48YN9u/fb9ayLjw8HDs7uwwqmRAZS7qeiTdSpUqVOHbsGE+ePDGm7d+/n0KFClGvXj0OHjyIXq83WafT6ahYsaLZvtauXUvhwoWxt7enUKFCbN682bhu165dSTbzXrZsGTqdjgMHDiRZzsuXL5MzZ06zIBGAl5eXWdrcuXMpVKgQ9vb2eHt707t3b0JDQ5Pcf2xsLFmyZOHDDz80WxceHo6DgwODBg0CLI9R1LlzZ5ydnbl9+zZNmzbF2dkZT09PBg0aZNaE+cGDB3To0AFXV1fc3d3p1KkTJ06ceOa4RwsWLKBly5YAVK9e3dhcevfu3Sb59u3bR9myZXFwcCBv3rwsWrTIbF+hoaH0798fHx8f7O3t8fPzY+LEiSbPtSVPnjxh8uTJFChQwNg6KKFGjRrRqVMnNm/ezMGDB5PdF4C1tTUzZ84kMDCQ2bNnExYW9sxtnkeTJk2wt7fn119/NUlftmwZrVq1wtra2myb+fPnU6NGDby8vLC3tycwMJB58+aZ5MmdOzenT5/mzz//ND4P1apVS9Fz9Mcff1C5cmWcnJxwcXGhQYMGnD592mT/htfT5cuXqV+/Pi4uLrRr1w6Aixcv0rx5c7Jnz46DgwM5c+akTZs2qV53QgjxJkuv+x+A69ev8/HHH1OwYEEyZcqEh4cHLVu2NOliltJr/MuaMmUKFSpUwMPDg0yZMlGqVCl+++03s3yG8ZeWLl1KwYIFcXBwoFSpUuzZs8ckn6H727lz52jVqhWurq54eHjwySefEBUVZbbfJUuWUKpUKTJlykSWLFlo06YNN2/eNMlTrVo1ChcuzJkzZ6hevTqOjo688847TJo0yWx/s2bNolChQjg6OpI5c2ZKly7NsmXLXqhudDod2bJlw8bGvH3As67dnTt3Zs6cOcb9GBbQAlLvv/++yf6KFCmCTqfj33//Nab98ssv6HQ6zp49a0y7ffs2Xbp0IVu2bMbX188//2xWvujoaEaMGIGfnx/29vb4+Pjw2WefER0dbXaOffr0eeZr1pLLly9jbW1NuXLlzNa5urqa/Ii6d+9eWrZsSa5cuYzl+fTTT03eb4Z6c3Z25saNGzRs2BBnZ2feeecdY12ePHmSGjVq4OTkhK+vr9lzu2DBAnQ6HXv27KFHjx54eHjg6upKx44dU/RDYErrbdu2bVSqVAl3d3ecnZ0pWLAgQ4cOfeb+xdtBAkXijVSpUiViY2P5+++/jWmGXwoqVKhAWFgYp06dMlnn7++Ph4eHyX727dvHxx9/TJs2bZg0aRJRUVE0b96cBw8eANpF38fHh6VLl5qVYenSpeTLl4/y5csnWU5fX19u3rzJzp07n3lOI0eOpHfv3nh7ezN16lSaN2/Od999R+3atYmNjbW4ja2tLc2aNWPt2rXExMSYrFu7di3R0dHPbG4eHx9PnTp18PDwYMqUKVStWpWpU6fy/fffG/Po9XoaNWrE8uXL6dSpE+PGjePOnTt06tTpmedVpUoV+vXrB8DQoUNZvHgxixcvJiAgwJjn0qVLtGjRgvfee4+pU6eSOXNmOnfubHIzExkZSdWqVVmyZAkdO3Zk5syZVKxYkSFDhjBgwIBky7Bv3z5CQkJo27atxRspgI4dOwKwcePGZ54TaMGiDz74gMjISLNfcGNjY7l//77ZkvhGIymOjo40adKE5cuXG9NOnDjB6dOnadu2rcVt5s2bh6+vL0OHDmXq1Kn4+Pjw8ccfG29aAGbMmEHOnDnx9/c3Pg9ffvnlM5+jxYsX06BBA5ydnZk4cSLDhg3jzJkzVKpUyWxMiri4OOrUqYOXlxdTpkyhefPmxMTEUKdOHQ4ePEjfvn2ZM2cO3b
t358qVK8kGQoUQQphKr/sfgMOHD/PXX3/Rpk0bZs6cSc+ePdmxYwfVqlUjMjISSNk1PikPHz60eK209OPPN998Q4kSJRg9ejTjx4/HxsaGli1bsmnTJrO8f/75J/3796d9+/aMHj2aBw8eULduXZN6MWjVqhVRUVFMmDCB+vXrM3PmTLp3726SZ9y4cXTs2JH8+fMzbdo0+vfvz44dO6hSpYrZNSwkJIS6detSrFgxpk6dir+/P59//rlJa+UffviBfv36ERgYyIwZMxg1ahTFixc3eU6TExkZaayrK1euMGfOHDZv3mx2T5aSa3ePHj147733jPkNC0DlypVN7m8ePnzI6dOnsbKyYu/evcb0vXv34unpaXzO7969S7ly5di+fTt9+vThm2++wc/Pj65duzJjxgzjdnq9nsaNGzNlyhQaNWrErFmzaNq0KdOnT6d169Zm552S16wlvr6+xMfHG88rOb/++iuRkZH06tWLWbNmUadOHWbNmmW8R0woPj6eevXq4ePjw6RJk8idOzd9+vRhwYIF1K1bl9KlSzNx4kRcXFzo2LEjV69eNdtHnz59OHv2LCNHjqRjx44sXbqUpk2bopRKsowprbfTp0/TsGFDoqOjGT16NFOnTqVx48bs37//mfUg3hJKiDfQ6dOnFaDGjBmjlFIqNjZWOTk5qYULFyqllMqWLZuaM2eOUkqp8PBwZW1trbp162ayD0DZ2dmpS5cuGdNOnDihADVr1ixj2pAhQ5S9vb0KDQ01pt27d0/Z2NioESNGJFvOU6dOqUyZMilAFS9eXH3yySdq7dq16vHjxyb57t27p+zs7FTt2rVVfHy8MX327NkKUD///LMxrVOnTsrX19f4eMuWLQpQGzZsMNln/fr1Vd68eY2Pr169qgA1f/58k30BavTo0SbblihRQpUqVcr4eNWqVQpQM2bMMKbFx8erGjVqmO3Tkl9//VUBateuXWbrfH19FaD27NljUh/29vZq4MCBxrQxY8YoJycndeHCBZPtv/jiC2Vtba1u3LiR5PFnzJihALVmzZok8zx8+FAB6v333zemVa1aVRUqVCjJbdasWaMA9c0335idj6VlwoQJSe5LKaV27dqlAPXrr7+qjRs3Kp1OZzyvwYMHG59PS+WKjIw021+dOnVMXgNKKVWoUCFVtWpVs7xJPUePHj1S7u7uZu+foKAg5ebmZpJueD198cUXJnmPHTtmPC8hhBAvLj3vfyxdVw4cOKAAtWjRImNactd4S0aMGJHkddKwNGjQwGSbxGWJiYlRhQsXVjVq1DA7N0AdOXLEmHb9+nXl4OCgmjVrZlaGxo0bm2z/8ccfK0CdOHFCKaXUtWvXlLW1tRo3bpxJvpMnTyobGxuT9KpVq5rVTXR0tMqePbtq3ry5Ma1JkybJ3lskxXAfZ2np1auX0uv1xrzPc+3u3bu3svSV0fC8njlzRiml1Pr165W9vb1q3Lixat26tTFf0aJFTeq2a9euKkeOHOr+/fsm+2vTpo1yc3MzPpeLFy9WVlZWau/evSb5vv32WwWo/fv3G9NS+pq1JCgoSHl6eipA+fv7q549e6ply5aZ3NcbWHrNT5gwQel0OnX9+nVjmuF+Z/z48ca0kJAQlSlTJqXT6dSKFSuM6efOnVOAyXeG+fPnK0CVKlVKxcTEGNMnTZqkALVu3TpjWtWqVU3u21Jab9OnT1eACg4OTrZ+xNtLWhSJN1JAQAAeHh7GXzpOnDjB48ePjX2PK1SoYIyYHzhwgPj4eIv982vVqkW+fPmMj4sWLYqrqytXrlwxpnXs2JHo6GiTJs6//PILcXFxzxyHplChQhw/fpz27dtz7do1vvnmG5o2bUq2bNn44YcfjPm2b99OTEwM/fv3x8rq6du2W7duuLq6WvzFzKBGjRpkzZqVX375xZgWEhLCtm3bLP4iY0nPnj1NHleuXNmkDjZv3oytrS3dunUzpllZWdG7d+8U7f9ZAgMDqVy5svGxp6cnBQsWNCnDr7/+SuXKlcmcObPJL4+1atUiPj7erFl5Qo8ePQLAxcUlyTyGdeHh4Skut2HwQ8P+Dd599122bdtmtnzwwQcp3nft2rXJkiULK1asQCnFihUrkt0+U6ZMxr/DwsK4f/8+VatW5cqVKy/VvWvbtm2EhobywQcfmNS7tbU17777rsUZdnr16mXy2M3NDYAtW7YYf4UWQgjx/NLz/ifhdSU2NpYHDx7g5+eHu7s7R48efelzWbVqlcVrZeJJJRKXJSQkhLCwMCpXrmyxHOXLl6dUqVLGx7ly5aJJkyZs2bLFrFt94vuYvn37AtoAzwCrV69Gr9fTqlUrk2tg9uzZyZ8/v9k10NnZ2eTe0M7OjrJly5rUq7u7O7du3eLw4cPPrCNLunfvbqyrVatW0bt3b7777juT1tUvcu1OzHBfZri/2rt3L2XKlOG9994ztigKDQ3l1KlTxrxKKVatWkWjRo1QSpkcu06dOoSFhRmfs19//ZWAgAD8/f1N8tWoUQPArIwpec1aki1bNk6cOEHPnj0JCQnh22+/pW3btnh5eTFmzBiT1jsJX2ePHz/m/v37VKhQAaUUx44dM9v3Rx99ZPzb3d2dggUL4uTkRKtWrYzpBQsWxN3d3WI5u3fvbjIhSq9evbCxsTG+/ixJab25u7sDsG7dumcO0SDeTjKYtXgj6XQ6KlSowJ49e9Dr9ezfvx8vLy/8/PwA7UZp9uzZAMYbJks3Srly5TJLy5w5s0n/YH9/f8qUKcPSpUvp2rUroHU7K1eunPF4ySlQoACLFy8mPj6eM2fOsHHjRiZNmkT37t3JkycPtWrV4vr164B2MUnIzs6OvHnzGtdbYmNjQ/PmzVm2bBnR0dHY29uzevVqYmNjUxQocnBwwNPTM9k6uH79Ojly5MDR0dEkX0rOPyVS8jxcvHiRf//916ysBskNDm4IAiUO6CSUkmBSYoYpVRNvkzVrVmrVqpXi/Vhia2tLy5YtWbZsGWXLluXmzZtJdjsD7XU+YsQIDhw4YBaMCQsLMwZrntfFixcBjDcgibm6upo8trGxIWfOnCZpefLkYcCAAUybNo2lS5dSuXJlGjduTPv27V+4XEII8TZKz/ufJ0+eMGHCBObPn8/t27dNvlCnxvhyVapUMZmwwcDSxBsbN25k7NixHD9+3GQcFsN4Ognlz5/fLK1AgQJERkYSHBxM9uzZk8ybL18+rKysjF2zLl68iFLK4j4Bs1lPc+bMaVamzJkzm4zp8/nnn7N9+3bKli2Ln58ftWvXpm3bthbHkbIkf/78JvcY77//PjqdjhkzZtClSxeKFCny3NduSwyTZ+zdu5cePXqwd+9eqlevTpUqVejbty9Xrlzh7Nmz6PV6Y6AoODiY0NBQvv/+e5MhDBIy3K9dvHiRs2fPpvi+LiWv2aTkyJGDefPmMXfuXC5evMiWLVuYOHEiw4cPJ0eOHMaAz40bNxg+fDjr168322/i17yl+2c3NzeLrwE3NzeL5Uz8unJ2diZHjhxm3foTSmm9tW7dmh9//JGPPvqIL774gpo1a
/L+++/TokULkx+lxdtLAkXijVWpUiU2bNjAyZMnzWYyqFChAoMHD+b27dvs27cPb29v8ubNa7YPS4MCA2Z9gzt27Mgnn3zCrVu3iI6O5uDBg8YbsZSytramSJEiFClShPLly1O9enWWLl360gEFgDZt2vDdd9/xxx9/0LRpU1auXIm/vz/FihVLUbkyWkqeB71ez3vvvcdnn31mMW+BAgWS3L+h3/y///5L06ZNLeYx3MQFBgampMgAxvEOUitglljbtm359ttvGTlyJMWKFUuybJcvX6ZmzZr4+/szbdo0fHx8sLOz4/fff2f69Okv9UuSYdvFixeb3FwbJB7zyd7e3uINyNSpU+ncuTPr1q1j69at9OvXjwkTJnDw4EGzwJIQQoikpdf9T9++fZk/fz79+/enfPnyuLm5odPpaNOmTbq2UNi7dy+NGzemSpUqzJ07lxw5cmBra8v8+fNfeADopCT+gq/X69HpdPzxxx8W6yzxtOopqdeAgADOnz/Pxo0b2bx5M6tWrWLu3LkMHz6cUaNGvVC5a9asyezZs9mzZw9FihR57mt3UipVqsSOHTt48uQJ//zzD8OHD6dw4cK4u7uzd+9ezp49i7OzMyVKlACe3jO0b98+yXEsixYtasxbpEgRpk2bZjGfj4+PyeOU3rMnR6fTUaBAAQoUKECDBg3Inz8/S5cu5aOPPiI+Pp733nuPhw8f8vnnn+Pv74+TkxO3b9+mc+fOZq/5pMqTGuVMTkrrLVOmTOzZs4ddu3axadMmNm/ezC+//EKNGjXYunXrK3H/LzKWBIrEG8vwC9m+ffvYv38//fv3N64rVaoU9vb27N69m7///pv69eu/1LHatGnDgAEDWL58OU+ePMHW1jbF3bosKV26NAB37twBMM6Kdv78eZMbupiYGK5evfrMYFKVKlXIkSMHv/zyC5UqVWLnzp18+eWXL1y+xHx9fdm1axeRkZEmrYouXbqUou0t/eL3vPLly0dERMQLBdYMMz4sW7aML7/80uLF0TDLWsOGDVO0z/j4eJYtW4ajo2OaTTtcqVIlcuXKxe7du5k4cWKS+TZs2EB0dDTr1683+cXNUtPypJ6LpNINzby9vLxeOqhpCJR+9dVX/PXXX1SsWJFvv/2WsWPHvtR+hRDibZJe9z+//fYbnTp1YurUqca0qKgoswGcU+Man5xVq1bh4ODAli1bsLe3N6bPnz/fYn5Da5qELly4gKOjo1krjIsXL5InTx7j40uXLqHX68mdOzegXQOVUuTJkyfZH6Sel5OTE61bt6Z169bExMTw/vvvM27cOIYMGWKxRdWzxMXFAU9bOj/PtTu5569y5crMnz+fFStWEB8fT4UKFbCysqJSpUrGQFGFChWM91Wenp64uLgQHx//zOPmy5ePEydOULNmzTR/DVmSN29eMmfObLwXP3nyJBcuXGDhwoUmg1dv27Ytzcpw8eJFqlevbnwcERHBnTt3kn3fPk+9WVlZUbNmTWrWrMm0adMYP348X375Jbt27UqVH6rF603alYk3VunSpXFwcGDp0qXcvn3b5Bc1e3t7SpYsyZw5c3j8+PFLf5HPmjUr9erVY8mSJSxdupS6detabC6d2N69ey3OWGboe2zoalarVi3s7OyYOXOmyS8OP/30E2FhYTRo0CDZ41hZWdGiRQs2bNjA4sWLiYuLe6lAVmJ16tQhNjbWZFwlvV5vMqNWcpycnABeaoarVq1aceDAAbZs2WK2LjQ01HiTZImjoyODBg3i/PnzFgNomzZtYsGCBdSpU8fi9KmJxcfH069fP86ePUu/fv1S1IT7Reh0OmbOnMmIESPo0KFDkvkMN2iJuwVYuol2cnKy+Dwk9RzVqVMHV1dXxo8fb/G1HBwc/MzzCA8PN3t+ihQpgpWVldlUrkIIIZKXXvc/1tbWZq0gZs2aZTbOT2pc459VDp1OZ3Lca9eusXbtWuPjX375xXhPdeDAAZo3b067du0AuHnzJuvWraN27dpmPxQlvo+ZNWsWAPXq1QNg9uzZWFlZMWrUKLO6UEo9c8atxB49eoROp+P48ePGNDs7OwIDA1FKERsby/Lly5+7pe2GDRsAjC3Jn+fandzzZ+hSNnHiRIoWLWrsLl65cmV27NjBkSNHTMaYtLa2pnnz5qxatcriLHMJj9uqVStu375tcm9p8OTJEx4/fvzM806JgIAAiz+2HTp0iAcPHhhfN5bupZRSfPPNNwBMmTIlVX+EBfj+++9Nnp958+YRFxdnfP1ZktJ6e/jwodn64sWLA8i9lwCkRZF4g9nZ2VGmTBn27t2Lvb29ycCFoDW/NvwKlhotPjp27EiLFi0AGDNmTIq2mThxIv/88w/vv/++sant0aNHWbRoEVmyZDH+Cujp6cmQIUMYNWoUdevWpXHjxpw/f565c+dSpkyZZw6aDVpf5FmzZjFixAiKFCmSoqlpU6pp06aULVuWgQMHcunSJfz9/Vm/fr3xIvSsXzSKFy+OtbU1EydOJCwsDHt7e2rUqIGXl1eKyzB48GDWr19Pw4YN6dy5M6VKleLx48ecPHmS3377jWvXriUbvPviiy84duwYEydONN5EZsqUiX379rFkyRICAgJYuHCh2XZhYWEsWbIE0KakvXTpEqtXr+by5cu0adPG4mvh9u3bxm0ScnZ2TrLrW1KaNGlCkyZNks1Tu3Zt7OzsaNSoET169CAiIoIffvgBLy8v4y9lBqVKlWLevHmMHTsWPz8/vLy8qFGjRrLP0bx58+jQoQMlS5akTZs2eHp6cuPGDTZt2kTFihWf2Q1z586d9OnTh5YtW1KgQAHi4uJYvHix8YZSCCFEyqXX/U/Dhg1ZvHgxbm5uBAYGcuDAAbZv346Hhwe3bt0yu/Y3a9bM+Hfjxo1Zt24dMTExzJo1iyVLlnDhwgWsra0tXvvnz59Pr169zMYSjIiIYPr06SilqFu3Lm3btuXevXvMmTMHPz8//v33X4KDg+nUqRNz586la9euBAYG8ueff9KrVy8mTZrE3LlzASx267p69SqNGzembt26HDhwgCVLltC2bVuKFStGREQEN2/epGfPnsybN49r167RtGlTXFxcuHr1KmvWrKF79+4MGjTIYv19+umnXL9+3eTHpBMnTgAwZMgQKleuTLZs2Th79iyzZ8+mQYMGuLi40KhRI+rWrZvk83L06FHjPcajR4/YsWMHq1atokKFCtSuXRvQxiBK6bXb8Prp168fderUwdramjZt2gBa1/rs2bNz/vx540DfoLVk//zzzwFMAkUAX3/9NYsWLaJ06dL07NmTwMBAHj58yNGjR9m+fbvx3rFDhw6sXLmSnj17smvXLipWrEh8fDznzp1j5cqVbNmyxdgC/0XFxcVx8eJFxo4dy/nz5ylVqhR2dnacPXuWn3/+GQcHB4YOHQpoY5Lmy5ePTz/9lM6dOzNp0iR+//1349hCPXr0MJnUJTXExMRQs2ZNWrVqZbzvr1SpEo0bN05ym5TW2+jRo9mzZw8NGjTA19eXe/fuMXfuXHLmzJlmLeHFayYdZ1gTIt0N
GTJEAapChQpm61avXq0A5eLiouLi4szWA6p3795m6b6+vqpTp05m6dHR0Spz5szKzc1NPXnyJEXl279/v+rdu7cqXLiwcnNzU7a2tipXrlyqc+fO6vLly2b5Z8+erfz9/ZWtra3Kli2b6tWrlwoJCTHJ06lTJ+Xr62u2rV6vVz4+PgpQY8eONVtvmFY14VT2nTp1Uk5OTmZ5DdPGJhQcHKzatm2rXFxclJubm+rcubPav3+/AkymAU3KDz/8oPLmzausra1NptH19fU1mwZXKfPpQJXSpnsdMmSI8vPzU3Z2dipr1qyqQoUKasqUKSbTiyYlPj5ezZ8/X1WsWFG5uroqBwcHVahQITVq1CgVERFhsQwkmH7W2dlZ5c+fX7Vv315t3brV4jF8fX2TnL7W0vOW0K5du1I0jXzVqlXNptZdv369Klq0qHJwcFC5c+dWEydOVD///LMC1NWrV435goKCVIMGDZSLi4sCTOo4qefIULY6deooNzc35eDgoPLly6c6d+5sMgVxUq+nK1euqC5duqh8+fIpBwcHlSVLFlW9enW1ffv2ZM9TCCGEZelx/xMSEqI+/PBDlTVrVuXs7Kzq1Kmjzp07p3x9fVX79u3VnTt3jMuUKVOUr6+v0ul0ClA//PCDevDggSpdurQKCAhQy5cvV5cuXVKHDh1S/v7+ClB79uwxHqt3796qdOnSxnIY7gv+/PNP4zTk+fPnV/b29srf31/Nnz/feK+yZMkS5e3tbXJuS5YsMeYvUaKEyfVMqaf3OWfOnFEtWrRQLi4uKnPmzKpPnz7Ge7z9+/crKysr9fjxY7Vq1SpVqVIl5eTkpJycnJS/v7/q3bu3On/+vHGfia/NFSpUUOPGjTO5b5s1a5by9vZWVapUUR4eHsre3l7ly5dPDR48WIWFhSX3lBvv4xIuNjY2Km/evGrw4MHq0aNHZtuk5NodFxen+vbtqzw9PY3PX0ItW7ZUgPrll1+MaTExMcrR0VHZ2dmZ3RNfuHBBAapz587Kx8dH2draquzZs6uaNWuq77//3iRvTEyMmjhxoipUqJCyt7dXmTNnVqVKlVKjRo0yqY/nvWc3OHnypAJU3759VcmSJVWWLFmUjY2NypEjh2rZsqU6evSoSf4zZ86ogIAApdPpVNasWVW3bt3UiRMnUnz/bOn+zFDOhPe68+fPV4D6888/Vffu3VXmzJmVs7OzateunXrw4IHZPhPfD6ek3nbs2KGaNGmivL29lZ2dnfL29lYffPCBunDhQpL1Jd4uEigSIpXExsYqT09P1aVLl4wuyitjzZo1ClD79u3L6KIIIYQQIoPExcWp1q1bKw8PD3X8+HGl1+tVzZo1VdGiRVV4eLhJ3ujoaJU7d27Vrl07Y1r58uVVjx49zPY7depU5ebmpvR6vcXj1qxZ0yx40qFDB5MfSbZv367s7e3Vli1bVNmyZZWDg4PKnj27AlRwcLBxXzNmzFC+vr7K0dFRde3aVX3zzTeqYMGCxvW7du1SVapUUW5ubsrFxUWVLl1anTlzxqxM0dHRysbGxqRM7777rlJKqa5du6rWrVurMWPGKB8fH+Xs7Ky6detmsr2vr69JUOLbb79VhQsXVpkyZVJZsmRRderUUfHx8Uk8E0rt27dPValSRTk4OChvb281YsQI47pr164pOzs7tXr1amPa/Pnzlaurqzp27JhSSqmjR4+qhg0bqmzZsikHBwdVokQJtXv3bpNjXL9+XXXs2FF5eXkpBwcHVbRoUbV37171008/mT0f8+bNs1hOvV6v5s+frwICAowBwITl8vT0VD/88IPJNocOHVL29vbqypUrSimlHjx4oLp37668vLyUq6uratq0qfrvv/+M+RcvXqxy5cplfNy6dWuz19nAgQONQRzDayfhcvbsWbVr1y5la2uroqOjjdutXr1alS5dWmXKlEnly5dP/fzzzyb7zZkzp5o2bZrq1KmTcnNzMzkfQ6Do8OHDFutGiPQggSIhUsmvv/6qALOL5dsiMjLS5HFcXJyqUaOGcnV1NVsnhBBCiLdDXFycatOmjTFIpJRSq1atSvaL8AcffKAKFy5s3N7R0dGstYkhX7Vq1ZI89v3791XVqlVVv3791J07dxSg6tevr9zd3Y15pk6dqjJlyqQaNGigDh48qE6dOqW8vLxMAkVjx45V3t7eat26derSpUvqo48+UpkzZ1atW7dWSil18eJF5erqqubOnasuX76sTp06pebOnavu3btnVqb4+Hj1999/K0AdP35c3blzx9g6vFSpUipLlixq3Lhx6vz58+rXX39VOp3OeG8ZEhKiAGPQZsmSJSpnzpxq48aN6tq1a+rw4cNq5syZSdbHr7/+qjw8PNSCBQvU5cuX1R9//KE8PDzU4sWLjXn69OmjihYtqvR6vdq6datydHRU27ZtM67fuHGjWrZsmTpz5ow6d+6c+uijj5SXl5dx/bVr11S2bNlUy5Yt1cGDB9WFCxfU999/r06cOKEiIiLUgAEDVNWqVY2tzaKioiyW1VCOzZs3qytXrqjZs2cre3t7Y4v7GjVqqE8//dRkm+rVq6t+/foppZQKDQ1VgYGBqkmTJuro0aPq5MmTqkqVKqpGjRrG/AMHDlSNGzc2Pi5QoICaO3euyT5r1qyphg4dqpRS6uHDh6pcuXJqyJAhxvLr9Xo1Y8YMVaxYMeM233//vXJzc1MLFixQV65cUQsWLFBWVlbGVnIPHjxQgMqfP79atmyZunz5svr888+Vvb29io6OlkCReCVIoEiIl3Tw4EH1/fffKx8fH1WiRImMLk6G6dq1q2rbtq2aNWuWmjJliqpQoYKxObgQQggh3j6GIFGWLFmMwQ2llGrcuLEqW7Zskts1bdpUlSxZUiml1KlTp5Lssg2oAQMGJFuGbNmyqeXLlyultC5KZcqUUVWqVDGu79ixo/Lz8zPpIlWuXDljoOjy5cvKxsZG7dixw7j+0aNHSqfTqQkTJiillPrpp5+Ur69virq5K6W1uPbw8DBJi42NVQ4ODmrYsGEm6VmyZDF2Od+9e7dJy5UOHTqotm3bpuiYYWFhysPDw+Q8lNICMh9++KHxcVBQkHJyclLDhw9Xbm5uJkEkS86fP68AY7CrXr16qkmTJknmb9KkiRo4cGCy+9y/f7/KnDmzun//vkl64cKF1cKFC5VSSvXr10/VqVPHuG7z5s3KxcXFGNwbOHCgKlq0qIqNjTXm2bp1q9LpdMbhBGrWrKmGDx+ulFIqIiJCWVlZqf3795scM2vWrCbd6tzd3dWGDRtM8nTu3Fl17NhRKaXU3bt3VaZMmdTKlStN8lSoUEF99tlnSimldu7cqQCTANy///5rfM1JoEi8CmQwayFe0rx581iyZAnFixdnwYIFGV2cDFOjRg2mTp3Kxo0biYqKws/Pj1mzZtGnT5+MLpoQQggh0ll8fDwdOnRg69at7NixwzijEsCxY8eSnLlJKcWxY8eoWbMmoA3ObGdnx4EDB7Cyejphc2RkJBUrVjQbrDuhoKAg7t69a5ztC+DBgwc0bNjQ+PjEiRO
0b9/eZNr5hDN8LV68mMDAQGrUqGFMs7GxQafTGc+pYsWKREREEBAQwAcffECrVq0oUqRIkuU6duyYSZkAzp07R1RUFB999JExLTw8nIcPH+Ln52csa0BAAHZ2dgDUr1+fDh06cO3aNVq1akWrVq3IkSOHxWOuXbuWBw8emA2EHBMTQ+fOnY2Ps2XLRvfu3Rk9ejQTJkwwmzBl2bJl/PDDD1y6dInQ0FDi4+NxdnbG3d2d69ev88cff3Ds2LFkz/1ZE1X8+OOPhIeH4+vra5L+5MkTbGy0r69FihRhzZo1gPaaGTJkCIMHDzZOXLJ06VJGjBhhzA/aQO/q/7PHgVafH3/8sfFvpZTJ83b79m3u379vfK6uXr1KaGio2XN34sQJ4+yzq1evxt3d3ewc7ezsjLOJnThxgjx58phMQX/16lXc3NxSNGuyEOlBAkVCvKQFCxa81QEig7Zt29K2bduMLoYQQgghMpghSLRlyxa2b99uEiQCiI2NNQnMJLRp0yauX79Ox44dAfjnn38oVKgQJUuWNMn3119/AZilJ3T8+HEyZcpEgQIFAC2gkDdvXuMX/djYWM6ePcukSZNMtouOjmb+/PlkzZqV48ePmx3j+PHj6PV643kVLFiQK1eusGnTJn777TeKFy/O1KlTjbPXWipX4mDD8ePHyZEjB7ly5TJJs7W1JTAw0OJ2bdq0oWLFiqxdu5alS5cyZMgQduzYQfny5S0es169esycOdNsXZYsWYx/37x5k19++QVbW1s8PDxM8o0aNYq5c+cyatQoSpcujZubG9OnTzfO1nb8+HHs7OzMnm+DkJAQbty4YXbulso6bNgw2rVrZ7bunXfeAaBw4cLcunWLiIgINmzYwJ07dxgwYACgBQODgoIoXLiwybZnzpwhZ86cuLu7c+vWLe7fv28sqyF44+LiYsx/5MgRHB0dyZ8/v7FcWbJkwcfHx5gnNjaWM2fOGM/p9OnTBAYGmgQ19Xo9586dMwbdTpw4Qbly5czO2VCWzp07mwTvhMgIVs/OIoQQQgghhBDPFh8fT8eOHY1BohIlSpjlKVGiBHv37kUpZZIeHBxM3759admyJVWrVgW0FkWWgkFHjx7F2dnZGASy5Pjx4xQuXBhra2tAa6Fz7do14xfys2fPEhMTY1LGsLAwrl69asxjZWXFkydPTPY7bdo0smXLRvbs2Y1prq6ufPDBB6xatYp27dqxf//+JMt18uRJs2DK8ePHzerq2LFjBAYGGlsQnThxwmw7Hx8f+vbty4EDB/D09OTo0aMWj2lra0t4eDh+fn5miyFQFBoaSt26dWnUqBGjRo1i9OjRREVFGffx008/8eWXX9KzZ09Kly5Njhw5+OOPP4xlsrW1JS4ujsjIyCTP287OjoCAgCTrxrCf2NhYi2XNlCkTgDEI9O+//zJs2DCGDx+Ok5MTAE5OTmbPW1xcHDNnzjQGn44fP46rqyt58uQB4PLly/j7+5uUY9myZRQuXNgY9Dl58qTFlmDR0dHGOnBxcTF7vSxfvpxHjx7RtGlTQHseLT3XSQXYhMgIEigSQgghhBBCvDS9Xk/Hjh1Zu3YtS5YsIUeOHAQFBZks8fHxDB06lJMnT9K1a1eOHz/OpUuXWLhwIWXLliVv3rz8/PPPgNYCyFKLHtACRcWLFzdpuZFYwlYaoH1Bt7a2plChQsbH77zzDp6enibbJGzFU6FCBdavX8+6deu4fPkyQ4cOZfXq1cb9/vXXXwwfPpxDhw5x7do1fvvtNzZt2pRs9yq9Xs/58+f577//CAsLMx43cfAgYfnj4uI4ffq0MVDx448/Mm/ePE6ePMnly5eZNGkSISEh1KlTx+Ix69Wrx19//cWYMWO4ePEip0+fZsWKFUyfPh3QWlE1bdqUPHnyMGfOHPr27UtUVBRz58417sPDw4OtW7dy6dIl9u7dS7Nmzbh165ax3O+++y5ubm706tWLs2fPcubMGb799lsuXrxoPG/DcxoUFERMTIzFstavX5/Zs2ezYsUKrl69yj///MPMmTPZuHGjMY+zszO+vr4MHDgQKysrunXrZlzn4OBAtWrVGD9+PKdPn+b48eO0aNECGxsbhg4daqzbokWLotPpAMiaNSvnz58nPDyc2NhYJk6cyNq1a00CQ3q9nuDgYC5fvkxQUJBxPzlz5jQG2+rVq8fBgwdZvHgx165dY9GiRfTu3ZuZM2fi4eFBXFwcZ86cSfa5FuJV8MZ3PdPr9fz333+4uLgYPwiEEEII8epRSvHo0SO8vb2T/fIn0p7cP4kXceTIEZYtWwZoX/YT0+l03Lhxg6JFi7J27Vq+/vprKlasiE6no0CBAvTp04cuXbqg1+sJDw/n4sWLPHr0iIIFCxIeHm52rIoVK5qlJ3T06FF69OhhzPP3339ToEABoqOjiY6O5tChQxQuXNhkHwcOHMDf35+oqCiioqJo3749hw8fpn379tjb29OkSRMqVKhAQEAA4eHhhIeHs337dmbOnElMTAx+fn6MHz+e+vXrJ1m2L7/8khEjRjB+/Hj69u3L2LFjOX78OB07djTZ5p9//qFt27aEh4dz5swZoqOjyZcvH+Hh4URERPDjjz8yaNAgbG1tKV68OKtXr8bLy8vicUuWLMl3333HjBkzmDBhAo6OjhQqVIj+/fsTFhZGly5dCAsLY/ny5Tx+/BiAfv36MX78eFq3bo2LiwuTJ0+mT58+FClShCJFijB48GC2b9+On58f4eHh2NrasmLFCoYNG0aZMmWwtbWlTJkyNGjQgPDwcIoWLUrjxo2pXr06jx8/5uDBgxZbF/Xp04dHjx7x+eefExQURNasWSlTpgxVqlQxObeAgAD++OMPFixYYNaKadasWQwcOJDy5ctjb29PgwYN2LBhA6C1LDt8+DCBgYHG/bVp04bff/+dXLly4e3tTZcuXfDz8zN57bVq1YpNmzYRGBiIi4sLV65cMXsNFStWjKlTpzJ8+HCCgoLw9/dn7ty5NGzY0OLzCFpLrmvXrhnrUYjnlRb3TzqVuM3nG+bWrVsm/UiFEEII8Wq7efMmOXPmzOhivNXk/kkIIYR4vaTm/dMb36LIMCDZzZs3cXV1TbX9Gpoeenp6yq+eaUjqOX1IPac9qeP0IfWcPtKqnsPDw/Hx8TEZTFRkDLl/er1JPac9qeP0IfWcPqSe015a1nFa3D+98YEiQ3NpV1fXVL/RiYqKwtXVVd5MaUjqOX1IPac9qeP0IfWcPtK6nqWrU8aT+6fXm9Rz2pM6Th9Sz+lD6jntpUcdp+b9k7wKhBBCCCGEEEIIIQQggSIhhBBCCCGEEEII8X8SKBJCCCGEEEIIIYQQwFswRpEQQjxLfHw8sbGxaXoMvV5PbGwsUVFR0vc7DUk9p48XrWdbW1usra3TsGRCCCGEEOJlSaBICPHWUkoRFBREaGhouhxLr9fz6NEjGag3DUk9p4+XqWd3d3eyZ88uz48QQgghxCtKAkVCiLeWIUjk5eWFo6Njmn5xVUoRFxeHjY2NfEFOQ1LP6eNF6lkpRWRkJPfu3QMgR44caVlE8Q
pSSnE/8j43H93EytkKTydPeZ8KIYQQryAJFAkh3krx8fHGIJGHh0eaH08CGOlD6jl9vGg9Z8qUCYB79+7h5eUl3dDeEqFRoSw8vpBZh2ZxOeSyMT1f5nz0LduXTsU74e7gnnEFFEIIIYQJGcDhOUXFRbH4xGJazKlC69F5aTGnCotPLCYqLiqjiyaEeA6GMYkcHR0zuCRCvF0M77m0HhdMvBq2XNpCzmk5+XTLp1wJuWKy7krIFT7d8ik5p+Vky6UtGVRCIYQQQiQmgaLnsP78erynetNxbUfWPdjPnmxPWPdgPx3XdsR7qjcbzm/I6CIKIZ6TtDoRIn3Je+7tseXSFhosa8CT2Ceo//9LyJD2JPYJDZY1kGCREEII8YrI0EDRnj17aNSoEd7e3uh0OtauXWuyXinF8OHDyZEjB5kyZaJWrVpcvHgxQ8q6/vx6mq5oSmhUKAD6/9/nGv4PjQqlyYomrD+/PkPKJ4QQQoi3w+tw/xQaFUrzlc21gc/RJ5tXjx6lFM1XNjfeZwkhhBAi42RooOjx48cUK1aMOXPmWFw/adIkZs6cybfffsvff/+Nk5MTderUISoqfbt5RcVF0XltZwCzX8MMDOmd13aWbmhCCJEOFixYgLu7e0YX45VhKWDwInLnzs2MGTNeej8i7bwO908Ljy8kMjbymUEiAz16ImMjWXRiURqXTAghhBDPkqGDWderV4969epZXKeUYsaMGXz11Vc0adIEgEWLFpEtWzbWrl1LmzZt0q2cv57+lZCokGfmUyhCokL47cxvtC/aPh1KJoR42zyr286IESMYOXJk+hQGuHTpEuPGjWPbtm0EBwfj7e1NuXLlGDhwIKVLl0614+TOnZv+/fvTv39/Y1rr1q2pX79+qh0jI61Zs4aJEydy9uxZ9Ho9uXLl4r333kvTgM2CBQvo378/oaGhJumHDx/GyckpzY4rXl563j/Fx8cTHx9vlq7T6bCysjLJl7AMsw7Neq7jGMz8eyZ9y/Y1ftZZOnZCCQdEfxXy6vVa66jUyGtlZWWsh+fNa3je4uPjzbZ7mf2+TnmVUuj1SQcqE76GXyRvwnpOWKaX3e/rkheSf288T15I+j1nqZ5ftff96/gZkThvwnrW6XSv1Hv5df2MSMxQx3q9PtXfn896Db6IV3bWs6tXrxIUFEStWrWMaW5ubrz77rscOHAgyRud6OhooqOjjY/Dw8MBjB/oL2LNuTVY6azQq2dvb6WzYvXZ1bQt3PaFjiVMGd7sL/rciZR5G+vZcM6GJT0YjvMyx/vvv/+Mf//yyy+MGDGCc+fOGdOcnZ1NjhMfH4+NTdp81B85coRatWpRuHBhvv32W/z9/Xn06BHr1q1j4MCB7N69O1WPl/i5cnBwwMHBwaw+U6Oe09OOHTto3bo1Y8eOpXHjxuh0Os6cOcO2bdue+xye5/WcVD1lzZrVYnpKt09pGS1dl9+mz6C0ktr3T/v377cYOMySJQtFihQxPt63b5/x+QuLDTOZ3SylFIrLIZe5//g+Ho7abJSHDh1KsiWUo6MjZcqUMT4+cuQIkZGRFvM6ODjw7rvvGh8fO3aMR48eWcxra2tLhQoVjI///fdfs4CqgZWVFZUrVzY+PnnyJA8fPrR8gkDVqlWNf58+fZr79+8nmbdSpUrGL43nzp3j7t27SeYtX748dnZ2AFy8eJHbt2/z+PFjnJyczH5gePfdd3FwcADg8uXL3Lp1K8n9li5d2vj8X7t2jevXryeZt0SJEri6ugJw8+ZNrly5kmTeYsWKGVuE3r59m0uXLiWZt3DhwsbZSYOCgjh//nySeQMCAvDy8gK02RXPnj2bZN6CBQuSPXt2AB48eMCpU6eSzOvn58c777wDQGhoKCdOnEApZbGO8+bNi4+PD6C9j44dO5bkfn19fcmdOzegtRQ8cuRIknlz5sxJvnz5AIiKiuLvv/9OMq+3tzf58+cHICYmhgMHDiSZN1u2bPj7+wPal819+/YlmTdr1qwUKlTI+HjPnj1J5k3uMyIxd3d3ihUrZnx84MAB44QHievZxcWFkiVLGvPKZ8SLfUYkvJ8E03ouV66cfEbw8p8RiRnquHDhwvj6+gKp9xnx+PHjJPfxol7ZQFFQUBCgfXgllC1bNuM6SyZMmMCoUaPM0oODg1+4yXVQWFCKgkQAeqUnKCyIe/fuvdCxhCm9Xk9YWBhKKZNfJUTqehvrOTY2Fr1eT1xcHHFxcWl+PEPQBl5uMF/Dl3gAFxcXdDqdMe3PP//kvffeY/369YwYMYJTp07x+++/s2jRIkJDQ1m1apVx24EDB3LixAm2b98OaK+ByZMn89NPPxEUFET+/PkZOnQozZs3T/J8OnfujJ+fHzt37jR53RQuXJjevXsb6/XkyZMMHDiQgwcP4ujoSLNmzZg8eTLOzs4AdO3aldDQUCpWrMiMGTOIiYmhVatWTJ06FVtbW2rVqsX169cZMGAAAwYMALQb30WLFjFw4ECCg4MBGD16NOvXr6dfv36MGTOGkJAQ6tSpw7fffouLiwsA+fPnp2/fvvTr189Y3tKlS9O4cWOGDx8OwI0bN+jfvz+7du3CysqK2rVrM2PGDOP1yFDe5Opz1apVjB07lsuXL+Po6Ejx4sVZtWqVxS/c69evp0KFCnz66afGtLx589KwYUOT1+Z3333H9OnTuXnzJrlz52bIkCG0b2/aejU+Pp64uDjja+HevXvGG63jx49TtmxZLly4wPXr1+nSpQuA8bn76quvGD58uFkdWaqP6dOnG193Y8aMYf369fTv359Ro0ZZrPeE4uLi0Ov1PHjwAFtbW5N1Sd2Ui5RL7funpL5U2djYmNzrPH782Pgl8EH0g+cud0Kjvr5LjZLWFC0ay6NHj4iJibGYLz4+3qQMjx49SvJeLzY21iRveHh4is8tPDw8yZtwKyurFOcFnjuv4UtgWFhYsnmDg4ON76fQ0FAeP35srIvE15zg4GDs7e1N8ibl/v37xvXPyvvgwQPjMUNCQp6Z1/C8piSv4fr58OHDZPMm/AKekryGz79nnVtISIixfh89esTjx49RSlms45CQEGP9Pn78ONn9hoaGGl8TT548SXHe6OjoFOeNjY1NNm9YWJgxb3x8fLJ57ezszN73SUnuMyKxxO+jiIgI4/UvcT0rpcze9/IZ8WKfEQklrGf5jHi6/mU+IxIz1HFISAiZMmUCUu8zIqnX6st4ZQNFL2rIkCHGLxKgvcF8fHzw9PQ0RjCfV3a37FgFpbxFUXa37MZIpXg5er0enU6Hp6fnWxPAyAhvYz1HRUXx6NEjbGxs0qzFjSWJvxi/DMNzZSi/4Wbhq6++YvLkyeTNm5fMmTOzZMkSrKysTM7T0KzYkDZu3DiWLl3KvHnzyJ8/P3v27KFz585kz57d5Nctg2PHjnHmzBmWLl1q/IUqIUMQ4fHjxzRs2JDy5ctz6NAh7t27R7du3fj000+ZP3++8Tz+/PNPvL292blzJ5cuXaJNmzaUKFGCbt26sXr1aooXL063bt3o1q2b8ZwTn7+VlRVXrlxh48aNbNiwg
ZCQEFq3bs2UKVMYN26cSb0lrgtDml6vp0WLFjg7O7N7927i4uLo06cP7du3Z9euXcbtk6vPO3fu0KFDByZOnEizZs149OgRe/fuxdra2uJrLUeOHKxYsYJz585RuHBhi8/1mjVrGDBgANOnT6dWrVps3LiRbt264evrS/Xq1Y35DMcwvBYSvr4T/l+5cmWmT59u0iLN2dnZpC6Tq48OHTqwdetWbG1tU1zvBobnzsPDw/iLpUHixyL9JHX/VLNmTYv3T4m7lbz33nvGv+9H3oekf3h9pjnTcjDnSRZ0OkVgYA1Kl4bSpRVlykDhwpDwIydhNw1Ln1UJJcxr+PU5tfNmzZo1xd1KnpU3YReJ580bHx9PcHCwxev6y+w3YQuRZ+VN2PLkWXkDAgJSnLdgwYIpyuvp6WlsVWNJwtewp6ensbVOSvLmzp0bvV5vsY4TdxXJlStXivarlCJnzpwpzuvt7Z3ivIZWESnJW6dOnRTlBZ4rb8LPCEsSvjdq1qxp/NtSPSfMm/Aa+Kz9ymdE0nkT1rONjY18RvDynxGJGerYy8vLeL+VWp8RhlbAqemVDRQZPtDu3r1Ljhw5jOl3796lePHiSW5nb29vjIAmZLixfxHN/Jux5tyaFOXVKz3vB7z/1nzZTg+GN4jUadp62+rZcKFI2A8boHRpSOZH95egAEOQyPTX3ezZIZnW5kkylDvx/6NHj6Z27dpJ5k+8TXR0NBMmTGD79u2UL18egHz58rF//36+//57qlWrZrYvQxPggICAZFtILV++nKioKBYtWmRsTTN79mwaNWrExIkTja0eMmfOzJw5c7C2tiYgIIAGDRqwc+dOunfvjoeHB9bW1ri6uppcDyydv16v56effiJz5szodDo6dOjAzp07zc49cZkNaTt37uTkyZNcvXrV2HVg0aJFFCpUiCNHjpg0YU+qPoOCgoiLi6N58+bGpsVFixZNso769evHvn37KFq0KL6+vpQrV47atWvTrl074/Vs6tSpdO7cmd69ewNac+i///6bqVOnUqNGDbPzSFgeS/Vkb2+Pu7s7Op3OpE6fpz7++ecfypUrZ6z3BQsWGFsQWar3xPu29Hnztnz+pKXUvn+ytbVNUYA74XOX3TU7+TLn40rIlSQnAbFIr4PQvPAkCwBK6Th92obTp2HhQkM5oXhxKFNGW8qWhQIFwMrq+V4/b3peQ7DXEMzNiDJkdF4w/dKd2nn1en2K6jgty5DRedPjeX5WPb8Kr7U3IW9S9fyqljc18kL6vo8MdZzwx87UKkNq/hBt8MoGivLkyUP27NnZsWOH8cYmPDycv//+m169eqVrWVoGtuCTlR8SahuPSqbHiE6Be6w1LQIsd9UQQrz6goLg9u202POLdzd7Xs87iPSlS5eIjIw0+7UvJiaGEiVKWNwmpePSnD17lmLFipl0uapYsSJ6vZ7z588bA0WFChUyufjlyJGDkydPPtd5gDbodcLuTjly5HiursBnz57Fx8fHGBQBCAwMxN3dnbNnz5oEipJSrFgxatasSZEiRahTpw61a9emRYsWZM6c2WJ+JycnNm3axOXLl9m1axcHDx5k4MCBfPPNNxw4cABHR0fOnj1L9+7dTbarWLEi33zzTYrP7UU8qz7KlSsHvHy9i9TzKtw/6XQ6+pbty6dbPn125oTbWUFzn35k663j3Dm4eBFu3YKEvVWio+Hvv7XFwNUVSpV6GjwqUwZy5YKX6OUrhBBCvNUyNFAUERFhMjDV1atXOX78OFmyZCFXrlz079+fsWPHkj9/fvLkycOwYcPw9vamadOm6VpOh517WPhbPE0+0IJBSQWLFLDwt3gcquyBZJpiCiFeXcm0zn5JCQMr5i2KUlPicXCsrKzMAjuGQSJB+ywG2LRpk3EgPgNLLQwAChQoAGiDJyYVTHoeiX8JMbRSSe39PKsuUuJZ+7C2tmbbtm389ddfbN26lVmzZvHll1/y999/kydPniT3my9fPvLly8dHH33El19+SYECBfjll1/48MMPn6t8hjKCaUDvec/zeaTW8ydS5nW4f+pUvBNf7vySJ7FP0PPs14IOK+ytMvF+vo44F3x6GxUTA1euwNmzcO4cXL4MiWOQ4eGwa5e2GHh6mgaOypQBGRVACCGESJkMDRQdOXLEpF+poW98p06dWLBgAZ999hmPHz+me/fuhIaGUqlSJTZv3py+YxgoBcOG0eiSFWtX6OncFEIygZUe9FZP/wdAB7ZKB8OGQe3a8lOWEK+hF+kClhJKaYP4av2+0+YYSfH09DSbqeH48ePGL/eBgYHY29tz48aNZ/bfNyhevDiBgYFMnTqV1q1bmzX1DQ0Nxd3dnYCAABYsWGCcSQO0WZSsrKyS7UOemJ2dXapM/enp6cmdO3eMj8PDw7l69arxcUBAADdv3uTmzZvGVjRnzpwhNDSUwMBA4z6Sq0/QAiUVK1akYsWKDB8+HF9fX+M4QymRO3duHB0djYMWBgQEsH//fjp16mTMs3//fmOZLJ0nwJ07d4wtmY4fP26SJyV1mpL6EOnvdbh/cndwZ1WrVTRY1gArZZVssEiHFTp0DMm7Gmcbd5N1dnbg768tBhERWmujs2fh/HktkBQWZrrP4GD4/XdtMciVS+tebAgclSoF7qaHE0IIIQQZHCiqVq1ast0XdDodo0ePZvTo0elYqkRiYuDGDdDraXwe/psKvwXCGn94mAmyPIFm5+CvnDCvLHRprDj523U8YmK0jvRCCJHBatSoweTJk1m0aBHly5dnyZIlnDp1ytgSyMXFhUGDBvHpp5+i1+upVKkSYWFh7N+/H1dXV5PghIFOp2P+/PnUqlWLypUr8+WXX+Lv709ERAQbNmxg69at/Pnnn7Rr144RI0bQqVMnRo4cSXBwMH379qVDhw5mszIlJ3fu3OzZs4c2bdpgb29vMvvb89bFggULaNSoEe7u7gwfPtyky1utWrUoUqQI7dq1Y8aMGcTFxfHxxx9TtWpVY5e+Z9Xn33//zY4dO6hduzZeXl78/fffBAcHJzkQ48iRI4mMjKR+/fr4+voSGhrKzJkziY2NNXYHHDx4MK1ataJEiRLUqlWLDRs2sHr1auMsa4n5+fnh4+PDyJEjGTduHBcuXGDq1KlmdRoREcGOHTsoVqwYjo6OODo6muRJrj5KlSr1Qs+BeHmvxf0TUMevDpvabqL5yuZExmozsiQcs0j3/9aV9laZGJJ3NSVczcdWs8TZGUqU0BaDBw/gwgUteHThAly7BokngblxQ1tWr36alj//0+BR6dLaPv8/IaMQQgjx1nplxyh6Zdjbw+HD2k9TgAPQHmir1/Pw4UOyZMnC2X/jKdvzfXbl+Y9znvDRsLKstrNLxxFJhBAiaXXq1GHYsGF89tlnREVF0aVLFzp27GgyBtCYMWPw9PRkwoQJXLlyBXd3d0qWLMnQoUOT3G/ZsmU5cuQI48aNo1u3bty/f58cOXJQoUIFZsyYAYCjoyNbtmzhk08+oUyZMjg6OtK8eXOmTZv2XOcwevRoevToQb58+YiOjk7xGEmJDRkyhKtX
r9KwYUPc3NwYM2aMSYsinU7HunXr6Nu3L1WqVMHKyoq6desya9YsY55n1aerqyt79uxhxowZhIeH4+vry9SpU6lXr57FMlWtWpU5c+bQsWNH7t69S+bMmSlRogRbt241trpq2rQp33zzDVOmTOGTTz4hT548zJ8/3+JA46B1BVu+fDm9evWiaNGilClThrFjx9KyZUtjngoVKtCzZ09at27NgwcPGDFiBCNHjjTZT1L1MXPmzBepfvEWquNXh1sDbrHoxCJm/j2TyyGXjeuy2eWlkVc/anh0wsna7aWO4+EB5ctrC2itOIOCtBZHZ8/CpUtw/br2+19CFy9qy/Ll2mOdDgIC+P9Ma9pSrBgkiqEKIYQQbzSdetG77ddEeHg4bm5uhIWFWZze9UXp9Xru3buHl5cXVlZWfFPpVypebUX5rhBnDUuaLaFd0Xapdry3VeJ6FmnjbaznqKgorl69Sp48edKlO4ZSKkHXMwkjpxWp5/TxMvWc3Hsvra7Z4vmlxXOhlCIo7D4r1lzDVp+bXJ5Z0/V9Gh+vTVZw/rw23tGlS3DzJsTFJb+dtTUUKqQFjUqV0v4vWhTScySE5/U2XtfTm9Rx+pB6Th9Sz2kvLes4La7Z0qIolTRZ3IJ7+coy/M9DDK8BvTb2popvFXzcfJ69sRBCCCHEG06n0+Hh6IGnbTxxcR7pHsy1ttbGKcqVCwyTPMbFaS2NLlx4Gjy6fdt0prX4ePj3X235+WctzcYGChc2DR4VKSKjDgghhHgzSKAoleTOo2Nzm8kM+aUqGwvAoZxhdF7XmW0dtmGlk6isEEIIIcSrxsYG8uXTFkPv0JgYLXh07pzW+ujyZbhzxzR4FBcHx49ry48/amm2tlrwyBA4KlVKgkdCCCFeTxIoSkUfzKvCtt8as2T1eor0tGbn1Z3M/Hsm/cv1z+iiCSGEEEKIFLCz0wa5zp8fGjXS0qKj4epVLXh04YIWPAoK0sZCMoiNhWPHtMVS8KhkSe3/V73bmhBCCCGBolTk5gYPB08g7/iNzNgaT6+G8MX2L6idrzaBnjKNsBBCCCHE68jeHvz9tcUgKgquXNFaHZ0/rwWSkgseGdjYaGMeJQweFSsGmTKl3/kIIYQQyZFAUSprNTKQVXO70uPIDyws6M7B/KG0X92egx8dxM7aLqOLJ4QQQgghUoGDAwQGaotBSoJHcXFw4oS2GMY8srbWZlszBI5KloTixcHZOV1PSQghhAAkUJTqbG3Ba85IItstZfW6UPL0duZY0DFG7R7FuJrjMrp4QgghhBAijTwreHThghY8unPHNHgUHw+nTmnLokVamk4HBQtqQSPDUqIEuLun6ykJIYR4C0mgKA1Ua+vNipEDaXNxDJPXO9OvdQRf7/+aBgUaUMGnQkYXTwghhBBCpBNLwSPDmEfnz8PFi1og6b//TAfMVkobE+ncOVi27Gl63rxawChh8ChbtvQ7HyGEEG8+CRSlkVIrBnOv1Lf0PRvEnH/Lcr7oITqs6cCJnidwtpN2xEIIIYQQbytLYx4lnG3t0iUteHTrltbaKKErV7Rl1aqnad7eT4NGJUpo3dZkwGwhhBAvSgJFaSR/SRfW1BxBsx192LDpCiX8fbgScoWBWwbyXaPvMrp4QgghhBDiFZJwtjWD2Fi4eVPrsnbxojbb2q1bWlApof/+05aNGw0pVri7e1GypM4YPCpZEgoU0MZDEkIIIZJjldEFeJPVWNGdy9b5yR99nxbLqqNDx/dHv2fjhY3P3lgI8XrZvl3rV7B9e0aX5LlUq1aN/v37p/lxHjx4gJeXF9euXUvV/aZW+b/44gv69u378gUSQohUZGurdTWrWxf69oUZM+CXX2D2bPjkE6hfXxvHyFLrodBQK3bu1DF1KrRvr12iXF2hfHn4+GP4/ns4fFgbQ0kIIYRISAJFacgtqy1Xu00AYM613/A53x2Aruu7Evw4OCOLJoRITUrB0KFw9qz2f8IRStNA586d0el0fP311ybpa9euRafTPde+Vq9ezZgxY1KzeBaNGzeOJk2akDt3bgCuXbuGTqezuBw8eDDNy5PYoEGDWLhwIVeuXHmp/WzevBmdTkdQUJBJeo4cOYznbmCogx07dgBa0MtQBw4ODhQoUIAJEyagLLyeFi5cSJkyZXB0dMTFxYWqVauycaPpjxC7d+9Gp9NRqFAh4hP1XXF3d2fBggUmaceOHaN169bkyJEDe3t7fH19adiwIRs2bLBYBiFExrC2hly5oGZN6NkTJk/Wgkfffw+ffQZNm0KRIgpX13izbSMj4eBBmDcPevSAsmW1mdWKFIEOHWDaNNi5Ex4+TP/zEkII8eqQQFEaqz7rff51LIcTkQz+NRZvm0Lce3yP7hu7y423EG+KrVu1n2VB+3/r1jQ/pIODAxMnTiQkJOSl9pMlSxZcXFxSqVSWRUZG8tNPP9G1a1ezddu3b+fOnTsmS6lSpdK0PAnFx8ej1+vJmjUrderUYd68eS+1v0qVKmFjY8Pu3buNaWfPnuXJkyeEhISYtKjatWsX9vb2VKxY0ZjWrVs37ty5w/nz5xkyZAjDhw/n22+/NTnGoEGD6NGjB61bt+bff//l0KFDVKpUiSZNmjB79myzMl25coVFhmmUkrBu3TrKlStHREQECxcu5OzZs2zevJlmzZrx1VdfERYW9mIVIoRIFzodZM8OlSpBly4wdqxi8eJgFizQM2wYtG4NpUtD1qzm2xpmXFuyBAYO1AJQHh7g66sFnUaOhLVrtfGT5NZVCCHeDhIoSmPWNjrUpMkA9IxbSNaFo7G1smXtubUsPLEwg0snhHhpSsGwYU8HfbC21h6n8d10rVq1yJ49OxMmTEgyz4MHD/jggw945513cHR0pEiRIixfvtwkT8KuW0OHDuXdd98120+xYsUYPXq08fGPP/5IQEAADg4O+Pv7M3fu3GTL+vvvv2Nvb0+5cuXM1nl4eJA9e3aTxdbWFtBaTjVt2tQkf//+/alWrVqSx4qOjubzzz8nZ86cODk58e6775oEbRYsWIC7uzvr168nMDAQe3t7bty4AUCjRo1YsWJFsufyLM7OzpQpU8bkmLt376ZSpUpUrFjRLL1cuXI4JOgz4ujoSPbs2fH19eXDDz+kaNGibNu2zbj+4MGDTJ06lcmTJzNo0CD8/PwICAhg3Lhx9O/fnwEDBnDz5k2TMvXt25cRI0YQHR1tscyPHz+ma9euNGjQgE2bNlG7dm3y5s1LQEAAXbt25cSJE7i5ub1UvQghMkbmzFCmDLRrB8OHw88/w/LlMH48dO4MFSvCO++AlYVvBDduwLp1MGoUNGsGuXNrAaQaNWDAAFi0CE6cMB8vSQghxOtPAkXpoFjvShx+pwk2xDPq4iLejdS+cPX7ox/XQq9lbOGEEE8pBY8fP9+yfr3WisjQtSc+Xnu8fn3K9/ECQSVra2vGjx/PrFmzuHXrlsU8UVFRlCpVik2bNnHq1Cm6d+9Ohw4dOHTokMX87dq149ChQ1y+fNmYdvr0af7991/atm0LwNKlSxk+fDjjxo3j7NmzjB8/nmH
DhrFwYdKB771796ZbK6E+ffpw8OBBli9fzr///kvLli2pW7cuFy9eNOaJjIxk4sSJ/Pjjj5w+fRovLy8AypYty61bt156HKXq1auza9cu4+Ndu3ZRrVo1qlatapK+e/duqlevbnEfSin27t3LuXPnsLOzM6YvX74cZ2dnevToYbbNwIEDiY2NZVXCqZDQgmtxcXHMmjXL4rG2bt3KgwcP+Oyzz5I8p+ft0iiEeHU5OUHhwvD++/D551o3tF9/henTtbGL3nsP/Py0mdkSCwmBXbu0vJ06abOrOTtrg2V/+KE2htLu3Vo+IYQQry+Z9SydvLPoa+JqbqQp65g+pT8lZ1Tk6P39dFzTkV2ddmFtJVNQCJHhIiO1O97UkKgljA6wTSpvRIR25/6cmjVrRvHixRkxYgQ//fST2fp33nmHQYMGGR/37duXLVu2sHLlSsqWLWuWv1ChQhQrVoxly5YxbNgwQAsMvfvuu/j5+QEwYsQIpk6dyvvvvw9Anjx5OHPmDN999x2dOnWyWM7r16/j7e1tcV2FChWwSvRTdkRERArO3tyNGzdYsGABly9fJleuXOh0OgYNGsTmzZuZP38+48ePByA2Npa5c+dSrFgxk+0NZbx+/brZeELPo3r16owfP547d+6QI0cO/vzzTwYPHkxcXJyxa9uVK1e4ceOGWaBo7ty5/Pjjj8TExBAbG4uDgwP9+vUzrr9w4QL58uUzCR4lLL+rqysXLlwwSXd0dGTEiBEMHTqUbt26mbUOMuQvWLCgMe3w4cMmZVuxYgUNGzZ8wRoRQrzqbG0hXz5tqVtXS9PrISgILl3SlsuXtVZGiXuixsbC8ePaklCuXFogqVixp//nyWO59ZIQQohXiwSK0ol3DX+OlPqI0v98x8S4Lxj4+1KcKxVn7429TDswjcEVB2d0EYUQr6GJEydSo0YNk4CQQXx8POPHj2flypXcvn2bmJgYoqOjcXR0THJ/7dq14+eff2bYsGEopVi+fDkDBgwAtC5Kly9fpmvXrnTr1s24TVxcXLJdk548eWLSvSqhX375hYCAgJSebrJOnjxJfHw8hQoVMkmPjo7Gw8PD+NjOzo6iRYuabZ8pUyZAa3Fkyfjx443BJoAzZ86QK1cus3wVKlTAzs6O3bt3U6xYMZ48eULJkiXR6/UEBwdz9epVdu/eTaZMmcy647Vr144vv/ySkJAQRowYQYUKFahQoYJJnhcZ365r165MnTqViRMnmpxDUooWLcrx/3/ry58/P3Fxcc99TCHE683KCry9taVKlafpoaFw5QpcvKgFj65fh7t3tcBSQjduaMv69U/TXFygaFEtaGQIIBUuDMlcloQQQmQACRSlo0IrRxDpt5hy6m9y/H6MIk1m8N2dj/hq11fU8atD0WzmX1yEEOnI0VFr3ZMSSkHVqtoADfHmM8tgba3dBf/5J+h0KKWIi4vDxsbGvBvPS9whV6lShTp16jBkyBA6d+5ssm7y5Ml88803zJgxgyJFiuDk5ET//v2JSWZAiQ8++IDPP/+co0eP8uTJE27evEnr1q2Bpy19fvjhB7OxjKytk24VmTVr1iQH3fbx8TG2VkrMysrKLCgSGxub5HEiIiKwtrbm4MGD2Nvbm9Szc4KWYpkyZbLYlerh/6f58fT0tLj/nj170qpVK+PjpFpJOTo6UrZsWXbt2sXDhw+pVKkS1tbWWFtbU6FCBXbt2sWuXbuoWLGiWcsgNzc3Y32sXLkSPz8/ypUrR61atQAoUKAA+/btIyYmxmzb//77j/DwcAoUKGBWJhsbG8aNG0fnzp3p06ePybr8+fMDcP78eWPgyt7ePsnnRQjxdnN3h5IltcUgOloLCl26pAWQrl2Dmze19IQePYL9+7XFQKeD/PmfBo8MS86c2johhBDpTwJF6ShT3hycbzWIgr+MZgJDqDXqNPUnruf3y+tpv7o9h7sdxt7GQodwIUT60OlS3gVsyxY4ejTp9fHx2vr9+6FOHS2wFBcHNjapfuf79ddfU7x4cZOuQwD79++nSZMmtG/fHgC9Xs+FCxcIDAxMcl85c+akatWqLF26lCdPnvDee+8Zx/DJli0b3t7eXLlyhXbt2qW4fCVKlGDJkiXPfV6enp6cOnXKJO348ePGwa4tHSc+Pp7g4GDjVPPP49SpU9ja2pq1SDLIkiULWbJkSdG+qlevzooVKwgJCTEZfLtKlSrs3r2bP//8k549eya7D2dnZz755BMGDRrEsWPH0Ol0tGnThpkzZ/Ldd9/Rt29fk/xTpkzB1taW5s2bW9xfy5YtmTx5MqNGjTJJr127NlmyZGHixImsWbMmRecnhBAJ2dtrwZ78+aFePS1Nr4d79552XbtyRQsm/T8mb6QUXLigLb/++jQ9c2bT1kdFi0KhQvD/xp9CCCHSkASK0lmB7wcRsuZb8sdcouF/P6A7+QOeWQ9w8t5Jhu0axqT3JmV0EYUQz5JwpjNLrYkMDDOg1a6dpsUpUqQI7dq1Y+bMmSbp+fPn57fffuOvv/4ic+bMTJs2jbt37yYbKAKt+9OIESOIiYlh+vTpJutGjRpFv379cHNzo27dukRHR3PkyBFCQkKMXdQSM7R4CgkJIXPmzCbrHjx4QFBQkEmau7s7Dg4O1KhRg8mTJ7No0SLKly/PkiVLOHXqFCVKlLB4nAIFCtCuXTu6dOnClClTKFmyJMHBwezYsYOiRYvSoEGDZM977969VK5c2dgF7WVUr16dMWPGEBQUZNItsGrVqkyePJlHjx4lOZB1Qj169GDMmDGsWrWKFi1aUL58eT755BMGDx5MTEwMTZs2JTY2liVLlhhbj/n4+CS5v6+//po6deqYpDk7O/Pjjz/SunVrGjRoQL9+/cifPz8RERFs3rwZSL7FmBBCWGJlBdmza0ulSk/TIyLg6tWn4x5duwb//af9lpJQSIjWKPfPP033WaDA0wCS4X9pfSSEEKlLhpNLZzpXF+K+HAnACEaxbKoDw0v8AMCUv6aw5/qeDCydECJFtm41neksKYYZ0LZuTfMijR49Gn2iASK++uorSpYsSZ06dahWrRrZs2c3m27ekhYtWvDgwQMiIyPN8n/00Uf8+OOPzJ8/nyJFilC1alUWLFhAnjx5ktxfkSJFKFmyJCtXrjRbV6tWLXLkyGGyrF27FtACTMOGDeOzzz6jTJkyPHr0iI4dOyZb9p9//pl27doxaNAgChYsSNOmTTl8+LDFsYQSW7FihcnYSy+jfPny2Nvbo5QymfHt3XffJTY2FmdnZ8qUKfPM/WTJkoWOHTsycuRI4/M7Y8YM5s6dy/LlyylcuDClS5dmz549rF271qyVUWI1atSgRo0aZmMONWvWjL/++gtHR0c6duxIwYIFqVGjBjt37pSBrIUQqcrZGYoUgWbNYNAgmD1ba0k0ezYMGACNGmnjFrm6mm+r18O5c7ByJXz5pZY3Vy7IkkXrDd63L/zwA/z9tzapqBBCiBejUy8yKuZrJDw8HDc3N8LCwnC1dMV5QXq9nnv37uHl5WU2Y88zxcZyP0dhsj64wBi+4s+aY8jVty
vzj/+Mr5sv//b6F1f71Cvr6+yl6lmk2NtYz1FRUVy9epU8efIkOdCyRUrBu+/CP/+Yj9xpiZUVlCqFOniQuPh4y2MUvQU2bdrE4MGDOXXqVJq+xpIdCyoZf/zxBwMHDuTff//FxkYa2z7Li9YzJP/eS6trtnh+afVcxMToWbnyHnFxXnh4vB3Xm4yglB64B3ih072+9RwWZtr66MYNrfXRs36nAa2FUd68Wqsjw1KkiJaWGo0k38Z7p4wg9Zw+pJ7TXlrWcVpcs+VuOCPY2uIyawK0bc4ApjFvRy86dJ3BbvddXA29yiebP2F+k/kZXUohhCUxMdqdakqCRKDlu3lT2+4t7r7ToEEDLl68yO3bt5PtGpVRHj9+zPz58yVIJIQQrxA3N21mtOLFn6bFxWnBosuXtQDS1avaZTYszHRbpbQ8ly9DwuHXHB21FktFijwNHhUpAlmzpscZCSHE60HuiDOIfZtmPBxVniznDzCSkXw5+Ht+3r6Iur9UYcHxBTQu0JhmAc0yuphCiMTs7bXuZMHBKd/Gy0vb7i2fYrx///4ZXYQktWjRIqOLIIQQIgVsbLTuZrlyQcKh3sLDtfGODMEhQ+ujxBN9RkbCoUPaklCOHE+DRoYlMBCep9GxEEK8KSRQlFF0OjL/OBkqV6IrPzHjdn92zK/EZ+99xsT9E+m+sTvlfcqT3Tl7RpdUCJGYj4+2PI83u5evEEIIkaFcXZ92MTOIj4e7d7UZ1y5d0gJJN27Agwfml+U7d7Ql4bCCVlbaTG6JA0h586bLKQkhRIaRQFEG0lWqSETtZjhvXcPXfEHzaes50mEUm7Nt5sTdE3y0/iM2fLDhrRzPRAghhBBCiJdhbQ3e3tqScOa1J0/g+nWt5dGVK9rft25prY0S0uvh/Hlt+e23p+mOjhAYqMPPz5XSpZ92YcuWTWZfE0K8GSRQlMGcZ04gPmA9jdUGysftYcAnVVi8bAmlfyjFpoub+PHoj3QrlTqz8AghhBBCCPG2y5QJ/P21xUApePjwafe1K1e0sY/u3DHvOR4ZCUeO6DhyxJEVK56me3hoASPDGEiFC0OhQtpYS0II8TqRQFFGK1gQ1bUb/PgtkxlMuZ0HObGtMONrjGfQtkF8uuVTauSpQb4s+TK6pEIIIYQQQryRdDot0OPhAaVKPU2Pj9eCRVeuaAGka9e0ANKDBwqlTJsPPXgAu3drS0K5cmlBo4SLv78WsBJCiFeRBIpeATZjRhC3ZDHvRh2iBb8xYEBLzpz9lI0XN7L72m46rOnAng/3YGMlT5cQQgghhBDpxdoacubUlipVnqY/eaK4efMBV69m4fJlK65fh9u3tUG1E7txQ1t+//1pmpUV5MtnGjwqVAgKFABb27Q/LyGESI5EHl4F2bNj88VgGDmSCQwhMLgJQ4fYsWDSAop+W5QDtw4waf8khlYemtElFUIIIYQQ4q3n4AD588eRP7/puESG2deuXtVaId24oQWQoqJMt9fr4eJFbVmz5mm6ra0WLDIEjgz/58unBa2EECI9SKDoVTFwIPFz5uEXfJkefMfsH/rSsaMvs+rNotPaTozYPYK6fnUpmaNkRpdUCCGEEEIIYYGl2deUgvv3nw6gbZh9LSgIYmNNt4+NhdOntSUhe3sICNCCRgmXPHm01klCCJGaJFD0qnB2xnr0SOjVi+GMZiGd6NnTlX/+6cD6gPWsOruKDms6cKTbETLZSodmIV4VUXFR/Hr6V9aeX8uDyAd4OHrQtGBTWhZqiYONQ0YX77VVpUoVevbsSdu2bTO0HGfOnKF27dqcP38eJyenDC2LEEKI15NOB56e2lK69NP0+Hi4e1cLHF25ov1/65aWFh9vuo/oaDh+XFsSypTJcgDJ11cCSEKIFycfH6+Srl1RBQviyX0+YxKnT8O0aTq+bfgt2ZyycSb4DEN3SPczIV4V68+vx3uqNx3XdmTtubX8ef1P1p5bS8e1HfGe6s2G8xvS7NhBQUH07duXvHnzYm9vj4+PD40aNWLHjh1pdsyXsWDBAtzd3VOUd/369dy9e5c2bdoAMHLkSHQ6XZLLqFGjjNs+efKEUaNGUbBgQezt7cmaNSstW7bkdKKfZkeOHEnx4sXNjn3t2jV0Oh3H/38nHhgYSLly5Zg2bdoLnbcQQgiRFGtr8PaGChWgfXv46iv49lv49VeYPRsGDYL339cG186WzXLg58kTOHoUFi+GL76ARo0gb15wcdGCUp06wcSJsHGj1h1Or0//8xRCvH6kRdGrxNYW3ddfQ7NmDGAa8+jF6NHv0Lp1Vn5u8jMNljVgxt8zaFigITXz1szo0grxVlt/fj1NVzQ1PtYrvcn/oVGhNFnRhLVt1tK4YONUPfa1a9eoWLEi7u7uTJ48mSJFihAbG8uWLVvo3bs3586de6H9xsTEYGdnZ5YeGxuLbTqOrDlz5kw+/PBDrP5/Rzxo0CB69uxplm/IkCGsXbvW2OooOjqa9957jxs3bjBlyhTKlSvH3bt3mTBhAu+++y7bt2+nXLlyz12eDz/8kG7dujFkyBBsbOSyKYQQIm3Z2GgzpeXKZTqAdmys1uLI0ALpxg3t8f37Wve2hCIj4Z9/tCUhR0etBVJgoNbyKDBQW3LnljGQhBBPSYuiV02TJlCxIo48YRQjiIqCjz+Gen716VGqBwCd13UmNCo0Y8spxFssKi6Kzms7A6BQFvMY0juv7UxUXJTFPC/q448/RqfTcejQIZo3b06BAgUoVKgQAwYM4ODBg8Z8N27coEmTJjg7O+Pq6kqrVq24e/eucb2hVc2PP/5Injx5cHDQusrpdDrmzZtH48aNcXJyYty4cQCsW7eOkiVL4uDgQN68eRk1ahRxcXHG/YWGhtKjRw+yZcuGg4MDhQsXZuPGjezevZsPP/yQsLAwYyugkSNHWjy34OBgdu7cSaNGjYxpzs7OZM+e3WTZsWMHixcvZsWKFeTPnx+AGTNmcODAAdasWUOrVq3w9fWlbNmyrFq1ioCAALp27YpKfCedAu+99x4PHz7kzz//fO5thRBCiNRia6uNSVS9OnTtCqNGwU8/aS2QZsyATz6Bxo2heHHImtV0kG0DQwDJ0AKpcWPw8wNnZyhRQmvZNG6cNsD2+fOQ4DIvhHiLyE+jrxqdDiZPhgoV+JD5TOdTtmwpxMqVMKXZFHZc3cGlh5fo83sflry/JKNLK8QbRSlFZGzkM/MtP7mckKiQZ+8PRUhUCEv/XUrrQq2Ji4vDRm+DLtGdm6Oto1laUh4+fMjmzZsZN26cxTFzDN279Hq9MUj0559/EhcXR+/evWndujW7d+825r906RKrVq1i9erVWCf4KXHkyJF8/fXXzJgxAxsbG/bu3UvHjh2ZOXMmlStX5vLly3Tv3h2AESNGoNfrqVevHo8ePWLJkiXky5ePM2fOYG1tTYUKFZgxYwbDhw/n/PnzgBb8sWTfvn04OjoSEBCQZB38888/dOvWja+//po6deoY05ctW8Z7771HsWLFTPJbWVnx6aef0q5dO06cOGGxy1ly7OzsKF68OHv37qVmTWnNK
YQQ4tViZ6d1N8ub1zQ9OvppC6SrV5/OwGapBVJUlOUxkOzstFnYAgOftkQKDIT8+bUBtoUQbyYJFL2KypeH99/HevVqvuYLGrOBTz6BOnWcWdxsMRV/rsjSk0tpXLAxrQq1yujSCvHGiIyNxHmC5QDGy/how0d8tOGjJNdHDInAyS5lAyVfunQJpRT+/v7J5tuxYwcnT57k6tWr+Pj4ALBo0SIKFSrE4cOHKVOmDKB1N1u0aBGenp4m27dt25YPP/zQ+LhLly588cUXdOrUCYC8efMyZswYPvvsM0aMGMH27ds5dOgQZ8+epUCBAsY8Bm5ubuh0OrJnz55sua9fv062bNmM3c4Su3fvHs2aNaN58+YMGjTIZN2FCxeoVq2axe0MgacLFy4YA0UnT540C1gl1eLI29ub69evJ1t2IYQQ4lVibw/58mlLQtHRWsDo+nWtC9vNm1pAKTjYPIAUEwOnTmlLQtbW2n4TBpACAsDfH2TuByFefxIoelVNmADr1tEofiNV2c2fd6vxxRfw7bflGFppKGP3jqXnxp5U9KnIO67vZHRphRDpJKVdp86ePYuPj48xSATawMzu7u6cPXvWGCjy9fU1CxIBlE44LQtw4sQJ9u/fb+yGBhAfH09UVBSRkZEcP36cnDlzGoNEL+rJkyfGLnCJxcbG0qJFC7Jly8YPP/xgMc/zdC0rWLAg69evN0m7ffu2xWBTpkyZiIx8dmszIYQQ4lVnb/+0BVL16k/TY2O1ANKNG1oA6fp1+O8/uHfPfBa2+Hi4cEFb1q41XefrqwWNDIshiJQlS5qfmhAilUig6FVVoAD06AFz5zLVajBl9H/z3XdWfPABDK86nD8u/cE/d/6h6/qu/NHujxR3WxFCJM3R1pGIIRHPzNd2dVs2XthoHLg6OVY6KxoWaMjSZku1rmc2lruepVT+/PnR6XQvPGB1YklN+Z44PSIiglGjRvH++++b5XVwcCBTpkypUp6sWbMSEmK5W1+/fv24ePEihw8fthhMKlCgQJL1cvbsWWMeAzs7O/z8/EzyJTVY9cOHD8mX+CdZIYQQ4g1ia6sNap07t+kg2nFxEBSkdWG7du3pINpBQZbHMLp+XVs2bzZN9/J62uooYSApZ07L4ykJITKOBIpeZcOHw6JFlIo4Qkt+ZSWt+egj+PdfWxY3W0zJ70uy5fIW5h2Zx8dlPs7o0grx2tPpdCnqAtYioAXrz69/Zj7QZkFrGdgSJzsn4qwsB4qeR5YsWahTpw5z5syhX79+ZgGd0NBQ3N3dCQgI4ObNm9y8edPYqujMmTOEhoYSGBj43MctWbIk58+fNwusGBQtWpRbt25x4cIFi62K7OzsiE/8c6QFJUqUICgoiJCQEDJnzmxM//777/n555/ZtWsXOXPmtLhtmzZt+PLLLzlx4gSlSpUypuv1eqZPn05gYKDZ+EUpderUKVq0aPFC2wohhBCvMxsbLZiTMydUqvQ0PT5e665244Y2BtL161qLpDt3tDGPErt3T1sSzw3h7Pw0eOTv//TvfPm0Ywsh0p+89V5l2bLB4MEwYgTT7IeyJroZly7ZMWoUfP11ABNrTeSTzZ8waOsgauapScGsBTO6xEK8FVoWasknmz8hNCo0yVnPAHTocHdwp0Vg6gYY5syZQ8WKFSlbtiyjR4+maNGixMXFsW3bNubNm8fZs2epVasWRYoUoV27dsyYMYO4uDg+/vhjqlatatatLCWGDx9Ow4YNyZUrFy1atMDKyooTJ05w6tQpxo4dS9WqValSpQrNmzdn2rRp+Pn5ce7cOXQ6HXXr1iV37txERESwY8cOihUrhqOjI46O5i2pSpQoQdasWdm/fz8NGzYEYP/+/fTt25fhw4eTN29egoKCTLbJlCkTbm5ufPrpp6xbt47333+fKVOmUK5cOe7evcv48eM5e/Ys27dvf6Eg3bVr17h9+za1atV67m2FEEKIN5W1NWTPri1lyz5NVwoePtTGPjK0Qrp1SwsgPXpkvp+ICDhyRFsS79/PT0fu3O4UL64zCSS5uaXlmQkhLI8WKl4dAwZA9uy8E32FPtbfAjBlChw9Cn3K9qFW3lo8iXtChzUdiI2PzeDCCvF2cLBxYGHThYAWDLLEkL6w6UIcbCyPufOi8ubNy9GjR6levToDBw6kcOHCvPfee+zYsYN58+Zpx9fpWLduHZkzZ6ZKlSrUqlWLvHnz8ssvv7zQMevUqcPGjRvZunUrZcqUoVy5ckyfPh1fX19jnlWrVlGmTBk++OADAgMD+eyzz4ytiCpUqEDPnj1p3bo1np6eTJo0yeJxrK2t+fDDD1m6dKkx7ccffyQmJoavvvqKHDlymC2ffPIJoHWB27FjB+3atePLL7/Ez8+PunXrYm1tzcGDBylXrtwLnfvy5cupXbu2ybkKIYQQwjKdDjw8oHhxaNoU+vfXvr8sXQrLlsGkSdCrF9SvD0WKaHkt/Y4THw/nz+vYssWBiRN1dO4M5cqBuzt4e0ONGvDxxzBzJmzdqrVs0j97VAAhRAro1POM/JkBHj16xLBhw1izZg337t2jRIkSfPPNN8aBWJ8lPDwcNzc3wsLCcHV1TbVy6fV67t27h5eXV5Kz86Sa77+HHj2IdPQgR+RlwnGjeHE4dAjuPrlFkXlFCI0KZWTVkYyoNiJty5LO0rWe32JvYz1HRUVx9epV8uTJk+Tgyc+y/vx6Oq/tTEhUCFY6K/RKb/w/s0NmFjZdSKOCjQBtkOWkxigSpoKCgihUqBBHjx597uBMatdzTEwM+fPnZ9myZVSsWPGl9/emeJl6Tu69l1bXbPH80uq5iInRs3LlPeLivPDweDuuNxlBKT1wD/BCp5N6TgtSx6krJubpQNqGcZD++w/u3lXExaX8OuPoCAULaq2ODP/7+0P+/No6Ydnb+F0gvaVlHafFNfuV73r20UcfcerUKRYvXoy3tzdLliyhVq1anDlzhnfeeUtm++rSBaZPx/HcOaZ6TqRb8HiOH9ci80OG5GRu/bm0Xd2WMXvGUC9/Pcq+U/aZuxRCvLzGBRvz38D/+O3Mb6w5t4aHkQ/J4piFZv7NaBHYItVbEr0tsmfPzk8//cSNGzcyvBXPjRs3GDp0qASJhBBCiDRkZwd58mhL1apP0+PiFPfv3+fmTQ+uXbPixg0toBQUpHVZSywyEo4d05bEfH3Ng0gFC2qtk+Q3PCFMvdItip48eYKLiwvr1q2jQYMGxvRSpUpRr149xo4d+8x9vBEtigDWr4cmTdDbO5A75iI3VU7s7eHECe0D7oNVH7Di1AoKeBTgWI9jzzWL0qtMotvp422s59RoUfQ8pEVR+pB6Th/SoujNJy2KXm/S2iXtSR2nj+Tq+dEjbeyj69e1Fkg3b2qtkB48eL4uaM7O2oTThsCRYSlQ4O1phfQ2fhdIb9KiKBXFxcURHx9vdiOZKVMm9u3bl0GlyiCNGkGlSljt28evhUdQ7tRPREdDt26wezfMqT+HPdf3cOHBBT7b9hmz68/O6BILIYQQQgghRJpwcdFmRwsIME2PjdVaHN248TSI9N9/
Wpql2dgiIrTxX48eNV/n42MaPDIsPj4g8RTxJnulA0UuLi6UL1+eMWPGEBAQQLZs2Vi+fDkHDhxIcorm6OhooqOjjY/Dw8MBLYKnT8XRzfR6PUqpVN3nM02ciFXFipQ9s4C6OT9h862i7N0L8+bp6dXLnZ8b/0zdpXWZc3gO9f3qU9evbvqVLY1kSD2/hd7Gejacs2FJD4bjvMINOd8IUs/p40Xr2fCes3Rdfps+g4QQQqQNW1stkOPjAwl7jisFoaFay6MbN7TFMBvbw4fa+sRu3tSW7dtN0x0ctHGPErY+MvyfOXOanp4Q6eKVDhQBLF68mC5duvDOO+9gbW1NyZIl+eCDD/jnn38s5p8wYQKjRo0ySw8ODibKUgj5Ben1esLCwlBKpV/zvLx5cW/YEIeNG/nZcxDet7YC8MUXUK7cfYq9U4yuhbvy06mf6LKuCztb7iSLQ5b0KVsayZB6fgu9jfUcGxuLXq8nNjYWG5u0/yhUShlnAJMuUWlH6jl9vEw9G957Dx48wNbW1mTdI0vzJgshhBCpQKfTgjiZM0PRoqbrYmO1gJGhFdKtW4bBtLVxjxKLioKTJ7UlMU9P08CRYcmXTwswCfE6eKXHKEro8ePHhIeHkyNHDlq3bk1ERASbNm0yy2epRZGPjw8hISGpPkZRcHAwnp6e6fvF+uJFdIULo4uLY0q97Qz+oyYA772n+OMPxZO4SEr/UJrzD87TIqAFK5qveK2/LGVYPb9l3sZ6jo+P5+LFi3h5eeHh4ZEux4yNjTX7YixSn9Rz+njRen7w4AH37t0jf/78WFtbm6wLDw8nc+bMMkbRK0DGKHq9yfg5aU/qOH1kdD0rBeHhT8dCunlT+zsoCO7fh///ZpIiOh3kzm0aPDIsPj6Q6JKYrmSMorQnYxSlEScnJ5ycnAgJCWHLli1MmjTJYj57e3vs7e3N0q2srFL9CdHpdGmy32QVLAg9esCcOfS/8znfeB/i1n9WbNum4+efdXTr5syS95dQ/qfy/Hb2N1acXkG7ou3Sr3xpIEPq+S30ttWzlZUVmTNnJjg4GJ1Oh6OjY5oGVQ2D/8bHx7/WwdtXndRz+niRelZKERkZSXBwMJkzZ7YYZHpbPn+EEEK8HnQ6cHPTlkKFTNfFx2stjgxBpFu3tBnZ7t6FsDDzfSkFV69qy5Ytpuvs7bUWR4kDSPnzQ7ZsMiubSH+vfKBoy5YtKKUoWLAgly5dYvDgwfj7+/Phhx9mdNEyzvDhsHAhNsf/YcPnKykxsQ0AAwZA7dpQ2rc0w6oMY8TuEfT+vTdVfKvg4+aTwYUW4tWTPXt2AO7du5fmxzKMyWJlZSUBjDQk9Zw+Xqae3d3dje89IYQQ4nVlbQ3e3tpStqzpusjIp13ZbtzQAkiGrmwJOr8YRUfDmTPakpiLy9OgkeF/w98yHpJIK698oCgsLIwhQ4Zw69YtsmTJQvPmzRk3btzb3a3Ayws+/xyGDaP4yqH0/LAZ3863JyICunSBbdtgaOWhbLq4iUO3D9F5XWe2ddiGlTSLFcKETqcjR44ceHl5ERsbm6bHMozJ4uHhIa0m0pDUc/p40Xq2tbU1624mUl98fDwjR45kyZIlBAUF4e3tTefOnfnqq68kgCqEEOnA0VFrIZQvn2m6YUDt27efDqZt6MoWHGy5K9ujR/DPP9qSmIeHeQDJsLi4pMmpibfEKx8oatWqFa1atcroYrx6Pv0U5s6Fq1eZ3n0em3z6c/Mm7NwJ330HvXrZsLjZYkp8V4KdV3cy8++Z9C/XP6NLLcQrydraOs2/vOr1emxtbXFwcJAARhqSek4fUs+vtokTJzJv3jwWLlxIoUKFOHLkCB9++CFubm7069cvo4snhBBvrYQDahcubLouPl4LFiUMIv33nxZESmpWtgcPtOXgQfN12bKZBo78/J7+7+ycNucn3hyvfKBIJMHJCUaPhm7dcJg8hkU/daZ6M3cABg+GOnWgQN4CTHlvCh///jFfbP+C2vlqE+gZmLHlFkIIIUSa+uuvv2jSpAkNGjQAIHfu3CxfvpxDhw5lcMmEEEIkxdoasmfXllKlTNfFxmoBo1u3ng6obejKZmk8JNDW3b0L+/aZr8uRwzR4lC8fZMlig5OTtEQSGgkUvc46d4Zp0+DsWar9PZGePSfw7bfw+LHWBW3nTuhZuifrL6xn86XNtF/dnoMfHcTO2i6jSy6EEEKINFKhQgW+//57Lly4QIECBThx4gT79u1j2rRpGV00IYQQL8DWVpsZzccHypc3XRcdrQWNDC2Rbt/Wxke6e1frtmbJnTvasmePIcUKyApoQaSErY8M/0tLpLeLBIpeZzY2MHEiNG4MM2Yw+ejHbN7sw7Vr8OefMGcO9O2r4+fGP1N4XmGOBR1j1O5RjKs5LqNLLoQQQog08sUXXxAeHo6/vz/W1tbEx8czbtw42rVLehbU6OhoohOMsBoeHg5o3Qz1en2qlU3blwL0FrtRiNShTSmu1bNIG1LH6UPq+dns7CB3bm2pWNF0XWSkaUuk27d13LkD9+5BRITlMesMQaS9e83XZc+u8PPTWiD5+an//689dndP7TN7s+j1euNkIGmx79QmgaLXXcOGUKUK7NmD86Th/PzzfGrU0FZ9/jnUqwd+fjn4ruF3tPy1JV/v/5oGBRpQwadCxpZbCCGEEGli5cqVLF26lGXLllGoUCGOHz9O//798fb2plOnTha3mTBhAqNGjTJLDw4OJioqKtXKFhurx9o6DJ1Oof2CLdKGHghD+4It9Zw2pI7Th9Tzy3B0hLx5tSWxx491BAVZc+uWDbdvW3Hnjp7//svE3bs2PHpkeezOoCAdQUGG7mymgaYsWfTkzh1H7tzx5MkTT+7cceTJE4+vbxweHoq3fS4FvV5PWFgYSqlUH9/xUVJNx16CTqk3+/ec8PBw3NzcCAsLw9XVNdX2q9fruXfvHl5eXhk/kOehQ/Duu9roaMeP0/eHosyera2qWFFrXWRtDR3XdGTxv4vJmzkvJ3qewNnu1W87+ErV8xtM6jntSR2nD6nn9JFW9ZxW1+y3jY+PD1988QW9e/c2po0dO5YlS5Zw7tw5i9tYalHk4+NDSEhIqj4XMTF6fvstmLg4T7JkkfdoWtFaYQQDnuhk1ts0IXWcPqSe00fieo6M1FoV3b6ttUT677+nLZEePXr+iI+rqzLOAqctTx+/8w68Dbdser2e4OBgPD09U/0eNTw8nMyZM6fq/ZO0KHoTlC0LrVrBypXw+ed8/dsf/P47XLkC+/fD9OkwaBDMqjeL3dd2cyXkCgO3DOS7Rt9ldMmFEEIIkcoiIyPNbkKtra2TbZpub2+Pvb29WbqVlVWq3tBqu9IBVvKlL81JPac9qeP0IfWcPp7Ws5PT03GJEnvy5Gl3tlu3tGBSUFDyA2uHh+s4dgyOHUt4LI29vdbiyTSQpC25c2vr3xQ6nS7Vr6tAmvw4KoGiN8W4cbB6NWzejNPBHSxYUJOqVbVpFL/8EmrXhqJF3VjYdCE1FtXg+6Pf06hgIxoWaJjRJRdCCCFEKmrUqBHjxo0
jV65cFCpUiGPHjjFt2jS6dOmS0UUTQgjxmsuUCfLk0ZbEoqO1gNHt2+ZBpJAQsPR7RXQ0nD2rLYnpdJAr19PAUeKAkptb6p9fWlBKcT/yPjcf3cTK2QpPJ090r3hfPAkUvSn8/KBXL5g1Cz77jMqHDzNwoBVTpkBMDLRvr/VQq56nOgPKDWDawWl0Xd+VU71O4enkmdGlF0IIIUQqmTVrFsOGDePjjz/m3r17eHt706NHD4YPH57RRRNCCPEGs7fXAju5cpmvi4vTuq7duWMYWPtpEOn+fW19YkrB9evasnOn+XoPD9MAUsL/X4UubaFRoSw8vpBZh2ZxOeSyMT1f5nz0LduXTsU74e7gnnEFTIaMUfSCXslxMIKDtXfGo0ewdCnRzdtStiz8+6+2euBAmDIFouKiKP19aU4Hn6apf1NWt1r9ykY0X8l6fgNJPac9qeP0IfWcPmSMojdfWj0XMTF6Vq68R1ycFx4e8h5NK9p4I/cAL+muk0akjtOH1HP6yOh61uu1Fkf//fe0JdKdO1oQKThY6+72vOzstFZPlgJJefJoA32npS2XttB8ZXMiYyMBUDwNu+j+3/XO0daRVa1WUcevzksdKy2u2dKi6E3i6QlffKH1NRs6FPvmzVmyxJ7SpbVWRVOnQv36UKOGA4ubLebdH99l7bm1LDyxkM7FO2d06YUQQgghhBBCvGWsrLTWQR4eUKSI+fpHj7TWR4YubXfuaI+Dg7VxkSw1fYmJgfPntcWS7NmfzgiXeMmR4+VaI225tIUGyxqglDIJEBkY0p7EPqHBsgZsarvppYNFqU0CRW+a/v1hzhytfd6cORQZMIAJE7TWRACdOmktjErkKMHo6qMZsmMI/f7oR7Xc1cjtnjsjSy6EEEIIIYQQQphwcdGW/PnN18XEaF3aDK2R7tzRluBgrUtbbKzlfQYFactff5mvs7fXBtJOHEDKk0f738Ul6bKGRoXSfGVzlFLoSXoSCQA9eqyUFc1XNufWgFuvVDc0CRS9aRwdYfRo+OgjGDsWPvyQ/v0zs2mT1q/z1i3o3RuWLYPBFQaz8cJG9t/cT8c1HdnVaRfWVtYZfQZCCCGEEEIIIcQz2dlBzpzaUras6TqltC5thtZICcdFCg6G8HDL+4yOTr41UtasT4NGhoG9DX+vC1pIZGykxZZElujRExkbyaITi+j3br/nOPO0JYGiN1GnTjB9Opw+DV9/jdXEiSxYAEWLQmgoLF8OjRrBBx9Ys6jZIop9W4y9N/Yy7cA0BlccnNGlF0IIIcQrIj4+nvj4eLN0wxS/CfMlx9r66Q9RSsWjVDx6vflNtE4HOl3ivJb3mVZ5AaysXjSvnuSG/3yevDqdlXEMyefNq9fHA9qi06lk86ZVGTI+r/r/uCtJ5dUZx2J5kbxafvM6ftn9vi55gf+/zlIjr+n7M2HexPUsnxEJy5x676OE9WxlpXvF3ssvl9fdXVv8/c0/I6Ki9MYBtg1BpKAgrSVScLCOmBir/x9DYWX19L0REqItR49qj/V6HUpZAQr6zYLMwHMOATzz75l8XOpjk7GDE15rlVLoLU0bx7OvwS9CAkVvIhsbmDgRGjaEb76B3r3xyZWLuXOhbVstS69eUKkS5PXJy/Q60+m2oRtf7fqKOn51KJqtaMaWXwghhBCvhL/++gsnJyez9CxZslC06NP7hf379yd5A+vu7k7x4sWNj0NDTxIfb09MjPldtJ2dC1mzljI+Dg4+TFxclMX92to64un59Ofj+/f/Ifb/g4YmZmPjgJdXOePjBw+OExPzyGJeKytbsmevaHz88OFJoqNDLebV6azIkaOK8XFIyCmioh5azAvg7V3N+Hdo6FmePAlOMm/27JWNX1zDwi4QGRmUZN5s2SpgbW0HQHj4JR4/vg08BpxI/G3Fy6scNjYOADx6dJWIiJtJ7tfTswy2ttrzHxFxg0ePriWZN2vWktjZaYOoPn58i/DwK0nm9fAojr29OwCRkXcIC7uYZN4sWYrg4OABwJMn9wgNPZdk3syZA8mUyQuAqKhgQkLOJJnX3d0fR8fsAERHP+Thw5NJ5nVzy4+T0zsAxMSE8eDBcUBhqY5dXfPi7KxN+RQb+4j7948muV8Xl9y4uOQGIC4ukuDgw0nmdXb2wdU1HwDx8dHcu3cwybxOTt64uRUAQK+P5e5dC31r/s/RMTvu7v6A9iU7KGhvknkzZfIkc+ZCxsfJ5XVwyEKWLE8/I+7e3Z9kEMre3h0Pj+LGx/fuHUSvN/QXMq1n+Yx46uU+I/5LlONpPXt5lX+rPiNsbS3P1Obm5k9MTHbu3oX//ntITMxJIiIgIgIiIyEqwcvu4sX83L79Djg+gCyXeV4KxeWQy2zatQk3Wzdjeu7cucmdOzcAkZGRHD5s+TPi8ePHz33MZ5FA0Zuqfn2oVg1274Zhw2DhQj74ADZs0FoUhYVpDY+2b4euJbqy/vx6NlzYQPvV7Tnc7TD2NvYZfQZCCCGEEEIIIUS60+kgc2ZtyZ0bHiaK7+n18PixFjgKCNCGCL4RHsGplzjmk/gnJoGijKRTybXjegOk1fSur8UUzIcPax01dTo4dgyKFSMkROuCduuWlmXyZBg0CO5G3KXIvCIERwYzuMJgJr03KWPL/n+vRT2/AaSe057UcfqQek4faVXPaXXNFs/P8Fw8fPjQ4nPxol3PYmL0/PLLHeLivMiSxfy1I91KEpb5ZbueWZ7q+nXoKpI6edOj65l5Hb9qXcTejK5nT+tZPiMSljm1u55p9WxlZfOKvZdfn8+I8Lj7tP/XM8l8z3J3wF08HD1M9puSrmfh4eFkyZIlVe+fpEXRm6xMGWjdGn75BT7/HDZvJnNmWLgQatbUsgwdCtWrQ6lS2fih0Q80/aUpU/6aQsMCDaniWyX5/QshhBDijWZtbW0yvlBy+VJKp7NGp7NOUZBRy/s8+83ovFavRF6taq0Ba7NAUXqVIePz6ky++Kd2XqV0PKuO07oMGZkXTIMaaZX3WfX8arzvX7/PiMR5TetZl2zetCpD+udN/feGi7UH2e3ycTfmSooHswbQoSNv5rx4Onua1H/iMiR1rX2ea3BKyc+tb7px48DWFrZsgW3bAKhRAwb/f8zq2Fho0wYePYIm/k3oUrwLCkXHNR0Jj05iGHghhBBCCCGEEEIY6XQ6Gnr1faFt+73bL8kgUUaQQNGbLl8++Phj7e/PPtM6UwJjx2oNjgAuXYI+fbS/Z9SdQR73PFwPu84nmz/JgAILIYQQQgghhBCvnxpZOmFv5YguhaEWK50VjraOdCzWMY1L9nwkUPQ2+OorcHWF48dh2TIA7Oy0Qa1dXLQsixbBkiXgYu/CwqYL0aFjwfEFrDm7JuPKLYQQQgghhBBCvCacbdz5Iu8qdOieGSyywgodOla3Xo27g3v6FDCFJFD0NsiaFb74Qvv7q6+Mc/nlywfz5j3N1quX1rqosm9lPqv4GQDdN3YnKCLpqRaFEEIIIYQQQgihKelah+F+m7C3yvT/gJFplzJDWibbTPze7ndq56udQSVNmgSK3haffALvvK
PN2zdnjjG5XTvo1En7OyICPvgAYmJgVLVRFM1WlPuR9/lo/UfJjh4vhBBCCCGEEEIITUnXOvxc+BYf5ZxBNru8JuvyZs7LjLozuD3g9isZJAIJFL09HB1hzBjt77Fj4eFD46pZsyB/fu3vI0e0Rkf2NvYsabYEO2s7Nl3cxI9Hf8yAQgshhBBCCCGEEK8fZxt3Gnn147tCF1lc5B5jPA6xteY9Lva9SL93++Hm4JbRRUySBIreJh07QuHCEBoKEyYYk11ctPGKbG21x5Mna5OkFclWhPE1xgPw6ZZPufTwUgYUWgghhBBCCCGEeD3pdDpcbTzwsPbBzc7jlZrdLCkSKHqbWFvDxIna3zNnat3Q/q9UqaerQIsp3b0Ln5b/lKq+VXkc+5iOazoSp49L50ILIYQQQgghhBAivUig6G1Trx5Ur64NRDRsmMmqTz7RVgPcu6cFi1BWLGy6EBc7Fw7cOsCk/ZPSv8xCCCGEEEIIIYRIFxIoetvodDDp/8GeJUvg2DHjKisrWLAAsmfXHm/dCuPHg6+7L7PrzwZgxO4RHL1zNJ0LLYQQQgghhBBCiPQggaK3UenS2vRmSsHnn5us8vKCZcu0oBHAiBGwcyd0KNqB9wPeJ04fR4c1HXgS+yQDCi6EEEIIIYQQQoi0JIGit9W4cdro1du2aU2HEqheHUaN0v7W66FtWwgK0vFdw+/I5pSNM8FnGLpjaAYUWgghhBBCCCGEEGlJAkVvqzx5oHdv7e/PPtMiQgkMHQp16mh/372rNUByt8vKT41/AmDG3zPYcWVHepZYCCGEEEIIIYQQaUwCRW+zr74CNzc4cQKWLjVZZWUFixfDO+9oj//8E0aOhAYFGtCjVA8AOq/rTGhUaPqWWQghhBBCCCGEEGlGAkVvMw8PGDJE+/vLLyEqymS1pyf88gtYW2uPx42DP/6AKbWn4JfFj1vht+jze590LrQQQgghhBBCCCHSigSK3nb9+kHOnHDzJsyaZba6YkX4+uunjzt0gJC7zixuthgrnRVLTy5l5emV6VhgIYQQQgghhBBCpBUJFL3tMmWCMWO0v8ePh4f/Y+++w6OoujiOf2cTklBDgIQapEnvvdpAUJEigiJYsCugICiC6KuIAiogICqKCIjSe5GugDSlC9I7IhBKSEJJQrLz/nFNQqQlsCUJv8/z7JOdyezs2ZOEnT3ce+6ZKw7p3h2aNTP3T5+Gxx+Hanlr805909D6lbmvcDTyqKciFhERERERERE3UaFIzDChChXg7FlTLPoPy4KxY6FIEbO9Zg307An/u/t/VMtfjfDocJ6b/Ry2bXs0bBERERERERFxLRWKxDQh+vRTc/+LL+DgwSsOCQqCyZMhUyazPXgwzJyeiXGPjCPAN4BF+xbx1bqvPBeziIiIiIiIiLicCkViNGkCDRtCbKxZDe0qatSAzz9P2n72WbBPluGTRp8A8Nbit9h1apcnohURERERERERN1ChSAzLShpV9NNPsHHjVQ/r2NHMVAM4fx4eeQSeKtWZhkUbcjHuIk/NeIpL8Zc8FLSIiIiIiIiIuJIKRZKkalVo187c79EDrtJzyLJgxAioXNls794Nz3Zw8H3zMeQMyMm6f9bR77cr+xyJiIiIiIiISNqnQpEk99FH4OcHS5fCokVXPSRLFpg+3fQtApg1C8Z9WYivHjI9ivqu6MsfR//wVMQiIiIiIiIi4iIqFElyRYtC587mfo8eEB9/zcMmTDAjjADeew+Cjj7B4+UeJ96O56kZT3Hh0gUPBS0iIiIiIiIirqBCkVypd2/ImRP+/BN+/PGahzVpAn37mvu2bWat9Sj3FQWyF2D36d30WNzDM/GKiIiIiIiIiEuoUCRXypULevUy9999Fy5evOahvXpBixbmfng4PPdELkY8MAaAL9d9yYK9C9wcrIiIiIiIiIi4igpFcnWvvQahofD33/DFF9c8zOGAsWOhZEmzvWULTOp3P51qmOlrz816jtMXTnsiYhERERERERG5RWm6UBQfH897771H0aJFyZw5M8WLF6dv377YV1mNS1wsc2bT2BqgXz84fe1iT2AgzJgBWbOa7Z9+gkI7PqFU7lIcO3eMV+e9qp+ZiIiIiIiISDqQpgtFn3zyCV9//TXDhw9nx44dfPLJJ3z66ad8cZ0RLuJC7dtDpUoQEQEff3zdQ8uWhTFjkrZ798jCq/nG4WP5MGX7FMZvHe/eWEVERERERETklqXpQtHq1atp0aIFTZs2pUiRIrRu3ZrGjRvzxx9aet0jfHzg00/N/eHD4cCB6x7eurVZ/QzA6YT3X6hBx3L/A6DTz504EnHEndGKiIiIiIiIyC1K04WiunXrsnTpUnbv3g3Ali1bWLlyJQ8++KCXI7uNNG4MjRrBpUtmNbQb+OADaNnS3I+IgAW936Fa3ppExETQYVYHnLbTreGKiIiIiIiIyM3z9XYA19OzZ08iIyMpXbo0Pj4+xMfH8/HHH9O+fftrPiYmJoaYmJjE7cjISACcTidOp+uKFE6nE9u2XXrONGvAABzVq8OECTjfeAOqVbvu4WPHQv36Flu3WuzZ5Uu9+T+QuWYVfjnwC0PXDqVLrS4pfurbKs9epDy7n3LsGcqzZ7grz/q5iYiIiHhfmi4UTZ48mZ9++onx48dTrlw5Nm/eTNeuXSlQoADPPPPMVR/Tv39/+vTpc8X+kydPEh0d7bLYnE4nERER2LaNw5GmB2bduoIFCXz0UTJPm8alN94gfMoUsKzrPuS773x44IHchIc7WDW7FPWL92Nl4Bv0WtqLqjmrUiqoVIqe+rbKsxcpz+6nHHuG8uwZ7spzVFSUy84lIiIiIjfHstPwclShoaH07NmTTp06Je776KOP+PHHH9m5c+dVH3O1EUWhoaGEh4eTI0cOl8XmdDo5efIkwcHBt8eHkUOHsEqXxoqNxTl3LqRg+t/y5dC4sUVcnAXYlB/wINuiF1IlXxVWP7caPx+/G57jtsuzlyjP7qcce4by7BnuynNkZCRBQUFERES49D1bUi8yMpLAwECX/yxiY51MnhxGXFwIuXPrb9RdbNsJhAEhWJby7A7KsWcoz56hPLufbTs5ejSMatVCqFnTtTl2x3t2mh5RdOHChSsuQH18fK47NN3f3x9/f/8r9jscDpd/aLAsyy3nTZOKFoXXXoNBg3D07AkPPGCaXV/HvfeaHtivvAJgseuT0eR4szybjm+i74q+fNzw+iupJbit8uxFyrP7KceeoTx7hjvyrJ+ZiIiIiPel6SuyZs2a8fHHHzNv3jwOHjzIjBkzGDx4MI888oi3Q7s9vfMO5MwJ27bBuHEpesjLL0PHjub+pfD8WHO/AWDAqgGsPrLaTYGKiIiIiIiIyM1I04WiL774gtatW9OxY0fKlCnDm2++ycsvv0zfvn29HdrtKVeupJXP3n0XLl5M0cOGDIF77jH3I9a0JufhJ3HaTp6a8RTnYs+5JVQRERERERERSb00XSjKnj07Q4YM4dChQ1y8eJF9+/bx0Ucf4ed349424iadO0PhwnD0KAwdmqKHZMoEU6ZAs
WJm++z4LwiICWV/+H66LezmxmBFREREREREJDXSdKFI0qCAAPj4395C/fvDqVMpeliePDBvnpm5RnROoieMBWDkxpHM3T3XPbGKiIiIiIiISKqoUCSp164dVK4MkZHw0Ucpfljp0jB9Ovj6AgfvhTVvAPD87Oc5ef6ke2IVERERERERkRRToUhSz+GATz8197/6CvbvT/FD770XRo78d2NpPwgrS9j5MF6a+xK2bbs+VhERERERERFJMRWK5Obcfz80bgyXLiU1uE6hDh3MAmrEBcD0HyE+EzN3zmTslrFuCVVEREREREREUkaFIrl5n3wClgUTJ8K6dal6aN++0KYNcLwK/NoHgNd+fp2DZw+6Pk4RERERERERSREViuTmVa4MTz5p7vfoAamYOuZwwNixULs2sKoHHK7LuUtRtJ/6NPHOeLeEKyIiIiIiIiLXp0KR3Jq+fcHfH5Ytg/nzU/XQzJlh1iwococPzPgBYrOy+uhvDFw12D2xioiIiIiIiMh1qVAkt+aOO+D11839Hj0gPnWjgUJCYN48CHQWhwVDAHhn6bv88fc6xm0ZR+sprWk1uxWtp7Rm3JZxRMdFu/gFiIiIZDxHjx7lySefJHfu3GTOnJkKFSqwfv16b4clIiIi6YAKRXLrevWCoCD46y8znyyVypaFGTMg07bnYVcznFYstUfV5umZTzNr1yzWHFvDrF2zeHrm0xQYVIA5u+a44UWIiIhkDOHh4dSrV49MmTIxf/58tm/fzqBBgwgKCvJ2aCIiIpIOqFAkty4oCN5919x/7z24cCHVp7j3Xhj3gwV/tQYbbJwAOO3kX89Gn6XFxBbM3jXbNbGLiIhkMJ988gmhoaGMHj2amjVrUrRoURo3bkzx4sW9HZqIiIikA77eDkAyiE6dYNgwOHQIhgyBd95J9SlaPBpN5p1duWhbwNUbY9vYWFh0mNmBf7r/Q4BvwK3FLSIiksHMnj2bJk2a0KZNG5YvX07BggXp2LEjL7744jUfExMTQ0xMTOJ2ZGQkAE6nE6fT6bLYzLlswJmaNTAklWw7Kc/iHsqxZyjPnqE8u19Cjm3biQvfVgFc+j6dQIUicQ1/f/j4Y7MK2oAB8OKLEBycqlNM+WsKFwkH6/rH2diER4czdftUnqz45C0ELSIikvHs37+fr7/+mm7duvHOO++wbt06Xn/9dfz8/HjmmWeu+pj+/fvTp0+fK/afPHmS6GjX9Qe8dMmJj08ElmWjge3u5AQiMB/8lGf3UI49Q3n2DOXZ/Zz4+0cQE2MTFubaHEdFRbn0fJDKQpHT6WT58uX89ttvHDp0iAsXLhAcHEyVKlVo1KgRoaGhLg9Q0pEnnoBBg2DTJvjoIxg6NFUPn7lrJg7LkTjN7HocloMZO2eoUCQiImmep6+fnE4n1atXp1+/fgBUqVKFbdu2MWLEiGsWinr16kW3bt0StyMjIwkNDSU4OJgcOXK4LLbYWCfx8RZxccHow4g7OTH/86Y8u49y7BnKs2coz+7nJCbGwt8/mJAQ1+Y4IMD1s2xSVCi6ePEigwYN4uuvv+bMmTNUrlyZAgUKkDlzZvbu3cvMmTN58cUXady4Mf/73/+oXbu2ywOVdMDhgM8+g0aN4Kuv4LXXoESJFD/89IXTKSoSgelZdObCmZuNVERExO28df2UP39+ypYtm2xfmTJlmDZt2jUf4+/vj7+//xX7HQ4HDofrLmjNqSzAgWXpw4h7Kc/upxx7hvLsGcqz+1lYlmvfVwGXnw9SWCgqWbIkderUYeTIkdx///1kypTpimMOHTrE+PHjadu2Lb17977uPHjJwBo2hCZNYOFC6N0bJk1K8UNzZ8mdqhFFubLkupVIRURE3Mpb10/16tVj165dyfbt3r2bO+6445bPLSIiIhlfikpPixYtYvLkyTz00ENXvcgBuOOOO+jVqxd79uzhvvvuc2mQks588glYFkyeDH/8keKHtSzVMlUjih4p/cjNRigiIuJ23rp+euONN1i7di39+vVj7969jB8/nm+//ZZOnTq55PwiIiKSsaWoUFSmTJnE+4cPH8a+yjIVtm1z+PBhMmXKpOVXb3eVKsHTT5v7PXqQ0mVN2pRrQ1BAENaNu1mT3S87rcu2vsVARURE3Mdb1081atRgxowZTJgwgfLly9O3b1+GDBlC+/btXXJ+ERERydhSPZmtaNGinDx58or9Z86coWjRoi4JSjKAvn3NSmjLl8O8eSl6SIBvAGNbjgW4drHIBiw4Hx3Lkn2/uChYERER9/L09dPDDz/M1q1biY6OZseOHWoJICIiIimW6kKRbdtY1pUf4s+dO+eWbtuSToWGQteu5v7bb0NcXIoe1qxUM2a2nUnOgJyA6UV0+VdicsKxSjgdMTSf0IwR679xbdwiIiJuoOsnERERSS9S1MwaSFwy1bIs3nvvPbJkyZL4vfj4eH7//XcqV67s8gAlHevZE0aOhO3bYcwYeOGFFD2seanm/NP9H6Zun8r0HdM5HnGcfIH5aFWmFT67WtO+nQ9205ewq4zh1XmvcOjsQT5u+HFSMUlERCSN0PWTiIiIpDcpLhRt2rQJMP8jtnXrVvz8/BK/5+fnR6VKlXjzzTddH6GkXzlzwrvvQrdu8P778MQTkDVrih4a4BvAkxWfpF35doSFhRESEmKW/asI5yPgxRe/h7NF4d73GbBqAAcjDjKmxRj8fa9c2ldERMRbdP0kIiIi6U2KC0W//vorAM8++yxDhw4lR44cbgtKMpCOHWHYMDh4EIYMgd69b/mUL7wAZ85YvP32/+DsHdD8BSZum8g/Uf8w8/GZBGUOuuXnEBERcQVdP4mIiEh6k+q5OqNHj9ZFjqScvz/062fuf/IJhIW55LQ9epgbW56Bn+ZDdA5WHFpBve/rcfDsQZc8h4iIiKvo+klERETSixQVil555RX+/vvvFJ1w0qRJ/PTTT7cUlGQwjz8O1apBVJRZDc1FBgyAF18E9jeC71dCZEF2nNpB7e9qs/6f9S57HhERkZuh6ycRERFJj1I09Sw4OJhy5cpRr149mjVrRvXq1SlQoAABAQGEh4ezfft2Vq5cycSJEylQoADffvutu+OW9MThgE8/hYYNYcQIeP11uPPOWz6tZcHXX8P58zB+fAX4bi3Wk005EfInd4+5m0mtJ/FwyYdd8AJERERST9dPIiIikh6laERR37592b17N/Xq1eOrr76idu3aFC5cmJCQEEqVKsXTTz/N/v37+fbbb1m7di0VK1Z0d9yS3tx3Hzz4IMTFuaRPUQIfHxg7Fh59FIgshD3qN3wONObCpQu0mNiCr9d97bLnEhERSQ1dP4mIiEh6ZNm2baf2QeHh4Rw+fJiLFy+SJ08eihcvjmVZ7ojvlkVGRhIYGEhERIRLewM4nc7kq3HJjW3dCpUqgW3D2rVQq9YNH5LSPMfGmmLR3LmA4xKZWr3CpfLfA9Cjbg/6N+qPw9LP6Vr0++x+yrFnKM+e4a48u+s9O63Q9RPExjqZPDmMuLgQcufW36i72LYTCANCsHT94xbKsWcoz56hPLufbTs5
ejSMatVCqFnTtTl2x3t2ilc9u1xQUBBBQVpZSlKpQgXo0AFGj4a33oLly838MRfw84MpU6B5c1i8OBOXpn5HQFQRouv8j09Xf8qhiEOMaTmGAN8AlzyfiIhIaun6SURERNKDVJeyPvjgA5xO5xX7IyIieOKJJ1wSlGRgH34IAQHw228wZ45LTx0QADNnwl13AVhEL3yP7EvG4mv5MumvSTQe15gzF8+49DlFRERSQtdPIiIikl6kulA0atQo6tevz/79+xP3LVu2jAoVKrBv3z6XBicZUKFC0LWrud+zp+lZ5EJZspjpZwmz2qJWPk2OOQvIlikHvx3+jbqj6nIg/IBLn1NERORGdP0kIiIi6UWqp579+eefvPzyy1SuXJlBgwaxe/duhg4dyltvvUWfPn3cEaNkND17wsiRsGOHmYb24osuPX327LBggemfvWkTnFnfkJDoVWR7+kF2nd5F7VG1mfvEXGoUrOHS5xUREbkWXT+JGJYVj8NxyVXdB7zG9HS5BESrp4sbZbQ82zY4nZmwbR9vhyJyXakuFAUFBTF58mTeeecdXn75ZXx9fZk/fz4NGzZ0R3ySEQUGwnvvmZFF//sftGsHWbO69Cly5oRFi+Dee2HbNgjbVp68366ldOem7Dy7hXvG3sOERyfQvFRzlz6viIjI1ej6ScQmW7bjZMt2loyx1oANOIEoIJ1XvdK0jJdnpxPOncvJuXP5yCivSTKem2pm/cUXXzB06FCeeOIJNmzYwOuvv8748eOpVKmSq+OTjOrVV2HYMNi/HwYPNoUjF8uTB375BRo2NAuundhbEAb/Rv132rDy+EIemfQIwx4YRqeanVz+3CIiIv+l6ye5nWXLdpycOc+SJ08Ifn5Z0uyKfylnA3GYj1Pp/bWkZRkrz7ZtExt7gVOnwgA4dy6/lyMSubpUF4oeeOAB1q9fz9ixY2ndujUXL16kW7du1K5dmz59+tCjRw93xCkZjZ8ffPwxPPEEfPopvPwyhIS4/GmCg5OKRX/+CScOZ8fqO4c2/TsyZf93dJ7fmYNnD/LJ/Z/gyADDWUVEJG3S9ZPcziwrnmzZTJEoe/bc3g7HRTJWASPtynh59vfPDEBcXBjnz4doGpqkSan+ZBwfH8+ff/5J69atAcicOTNff/01U6dO5fPPP3d5gJKBPfYYVK8O586Z1dDcJGFkUcJ/2B4/mokVPb6lS4WPABi4ZiBPTHuC6Lhot8UgIiK3N10/ye3M4biEwwF+flm8HYpImuDnlwWHw/xtiKRFqS4ULV68mAIFClyxv2nTpmzdutUlQcltwuEwo4kAvvkGdu9221Plzg1Ll0Llymb7xHGLiR17M6DWODI5MjH5r8ncP+5+Tl847bYYRETk9qXrJ7mdJcwyS//TzURcI+FvQX8Skla5dK5Nnjx5XHk6uR3cey80bQpxcfDOO259qoRiUZUqZvvECRj8zJN802Ahgf6BrDy8krrf12V/+P7rn0hERMSFdP0kIlezcuUygoIsIiLOejuUdKFixSJ8/fWQWz7Pww/fQ69eXW/5PCLpmZqyiPcNGGBGF02bBmvWuPWpcuWCJUugalWzHRYGPR+/l+/rryI0Ryi7T++m9ne1+ePoH26NQ0REROR2d/EiRER47nbxYupjPHHiOD16vEblysXIm9efcuVCadu2GcuXL3VpLq5WnKhZsy47dx4jR45Alz6XGNcqxI0bN5133unrnaBE0oibWvVMxKXKl4cOHeD77+Gtt+C339w6DjOhWHT//bBhgykWvdC8HONmruW9HQ+z6fgm7hlzDxMenUCL0i3cFoeIiIjI7eriRfj5Z1PA8ZTAQHjoIcicOWXHHz58kAceqEdgYE4+/PAzypatwKVLl/jll4W89VZn/vjDvdNG/fz8yJs3n1ufQ64UFJTL2yGIeJ1GFEna0KePeddetQpmz3b70wUFmWJR7dpmOzwc2jYtQL87l/NAiQe4GHeRRyY9wvA/hrs9FhEREZHbTWysKRL5+5sCjrtv/v7m+WJjUx5j9+4dsSyLJUv+oHnzRylRoiRlypSjU6duLF6cNAr+yJHDtGvXgkKFslG4cA6effYxwsJOJH5/wIAPaNCgMhMnjqNixSIULhzIc8+1JSoqCoCOHTuwatVyRowYSlCQRVCQxeHDB68Y8TJ+/BjuuCMnS5cupFatMhQqlI3WrR/g+PFjic91tZFJ7du3pGPHDonbZ8+G88orT1OkSBAFCmShdesH2bdvzxXxXu7rr4dQsWKRxO2VK5fRsGFNChbMyh135KRJk3ocPnzomrn8++8jPPvsY9xxR06KFs1Fu3YtOHz4IAC//LKIfPkCrhjZ07NnF5o3b5i4PXv2NOrUKUfevP5UrFiE4cMHXfP5Dh8+SFCQxdatmxP3RUScJSjIYuXKZRw+fJBmze4FoEiRIIKCrMQc/TeHN8pXSn4uIunNTReKYmNj2bVrF3Fxca6MR25XhQrBG2+Y+2+/bXoWuVnOnLBoEdxzj9k+dw5aPZyd13LP4cWqL2Jj89r813hz0Zs4bafb4xERkYxP108iyQUEQJYs7r8FBKQurvDwMyxduoDnn+9E1qxZr/h+YGBOAJxOJ+3btyA8/Axz5y5n+vTFHDy4n+eeezzZ8QcP7uPnn2cyceJcJk6cy+rVyxkyZAAA/fsPpUaNOjzzzIvs3HmMnTuPUbBg6FXjunjxAsOHD2TEiHHMm7eCv/8+zHvvvZmq19axYwc2b17P+PGzWbhwDWDz2GMPcelSylbgiouLo337ltStezcrV/7JokVr6NDhpWs2K7906RKtWzchW7bs/PzzbyxYsIqsWU0xJTY2lrvvbkhgYE5mz56W+Jj4+HhmzJhEmzbtANi8eQPPPvsYrVq1ZdWqrfTs+QH9+r3H+PFjUvXaExQsGMoPP5jnW7duFzt3HqN//6FXPTYl+XLFz0UkLUl1oejChQs8//zzZMmShXLlynH48GEAXnvtNQYMGODyAOU20qOHWct+1y4YNcojT5k9uxn2/NBDZvviRWjZ3JcHLn1Dv/v6ATBozSDaTm1LdFy0R2ISEZGMR9dPIunL/v17sW2bkiVLX/e45cuXsn37VkaOHE/lytWoXr0WX3/9A6tWLWfjxnWJxzmdTr78cgxly5anbt0GPPbYU6xYYfocBQYG4ufnR+bMWcibNx958+bDx8fnqs936dIlBg8eQZUq1alUqSovvNA58TwpsW/fHubPn83Qod9Rt24DKlSoxLff/sSxY0eZN29mis4RFRVJZGQEDzzwMEWLFqdUqTI88cQzhIYWvurx06dPwul0MmzYd5QrV4FSpcrw5Zej+fvvw6xcuQwfHx9atWrL1KnjEx+zfPlSIiLO0rz5owB8+eXn3H13Q9566z1KlChJu3YdeOGFznzxxWcpfu2X8/HxSZxiFhwcQt68+QgMvLIXVErzdas/F5G0JtWFol69erFlyxaWLVtGwGWl+UaNGjFp0iSXBie3mcBAeO89c//9980QnyVLyHPXXWaemJtkzgwzZkDr1mb70iV47DGL0EO9+PGRH8n
kyMSU7VNo9EMjTl847bY4REQk49L1k0j6Ytt2io7bvXsHBQuGUqhQ0gig0qXLEhiYk927dyTuK1y4CNmzZ0/czpcvPydPhqU6rixZslC0aPGbPs+uXTvw9fWlevVaifty5cpNiRKlksV7PUFBuWjXrgOPPtqEtm2bMWLE0OtOs9q2bQv79+8lNDQ7hQplo1ChbBQrlovo6GgOHNgHQJs27Vm5chnHjv0DwJQpP9G4cdPEkVu7d++gVq16yc5bu3Y99u3bQ3x8fIpff2qlNF+3+nMRSWtSXSiaOXMmw4cPp379+smGF5YrV459+/a5NDi5Db3yChQrZtauHzQIq3dvfPfswerdG1L4hn0z/PxgwgR45hmzHR8PTz8N59e2Z+GTCwn0D2TVkVXUGVWHfWf0ey4iIqmj6yeR9KV48TuxLIvdu3e65Hy+vpmSbVuWhdOZ+tYGVzvP5UUth8NxRZErpVPKUnOOL78czaJFa6hZsy7Tp0+iRo2SrFu39qrnO3/+HJUrV2PFis3JbuvX76Z1azO1rGrVGhQtWpzp0ydy8eJF5s2bQZs27VMV9+Usy3zMvfx1pDYPqXGjn4tIepPqQtHJkycJCQm5Yv/58+evOS/1VhQpUgTLsq64derUyeXPJWmAnx/072/u9++PtX49gPm6aJFbn9rX1yy81rGj2bZtePll2DDtXlY9t4rCgYXZc2YPdUbV4fe/f3drLCIikrF4+vpJRG5NUFAu7ruvCaNGfcn58+ev+H5C4+WSJctw9OgR/v77SOL3du7cTkTEWUqVKpvi5/Pz83PJyJg8eYI5cSJpdE98fDw7dmxL3C5VqgxxcXGsX590LXvmzGn27t2VGG/u3MGEhR1PVui4vCl0gooVq9CtWy8WLVpNmTLlk00du1ylSlXZt28PefKEUKxYiWS3y6d7tWnTnilTfmLBgjlYloPGjZsmfq9kyTL8/vuqZOddu3YVxYuXvOo0vTx5ggGSjXT672vIlMkvMUfXkpJ8iWREqS4UVa9enXnz5iVuJ1zcfPfdd9SpU8d1kf1r3bp1HDt2LPG2ePFiANq0aePy55I0ok0bqF4dYmKw//39sn18zLQ0N1fmHQ4YPty0S0rw1lvww6ByrHluLVXzV+XkhZPcO/ZeZu6c6dZYREQk4/D09ZOI3LqBA78kPj6eRo1qMnv2NPbt28OuXTv45pthNG5cF4B77mlE2bIVeOml9mzZspENG/7g1Vefpl69u6lSpXqKn6tw4SJs2PA7hw8f5PTpUzc12gigQYP7WLRoHgsXzmP37p107/5qstXEihe/k4ceakHXri+yZs1Ktm7dwksvPUn+/AV56KEWANSvfw+nTp1k6NBPOXBgHyNHfsmSJfMTz3Ho0AH69OnFH3+s4fDhQ/zyyyL27dtDyZJlrhpTmzbtyZ07D+3bt2D16t84dOgAK1cu4+23X+fo0b+THbdly0YGDfqYFi1a4+/vn/i9zp27sXz5Uj77rC979+5mwoSxfPfdcF577eoNozNnzkyNGrUZMmQAu3btYNWq5Xz88bvJjgkNvQPLsli4cC6nTp3k3LlzV5wnJfkSyYhSXSjq168f77zzDq+++ipxcXEMHTqUxo0bM3r0aD7++GOXBxgcHEy+fPkSb3PnzqV48eLcfffdLn8uSSMsCx57zNz9tzBkxcfDunVuH1WU8PQDBkDfvkn7Pv0U3u2an6VPLuehOx/iYtxFWk1qxbDfh7k9HhERSf88ff0kkl5ER8OFC+6/Rd/EmiRFihRj2bKN1K9/L+++2526dcvTqtX9LF++lEGDvgJM0fenn2aRM2cQTZvexSOPNKJIkWJ8/33qeo917vwmPj4+1K5dlhIlgvn778OpDxh48snnaNv2GV599Wkefvhu7rijGA0a3JvsmC+/HE2lStVo2/ZhmjSpA9hMnvwzmTKZ6VOlSpVh4MCv+O67L2nQoBIbN/5B585JBZnMmbOwZ89OnnnmUWrUKEnXri/xwgudePbZl68aU5YsWZg3bwWFChXm6adbUatWGV577XliYqLJnj1H4nHFipWgWrWa/PXXn1dMO6tUqSqjR09m+vSJ1K1bnn79/kevXh/Srl2Ha+biiy++Jy4ujnvvrUavXl3p3fujZN8vUKAgvXr1oU+fnpQsmZcePTpf9Tw3ypdIRmTZNzF5ct++fQwYMIAtW7Zw7tw5qlatyttvv02FChXcEWOi2NhYChQoQLdu3XjnnXeuekxMTAwxMTGJ25GRkYSGhhIeHk6OHDmu+pib4XQ6OXnyJMHBwTgcqa63yfXYNlbt2rB+PZcPxrd9fKBKFey1a001xwNGjIDOnS1s2zzfww/bjPvpEm8vf41vN34LQNdaXfns/s9wWOn390C/z+6nHHuG8uwZ7spzZGQkQUFBREREuPQ9O63w1vXTzYiMjCQwMNDlP4vYWCeTJ4cRFxdC7tz6G3UX23YCYUBIYq8Wb/L1jSYk5AChoUXx8zPN3C9eNCvPRkR4Lo7AQLPSbebMrjibDcQBvoCmj7pPxsxzbGw0R44cICysKHFxATd+gJultX8zMiLbdnL0aBjVqoVQs6Zrc+yO92zfm3lQ8eLFGTlypEsCSI2ZM2dy9uxZOnTocM1j+vfvT58+fa7Yf/LkSaJv5r8SrsHpdBIREYFt2/ow4mJ+v/5Krn97E13Oio+H9esJnzyZ2HvvvcojXa9VK/Dz86dTp5zExlrMnWvR5H4YM6YPwZmC+fj3jxny+xD2hO3hi/u+ILOvS648PE6/z+6nHHuG8uwZ7spzVFSUy86VFnnr+kkkLcqc2RRtYmM995x+fq4qEomIZGypLhT5+Phw7NixKxoynj59mpCQELcuTzhq1CgefPBBChQocM1jevXqRbdu3RK3E0YUBQcHu3xEkWVZ+l9rV7NtrMGDsX18TGHov9/28SFo8GDsxx7z2Kii556DIkVsWrWCqCiLP/7w47HH8jJ//oeUKVCG52Y/x7wD8ziz8AwzH59Jnix5PBKXK+n32f2UY89Qnj3DXXm+fNn4jMab108iaVXmzCrciIikRakuFF1rplpMTAx+fn63HNC1HDp0iCVLljB9+vTrHufv75+s8VkCh8Ph8g8NlmW55by3tYUL4SqjiRIkjCqyliyBJk08FlajRrBsGTz4IISFwbZtFvXrWyxa1J5FTxai5aSWrPl7DfVG12N++/mUyFXCY7G5in6f3U859gzl2TPckeeM/DPz1vWTiIiISGqluFA0bJhp2mtZFt999x3ZsmVL/F58fDwrVqygdOnSro/wX6NHjyYkJISmTZve+GBJn2zbrGzm4wM3+p/Vt9+Gxo09NqoIoGpVWLXKPO2BA3DoENSrB/Pm3c3q51bz4E8PsvfMXuqMqsPstrOpE6pVbEREbnfevn4SERERSa0UF4o+//xzwPyP2IgRI/Dx8Un8np+fH0WKFGHEiBGujxAzxH306NE888wz+PreVFslSQ8WLTIrm6XEli3w/ffw/PPujek/SpQwxaIHHoA//4RTp+Cee2DixDKsfWEtD49/mA3HNnDfD/fxU6
ufaFWmlUfjExGRtMWb108iIiIiNyPFVZcDBw4AcO+99zJ9+nSCgoLcFtR/LVmyhMOHD/Pcc8957DnFwxJGEzkc4HSm7DEvvQR588LDD7s3tv/Inx+WL4eWLc3XixfhkUdg2LB8LHtxGU9Me4K5u+fSenJrPm/yOV1qd/FofCIiknZ48/pJJK1ImHl5E4sti2RICX8L+pOQtCrVzQB+/fVXj1/kNG7cGNu2KVmypEefVzwoNhYOH055kQjMsY8/bqo1HpYzp2mn9MQTSaF07gwfvJONaW1m8Gr1V7Gx6bqwK28seIN4p5qUiojczrxx/SSSVjidmXA6ITb2grdDEUkTYmMv4HSavw2RtOim5nH9/fffzJ49m8OHDxP7nzUtBw8e7JLA5Dbj72+mnZ08mWy30+nkzJkz5MqVC8eGDbBjB4SGmq8bN8KmTaap9aRJ0KKFx0P+8UcoUgT69zf7Bg2Cw4d9GTv2S4rkLMLbS95myO9DOBx5mB8f+ZHMmbS0h4jI7UrXT3K7sm0fzp3LyalTYQD4+WXB8mCfSfewgTjMx6n0/lrSsoyVZ9u2iY29wKlTYZw7lxPb9rnxg0S8INWFoqVLl9K8eXOKFSvGzp07KV++PAcPHsS2bapWreqOGOV2ERpqbpdzOokLC4OQEMiSBY4dM1POxo6FChWgYEGYOxdatYKRI81a9h7kcEC/fnDHHdCxoxlZNGUK/POPxaxZPSgcWJhnZj7D9B3TuS/qPma3nU1w1mCPxigiIt6n6ye53Z07lw+AuLgwMsYChzbgxEzQSP8FjLQr4+XZ6YRz53Im/k2IpEWpLhT16tWLN998kz59+pA9e3amTZtGSEgI7du354EHHnBHjCJGqVLQujVkzw5t2pjC0AsvQHAwjB5tGlufOgVvveXR1dAAXn7Z1LgeewzOnzcNr+vWhZ9/bsvipwrQcmJL1v69ljqj6jC//XzuzH2nR+MTERHv0vWTiMW5c/k5fz4Eh+OSpy/VXM62ncBpIDeWlSEqX2lSRsuzbZvpZhpJJGldqgtFO3bsYMKECebBvr5cvHiRbNmy8eGHH9KiRQteffVVlwcpAoCPD5Qvb+7nzQsPPghz5kC3bqZY9Omn8PbbEBZm7nv4v6seeghWrICmTeH4cdi9G2rXhhkz7mL186t58KcH2Re+jzqj6jD7idnUDa3r0fhERMR7dP0kYti2D/Hx6f9DsilgZAICMkQBI61SnkW8I9V/bVmzZk2cV58/f3727duX+L1Tp065LjKRG6la1RSO5syBnj3hs8/M/kGDzBS0S5e8EtLatVCmjNk+dQoaNoTf55Vm7fNrqV6gOqcvnua+sfcxbfs0j8cnIiLeoesnERERSS9SXSiqXbs2K1euBOChhx6ie/fufPzxxzz33HPUrl3b5QGKXJNlQbNmpnfRlCnwxhswZowZeTR2rOlbdMHzq2vccYeZetaokdmOjYUOHeDzj/Lyy1PLaFayGTHxMbSZ0obP13zu8fhERMTzdP0kIiIi6UWqC0WDBw+mVq1aAPTp04eGDRsyadIkihQpwqhRo1weoMh1+fubfkUnTsCSJfDMMzBjBgQEmCbXjRtDeLjHwwoKgp9/hstnEnzyCTzVNivjms6gY/WO2Nh0W9SNLvO7EO+M93iMIiLiObp+EhERkfQi1T2KihUrlng/a9asjBgxwqUBiaRagQJw//2wYIFZq75ZM1i8GB5+2Aztuftu870CBTwaVqZM8NVXULYsdOliVjiYNQvuvsuHWbOGUyRnEXos6cGwP4ZxOPIwP7X6iSyZsng0RhER8QxdP4mIiEh6keoRRcWKFeP06dNX7D979myyiyARj6pVy6yKNnMmRERA/fqms3S+fLB1K9SrB3v2eCW0zp1h/nwIDDTbW7ZArVoWDXzeYuKjE/Hz8WPmzpncN/Y+ws6HeSVGERFxL10/iYiISHqR6kLRwYMHiY+/cppMTEwMR48edUlQIqlmWdCyJfj5wbRpZvhOxYqwejWUKAEHD5ri0caNXgmvcWNYswaKFzfbJ07APfdA/J+Ps+SpJQQFBPH70d+pM6oOu0/v9kqMIiLiPrp+EhERkfQixVPPZs+enXh/4cKFBCYMjwDi4+NZunQpRYoUcWlwIqmSOTM8+qhpaL1sGdx3HxQtCitXwoMPwqZNpjozaxbce6/HwytTBn7/3YS4fDnExED79vDWWw1Y+eYaHp7wIPvD91NnVB1mt51NvcL1PB6jiIi4lq6fREREJL1JcaGoZcuWAFiWxTPPPJPse5kyZaJIkSIMGjTIpcGJpFrhwqZAtHSpWX6seHHImxd+/dWMOFq2DB54ACZMMKuieVju3LBokZmONnKk2ffZZ7B5cynmjVrDMwuase6fdTT8oSE/tvqR1mVbezxGERFxHV0/iYiISHqT4qlnTqcTp9NJ4cKFCQsLS9x2Op3ExMSwa9cuHn74YXfGKpIy9epBsWIwfTqcO2f2BQaaRkGPPGLWq2/TJqlS42F+fvDNNzB8OPj+W6pdvBia3p2XYVV/pXmp5sTEx/DYlMcYvGYwtm17JU4REbl1un4SERGR9CbVPYoOHDhAnjx53BGLiGtYlikIWZYpFjmdZn9AAEyeDC+8YPa99BL06wdeKMRYFnTqZAY+hYSYfQcOwH0NsvK4PZ1ONTphY9N9UXden/868c4r+1qIiEj6oesnERERSS9SXChas2YNc+fOTbbvhx9+oGjRooSEhPDSSy8RExPj8gBFbkq2bKYZ0IEDpkdRAl9f+PZb6NXLbPfuDd26JRWTPOyuu2D9eqhe3WxfvAjt2/mQedkXfNpwIADD1w3n0cmPcuHSBa/EKCIiN0/XTyIiIpLepLhQ9OGHH/LXX38lbm/dupXnn3+eRo0a0bNnT+bMmUP//v3dEqTITSla1FRifv0VDh1K2m9ZZiTR4MFme8gQeOYZuHTJK2GGhsJvv0GHDkn7Bn5msahPd0Y9MBl/H39m7ZrFvWPvJex8mFdiFBGRm6PrJxEREUlvUlwo2rx5Mw0bNkzcnjhxIrVq1WLkyJF069aNYcOGMXnyZLcEKXLT7r7bNLieNg0u/GdEzhtvwLhxZpTRjz9CixZw/rxXwgwIgO+/hy++SOpbtGQJ9G3bhi9rLSVX5lz8cfQPan9Xm12ndnklRhERSb20cP00YMAALMuia9eubn0eERERyRhSXCgKDw8nb968idvLly/nwQcfTNyuUaMGR44ccW10IrfK4TBT0OLiYObMK/sRPfkkzJoFmTObZtf33w9nznglVMsyq6EtXQrBwWbfwYPQ8eF6dMm6hmJBxThw9gB1v6/LysMrr3suERFJG7x9/bRu3Tq++eYbKlas6LbnEBERkYwlxYWivHnzcuDAAQBiY2PZuHEjtWvXTvx+VFQUmTJlcn2EIrcqRw7T3Hr3bli79srvP/SQGb6TMyesWWOmqx096vEwE9x1F2zYADVrmu3YWHi/c0mqblxDjfy1OHPxDI1+aMSUv6Z4LUYREUkZb14/nTt3jvbt2zNy5EiCgoLc8hwiIiKS8fim9MCHH
nqInj178sknnzBz5kyyZMlCgwYNEr//559/Urx4cbcEKXLL7rwT6tY1BaHChaFgweTfr1vXNApq0gT++stsL1oEpUp5JdyEvkVvvmmmowFMHRtCmU2/0PC1diw9OovHpj7GZxGf0b1OdyzL8kqcIiJyfd68furUqRNNmzalUaNGfPTRR9c9NiYmJllT7cjISACcTidOFy74YM5lA05vLDp627DtpDyLeyjHnqE8e4by7H4JObZtp8vXUXLl+3SCFBeK+vbtS6tWrbj77rvJli0bY8eOxc/PL/H733//PY0bN3Z5gCIu07AhHD4MU6fCyy+bxkCXK18eVq2Cxo1hzx6oX99MR0tYkszD/Pxg2DCoVw9eeAHOnYMdf2bh0BvTaPLRGyw8+wVvLX6Lg2cPMvSBofg4fLwSp4iIXJu3rp8mTpzIxo0bWbduXYqO79+/P3369Lli/8mTJ4mOjnZZXJcuOfHxicCybFIxsF1SzQlEYD74Kc/uoRx7hvLsGcqz+znx948gJsYmLMy1OY6KinLp+QAs207d/+dERESQLVs2fHySfyg9c+YM2bJlS3bxkxZERkYSGBhIREQEOXLkcNl5nU4nYWFhhISE4HDoj8ldXJ7ns2dhxAgoVgzatDGNgf4rLAwefBA2boRs2Uxvo8sakXrDrl2m1VLSwjk29d4cwups3bGxaVayGRMenUBWv6w3dX79PrufcuwZyrNnuCvP7nrPTgs8ef105MgRqlevzuLFixN7E91zzz1UrlyZIUOGXPUxVxtRFBoaSnh4uEt/FrGxTqZOPUlcXDC5culv1F3M/1yfBIKxLOXZHZRjz1CePUN5dj/bdvLPPyepWjWYGjVcm+PIyEiCgoJcev2U4hFFCQIDA6+6P1euXLccjIjb5cwJzZvD5MmmEdDVRguFhMCvv5q+Rr/8YnoY/fQTtG7t8XATlCoFv/8Or75qFmoDi1UD36B4s1CO1HiSObvncM/Ye5j7xFzyZst7g7OJiIinefL6acOGDYSFhVG1atXEffHx8axYsYLhw4cTExNzRcHK398ff3//K87lcDhcWgw0p7IAhz6MuJ3y7H7KsWcoz56hPLufhWW59n0VcMt/juq3QG4/ZcuaTtELFsDx41c/JkcO+PlnUxyKjYXHHjMjkbwoa1YYOxa+/RYSruX3zWmN70+/kN0nN+v/WU/tUbXZeWqnV+MUERHvatiwIVu3bmXz5s2Jt+rVq9O+fXs2b958RZFIRERE5HIqFMntqXFjyJMHpkwxhaCr8feHiRNNPyPbNsN5+vbFm903LQtefNEszpbQ+/TCrrpEfb6G7JeKc/DsQeqOqstvh37zWowiIuJd2bNnp3z58sluWbNmJXfu3JQvX97b4YmIiEgap0KR3J58fU2PoqgomDfv2sf5+MDXX8N775nt//0PunTB5a3qU6lKFdNCqV27f3ecuZOoz9cQcKoW4dHhNBrXiEnbJnk1RhEREREREUl/VCiS21fu3PDww7BlC2zefO3jLAs+/BCGDjXbX3wBTz557ZFIHpIjB/z4I4weDVmyABeCif7mFxy7HiE2Ppa209ry6apPSWW/ehERyYCWLVt2zUbWIiIiIpdToUhubxUrmuE58+bByZPXP/b1101Ta19fmDDBNMU+f94zcV6DZUGHDmZ0UeXKwKUsOCdOgbVdAHh7ydt0+rkTcc44b4YpIiIiIiIi6YQKRSIPPmhWQ5syBS5duv6x7drBnDlmCM/ChdCwIZw+7ZEwr6dUKVi71tSysH1gwRBY8DnYFl+v/5pHJj3C+VjvFrVEREREREQk7VOhSMTPz/QrOnPGFH9u5IEHYOlSyJXLrFnfoAEcOeL+OG/A39/Mjps928yqY21XmDwVLgUwd/dc7hp9N8fPXWOVNxERERERERFUKBIxQkLgoYdg/XrYtu3Gx9euDb/9BoUKwY4dUK8e7Ewby9I3a2baLt17L7CjFYz9Bc7nYePxDVT7ujY7Tu7wdogiIiIiIiKSRqlQJJKgShUoX95MLTtz5sbHly0Lq1aZeV9HjkD9+vDHH+6PMwUKFoQlS2DgQPALqwOj1sDpEvxz4RDVvqrLsgMrAIiOi2bclnG0ntKaVrNb0XpKa8ZtGUd0XLSXX4GIiIiIiIh4gwpFIgksywzHyZoVpk6FuBQ0gC5cGFauhBo1TK+i++6DxYvdH2sKOBzQvbsZJFWhYAlTLDpSh4uc5b4x9/PS1DcpMKgAT898mlm7ZrHm2Bpm7ZrF0zOfpsCgAszZNcfbL0FEREREREQ8TIUikcv5+5t+RSdOmCE5KZEnj+lZ1KiRWQWtaVOYNMm9caZChQqwbh282TEP/LAUtrfCdsQy8q9BhEeHA+C0ncm+no0+S4uJLZi9a7bX4hYRERERERHPU6FI5L/y54fGjc0yYnv2pOwx2bPD3Lnw2GNm5bQnnoCvvnJvnKng7w+ffQa/LspMwXU/QJz/dY+3sQHoMLODpqGJiIiIiIjcRlQoErmamjVNw+rY2JQ/xt8fxo+Hjh3BtqFTJ/jgA3M/jbjnHnj3p+ngG3PDY21swqPDmbp9qvsDExERERERkTRBhSKRq7EseOABKFcudY/z8YHhw02BCKBPH+jcGeLjXR7izVp8ZCYOK2V/+g7LwYydM9wckYiIiIiIiKQVKhSJuJplwfvvm4KRZZkpaO3aQcyNR/F4wukLpxN7Ed2I03Zy5kIKVoATERERERGRDEGFIhF36dQJJkyATJlg8mR4+GE4d87bUZE7S+4UjyiysMiVOZebIxIREREREZG0QoUiEXd6/HHT5DprVrOK2n33walTXg2pZamWKR5RZGOzdv921hxZ6+aoREREREREJC3w9XYAIi7z/PNm9I6fX9K+bt2gSBHPxvHPP/D55xAZaQpEXbvCL7/AQw+Zderr14dFi6BwYTh4EL75Bs6eNY996imoWzfpXLYN774L+/bBxIkuCa9NuTZ0WdCFs9FnE1c3u6p/v/VP7E7qfl+HxoVbMLjpx5QLSWXfJhEREREREUk3VCiSjKVHDyhW7PrHxMebptPX2r6ehBXMLOvax3z5pWmE3bAhrFoFQ4bA4MHw22/QpAns2mWKQXPnmj5G3bpB2bLgdEJUVPJzzZoF+fKZQpGLBPgGMLblWFpMbIGFddVikdkPzPoOQtdAle9ZdHgWFb6ezZMVnuLD+/pQJGcRl8UkIiIiIiIiaYOmnsntoVkz+OknU5QZO9YUb4YOhZ49TS8hgOnTzdL2nTvDwIFw/rzZP3489O8P//ufOfbMdZo7R0TAnj1mHXowBaGTJ+HYMShTxhSOypSBo0fhrrsgIMAUiQAcDggMTDrX4cOwdi20bu3qbNCsVDNmtp1JzoCc5qn/7VmU8DVnQE5mtZ3F5HeeI+8fI+Grv+Cv1tjYjNv6A3cOK0mX+V0IOx/m8thERERERETEezSiSDKWTz9NPvVs4MCkbR8fM7IHTKFo715zfObMsGEDLF5sjs+a1Yz0GTvWFI4Adu40haWcOc32Bx9A+/Zw553Jn//kSciVK2mEkmVBcLDZnz8/
hIaakUVNm8Lvv8O335riUlCQmSL3/POmWBQXB198Aa+/nvLRTqnUvFRz/un+D1O3T2X6jukcjzhOvsB8tCrTitZlWxPgGwCloVEjePvt0owcOQVWrYeG7xBXfDHD/hjGqE2j6FanG93rdCcwIPDGTyoiIiIiIiJpWpofUXT06FGefPJJcufOTebMmalQoQLr16/3dliSVvXoAcOGJd0uLxo1apT82Pr1TZEIYPNmaNDAFInA9BPavDnp2GrVkopEYApF/y0SpVTu3KaxddmycOmSWRmtdm2z/6uvzDETJpjRSKGhN/ccKRTgG8CTFZ9kapupTG8+naltpvJkxSdNkehfQUGmnrV8OZTKXh3GLYKxS+BoDc5fOk/fFX0pPqw4g9cMJjou2q3xioiIiIiIiHul6UJReHg49erVI1OmTMyfP5/t27czaNAggoKCvB2apEcJRaEEAQFXPy4lj72W4GAzNS0+3mzbthlNFByc/Lhs2UyxqUoVc2z79qax9a5d5vvbtsGcOWaEUY8ecOGCuR8RkfKYXeyuu0zt7H//g0x/N4SRv8OkaXCyNKcvnqb7ou7c+cWdjNo4ijhnnNfiFBERERERkZuXpgtFn3zyCaGhoYwePZqaNWtStGhRGjduTPHixb0dmmQ0lSvDypWmIAOwYIEp4qRWYCAULw7Llpnt1ashTx4z7ey/7rkHataEl1822x98AAcOmOLSJ5/A99/DqFFmelyWLOZ+oHendwUEQJ8+pmBUt64FO1rB11th5vdYUaH8Hfk3L8x5gfJflWfa9mnY9nVWVRMREREREZE0J033KJo9ezZNmjShTZs2LF++nIIFC9KxY0defPHFaz4mJiaGmJiYxO3IyEgAnE4nTqfTZbE5nU5s23bpOeVKqcmzlVBguWy6mf3881CxIpZtYzudZmUxMMWYy7erVIH77sPq3h0cDuwiReCVV5KOufxYwOrTB7tdu6tPP+vYEWvIEJg0CbJkwX799aTHfvGFKQ7VqmWmmrVpgzVtGtSsifXHH7BiBfYrr2APH57Um8jpTIrfTVL7+1y6tJmK9s030Lu3DxGbn8Xe9gRU/xrfe/ux6/QuWk9pTfX81fn4vo9pVKzRjU+awenfDM9Qnj3DXXnWz01ERETE+yw7Df+Xf8C/U4O6detGmzZtWLduHV26dGHEiBE888wzV33MBx98QJ8+fa7Yv3v3brJnz+6y2JxOJxEREQQGBuJwpOmBWena7ZTnzGPHkqNXLyzbJvrhhzk7fDj4+3vkuW8lzydPOujbNztTpvw7Pc8/EuoMwveuwcQ5zgHQoGAD3qn5DpVDKrs48vTjdvpd9ibl2TPcleeoqChKlixJREQEOXLkcNl5JfUiIyMJDAx0+c8iNtbJ5MlhxMWFkDu3/kbdxbadQBgQgmUpz+6gHHuG8uwZyrP72baTo0fDqFYthJo1XZtjd7xnp+lCkZ+fH9WrV2f16tWJ+15//XXWrVvHmjVrrvqYq40oCg0NJTw83KUXOk6nk5MnTxIcHKwPI2502+V56lSsp57Cio3Fvu8+7OnTwYUFzmtxRZ5/+w06d7bYts0yO7KGEXD/x8RVHkEcsQA8UvoR+t7TlzLBZVwVerpx2/0ue4ny7BnuynNkZCRBQUEqFKUBKhSlb/rQ537KsWcoz56hPLtfeisUpempZ/nz56ds2bLJ9pUpU4Zp06Zd8zH+/v74X2UUhsPhcPmHBsuy3HJeSe62yvNjj5kpaS1bYv3yC1bDhvDzzxAS4vanvtU83303bNwIw4ebhtfnzoUQPXMo/NqN4DYfcLrQD8zYOYNZu2bxTKVn+OCeDygcWNjFryJtu61+l71IefYMd+RZPzMRERER70vTV2T16tVjV8IqUP/avXs3d9xxh5ciEvGAhg3h119NE+wNG6BBA7MiWjqQKRO88YZZvK1t2393RtzBye9G4/zqT+640BKn7WT05tHc+cWddFvYjVMXTnk1ZhEREREREUmSpgtFb7zxBmvXrqVfv37s3buX8ePH8+2339KpUydvhybiXtWrw6pVULgw7N4N9erBtm3ejirFChSACRNgyRLT+BqAsHIc+nQGWcavoZjjHmLjY/l87ecUG1qMD5d/SFRMlFdjFhERERERkTReKKpRowYzZsxgwoQJlC9fnr59+zJkyBDat2/v7dBE3K9kSVi9GsqVg3/+gbvuMtvpSMOGsGULDBwICdNlL+yuzf7//UL+JQsplrkqUbFRvL/sfYoPK87QtUOJiYu5/klFRERERETEbdJ0oQjg4YcfZuvWrURHR7Njxw5efPFFb4ck4jkFC8KKFVCnDoSHQ6NGpmdROuLnB927w5498NJLYFkAFsdWNmZ/z3WU3zGZO7LdyckLJ+m6sCslh5dkzOYxxDvjvR26iIiIiIjIbSfNF4pEbnu5cpk5XA8+CBcvQvPm8OOP3o4q1UJC4JtvYNMmuOeef3faDrZNasPhnn9xd+S35MtagMMRh3l21rNUHFGRmTtnkoYXZhQREREREclwVCgSSQ+yZIFZs+DJJyE+Hp56CoYM8XZUN6VSJfjlF5g6FYoUMfvsuEwsH/wi5/vv5UGfzwgKCGL7ye08MukR6oyqw68HfvVqzCIiIiIiIrcLFYpE0otMmWDsWOja1Wy/8Qa88w6kwxE3lgWPPgo7dkC/fpA1q9kfdSYz8997k8zf7ufhHL3JkikLvx/9nft+uI8mPzZhwz8bvBu4iIiIiIhIBqdCkUh64nDA4MGmugLQv79p/BMX5924blJAAPTqZfoXvfCCeXkA/+zPydxuH1F4xj6a5e1MJkcmFu1bRPWR1Xl86uPsPr3bu4GLiIiIiIhkUCoUiaQ3lmWqK99+ayor330Hjz0G0dHejuym5c8PI0fCn3/Cww8n7d+5Ph9zXv2CGmt30bTQU1hYTP5rMmW/LMtLc17i78i/vRe0iIiIiIhIBqRCkUh69eKLMGWKWVZsxgzT7Doy0ttR3ZJy5WDOHPj1V6hePWn/6p+LMu+FH3jw0BYaFmpGvB3PyI0jufOLO+mxuAenL5z2XtAiIiIiIiIZiApFIulZq1awYAFkzw7LlpnlxE6c8HZUt+yee+D332HiRChaNGn/z6MrsOLV2TwSvpJa+RoQHRfNZ6s/o9iwYny84mPOxZ7zWswiIiIiIiIZgQpFIundvfeaIlFwsFl7vn59OHDA21HdMocDHn/cNLz+/HPIlcvsv3QJZgytx+auy2l18WfK5a5EZEwk7/76LiWGlWD4H8OJjY/1bvAiIiIiIiLplApFIhlB1aqwapVZb37vXqhb1zT8yQD8/c1Cb/v2Qe/eSSukxURbTP/kQQ69s5FWzvEUDSzOifMneG3+a5QeXpof//yReGe8V2MXERERERFJb1QoEsko7rzTFIvKl4fjx+Guu2DlSm9H5TI5c8JHH8H+/fDGG6aABHAuysH0D5/g7Mc7aOn7NXmz5uPA2QM8NeMpqnxThbm752LbtldjFxERERERSS9UKBLJSAoUgBUroF49iIiA++833aEzkJAQGDzYDJx6+WXw9TX
[... remainder of base64-encoded image/png notebook output omitted ...]", "text/plain": [ - "
" + "Parameter containing:\n", + "tensor([-3.3755, -5.5730, -3.7751, ..., -2.6631, -6.7499, -4.9545],\n", + " requires_grad=True)" ] }, + "execution_count": 15, "metadata": {}, - "output_type": "display_data" + "output_type": "execute_result" } ], + "source": [ + "model.model.layers[20].mixer.dt_proj.bias" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [] } ], diff --git a/fast_llm/models/ssm/external/apriel_15b_hybrid/configuration_ssm_hybrid_apriel15b.py b/fast_llm/models/ssm/external/apriel_15b_hybrid/configuration_ssm_hybrid_apriel15b.py index 5f42bf35..ef991830 100644 --- a/fast_llm/models/ssm/external/apriel_15b_hybrid/configuration_ssm_hybrid_apriel15b.py +++ b/fast_llm/models/ssm/external/apriel_15b_hybrid/configuration_ssm_hybrid_apriel15b.py @@ -25,7 +25,7 @@ "conv_bias": True, # nemotron mamba2 "head_dim": 128, - "layer_norm_epsilon": 1e-5, + # "layer_norm_epsilon": 1e-5, } From 662e9efeeb5c96b31e9812d360c3f7feaa8ed4a1 Mon Sep 17 00:00:00 2001 From: oleksost Date: Mon, 25 Aug 2025 12:57:04 +0000 Subject: [PATCH 13/18] convertion --- fast_llm/layers/ssm/mamba2.py | 4 ++-- fast_llm/models/ssm/conversion.py | 8 ++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index 34cd8c54..bd9cf7b4 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -376,7 +376,7 @@ def __init__( lr_scale=lr_scale, ) - self.dt_bias = ParameterMeta.from_dims( + self.dt_proj_bias = ParameterMeta.from_dims( (tensor_space[SSMDimNames.composite_heads],), init_method=init_dtprojbias(self._config.dt_max, self._config.dt_min, self._config.dt_init_floor), lr_scale=lr_scale, @@ -491,7 +491,7 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ chunk_size=self._config.chunk_size, D=self.D, z=None, - dt_bias=self.dt_bias, + dt_bias=self.dt_proj_bias, dt_softplus=True, seq_idx=seq_idx, # assume this is used for packing cu_seqlens=cu_seqlens, # assume this is used for packing, but maybe not needed at training diff --git a/fast_llm/models/ssm/conversion.py b/fast_llm/models/ssm/conversion.py index f3c3fcf2..18ab09c7 100644 --- a/fast_llm/models/ssm/conversion.py +++ b/fast_llm/models/ssm/conversion.py @@ -307,6 +307,14 @@ def _create_weight_converters( self._model.config.base_model, ) ) + # for nemotron mamba2, bias is a seperate parameter + converters.append( + WeightConverter( + f"layers.{offset+i+1}.mixer.dt_proj_bias", + f"{hf_base_prefix}model.layers.{i}.mixer.dt_bias", + self._model.config.base_model, + ) + ) converters.append( WeightConverter( From f78055cdefaf8868ee209bd2fcacb3f8432a79cc Mon Sep 17 00:00:00 2001 From: oleksost Date: Mon, 25 Aug 2025 12:57:38 +0000 Subject: [PATCH 14/18] undo requirement varlen for m2 testing --- setup.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index c2eb1f6f..f36c91b9 100644 --- a/setup.cfg +++ b/setup.cfg @@ -50,7 +50,8 @@ HUGGINGFACE = # To install on cpu environment (ex. 
for IDE support): # MAMBA_FORCE_BUILD=TRUE CAUSAL_CONV1D_FORCE_BUILD=TRUE CAUSAL_CONV1D_SKIP_CUDA_BUILD=TRUE pip install -e ".[CORE,SSM]" --no-build-isolation SSM = - mamba_ssm[causal-conv1d] @ git+https://github.com/jxiw/varlen_mamba.git@varlen_mamba + mamba_ssm[causal-conv1d]==2.2.4 + # mamba_ssm[causal-conv1d] @ git+https://github.com/jxiw/varlen_mamba.git@varlen_mamba cartesia_pytorch>=0.0.2 # GENERATION = From eb8a54e143a6053a3c744f2df17cc0bcdb343524 Mon Sep 17 00:00:00 2001 From: oleksost Date: Mon, 25 Aug 2025 13:07:31 +0000 Subject: [PATCH 15/18] varlen --- setup.cfg | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index f36c91b9..c2eb1f6f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -50,8 +50,7 @@ HUGGINGFACE = # To install on cpu environment (ex. for IDE support): # MAMBA_FORCE_BUILD=TRUE CAUSAL_CONV1D_FORCE_BUILD=TRUE CAUSAL_CONV1D_SKIP_CUDA_BUILD=TRUE pip install -e ".[CORE,SSM]" --no-build-isolation SSM = - mamba_ssm[causal-conv1d]==2.2.4 - # mamba_ssm[causal-conv1d] @ git+https://github.com/jxiw/varlen_mamba.git@varlen_mamba + mamba_ssm[causal-conv1d] @ git+https://github.com/jxiw/varlen_mamba.git@varlen_mamba cartesia_pytorch>=0.0.2 # GENERATION = From 33281d5cbeb923f95046f85df6000ba193040684 Mon Sep 17 00:00:00 2001 From: oleksost Date: Mon, 25 Aug 2025 13:41:01 +0000 Subject: [PATCH 16/18] wip --- fast_llm/models/ssm/external/15B_hybrid.ipynb | 777 +++++++++++++++++- 1 file changed, 733 insertions(+), 44 deletions(-) diff --git a/fast_llm/models/ssm/external/15B_hybrid.ipynb b/fast_llm/models/ssm/external/15B_hybrid.ipynb index b1e945ac..cfc85ef7 100644 --- a/fast_llm/models/ssm/external/15B_hybrid.ipynb +++ b/fast_llm/models/ssm/external/15B_hybrid.ipynb @@ -1558,7 +1558,112 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Instantiating mamba2 with num_heads: 256, head_dim: 16, \n", + " intermediate_size: 4096, \n", + " d_xb: 1024, \n", + " number_xb_heads: 64, \n", + " repeat_groups: 4, \n", + " d_state: 16\n", + "Instantiating mamba2 with num_heads: 256, head_dim: 16, \n", + " intermediate_size: 4096, \n", + " d_xb: 1024, \n", + " number_xb_heads: 64, \n", + " repeat_groups: 4, \n", + " d_state: 16\n", + "Loading checkpoint shards: 100%|██████████| 3/3 [00:00<00:00, 382.75it/s]\n" + ] + } + ], + "source": [ + "# model.save_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_nmtrhnm2_5l_debug\")\n", + "model = AprielThinkerSSMHybridForCausalLM.from_pretrained(\"/mnt/checkpoints/ssm/apriel_ssm_thinker15b_hybrid_nmtrhnm2_5l_debug\")" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "model.embed_tokens.weight\n", + "model.layers.0.self_attn.q_proj.weight\n", + "model.layers.0.self_attn.k_proj.weight\n", + "model.layers.0.self_attn.v_proj.weight\n", + "model.layers.0.self_attn.o_proj.weight\n", + "model.layers.0.mlp.gate_proj.weight\n", + "model.layers.0.mlp.up_proj.weight\n", + "model.layers.0.mlp.down_proj.weight\n", + "model.layers.0.input_layernorm.weight\n", + "model.layers.0.post_attention_layernorm.weight\n", + "model.layers.1.self_attn.q_proj.weight\n", + "model.layers.1.self_attn.k_proj.weight\n", + "model.layers.1.self_attn.v_proj.weight\n", + "model.layers.1.self_attn.o_proj.weight\n", + "model.layers.1.mlp.gate_proj.weight\n", + "model.layers.1.mlp.up_proj.weight\n", + 
"model.layers.1.mlp.down_proj.weight\n", + "model.layers.1.input_layernorm.weight\n", + "model.layers.1.post_attention_layernorm.weight\n", + "model.layers.2.mixer.dt_bias\n", + "model.layers.2.mixer.A_log\n", + "model.layers.2.mixer.D\n", + "model.layers.2.mixer.conv1d.weight\n", + "model.layers.2.mixer.conv1d.bias\n", + "model.layers.2.mixer.in_proj.weight\n", + "model.layers.2.mixer.dt_in_proj.weight\n", + "model.layers.2.mixer.norm.weight\n", + "model.layers.2.mixer.out_proj.weight\n", + "model.layers.2.mlp.gate_proj.weight\n", + "model.layers.2.mlp.up_proj.weight\n", + "model.layers.2.mlp.down_proj.weight\n", + "model.layers.2.input_layernorm.weight\n", + "model.layers.2.post_attention_layernorm.weight\n", + "model.layers.3.mixer.dt_bias\n", + "model.layers.3.mixer.A_log\n", + "model.layers.3.mixer.D\n", + "model.layers.3.mixer.conv1d.weight\n", + "model.layers.3.mixer.conv1d.bias\n", + "model.layers.3.mixer.in_proj.weight\n", + "model.layers.3.mixer.dt_in_proj.weight\n", + "model.layers.3.mixer.norm.weight\n", + "model.layers.3.mixer.out_proj.weight\n", + "model.layers.3.mlp.gate_proj.weight\n", + "model.layers.3.mlp.up_proj.weight\n", + "model.layers.3.mlp.down_proj.weight\n", + "model.layers.3.input_layernorm.weight\n", + "model.layers.3.post_attention_layernorm.weight\n", + "model.layers.4.self_attn.q_proj.weight\n", + "model.layers.4.self_attn.k_proj.weight\n", + "model.layers.4.self_attn.v_proj.weight\n", + "model.layers.4.self_attn.o_proj.weight\n", + "model.layers.4.mlp.gate_proj.weight\n", + "model.layers.4.mlp.up_proj.weight\n", + "model.layers.4.mlp.down_proj.weight\n", + "model.layers.4.input_layernorm.weight\n", + "model.layers.4.post_attention_layernorm.weight\n", + "model.norm.weight\n", + "lm_head.weight\n" + ] + } + ], + "source": [ + "for k,v in model.state_dict().items():\n", + " print(k)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, "metadata": {}, "outputs": [], "source": [ @@ -1568,16 +1673,66 @@ "config_thinker = AutoConfig.from_pretrained(path_thinker)\n", "\n", "# config_hybrid = AprielSSMHybridConfig.from_pretrained(path_hybrid)\n", - "config_thinker.num_hidden_layers = 5\n", - "hybrid_block_layout = [\"t\"] * config_thinker.num_hidden_layers\n", - "# debug\n", - "hybrid_block_layout[2] = \"nm2\"\n", - "hybrid_block_layout[3] = \"nm2\"\n", + "# config_thinker.num_hidden_layers = 5\n", + "# hybrid_block_layout = [\"t\"] * config_thinker.num_hidden_layers\n", + "# # debug\n", + "# hybrid_block_layout[2] = \"nm2\"\n", + "# hybrid_block_layout[3] = \"nm2\"\n", "\n", "# 25/50\n", - "# hybrid_block_layout = [\"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"t\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"t\", \"nm2\", \"t\", \"t\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"t\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"nm2\", \"t\", \"nm2\" ]\n", - "\n", - "\n", + "hybrid_block_layout = [\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"t\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"t\",\n", + " \"nm2\",\n", + " \"t\",\n", + " \"t\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", 
+ " \"nm2\",\n", + " \"t\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"nm2\",\n", + " \"t\",\n", + " \"nm2\"\n", + " ]\n", + " \n", "ssm_config = {\n", " \"d_state\": 16,\n", " \"d_xb\": 1024,\n", @@ -1603,27 +1758,27 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 28, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "['t', 't', 'nm2', 'nm2', 't']" + "50" ] }, - "execution_count": 6, + "execution_count": 28, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "config_hybrid.hybrid_block_layout" + "len(config_hybrid.hybrid_block_layout)" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 29, "metadata": {}, "outputs": [], "source": [ @@ -1701,9 +1856,16 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 30, "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 0%| | 0/7 [00:00 Date: Tue, 26 Aug 2025 11:25:49 +0000 Subject: [PATCH 17/18] rms norm --- fast_llm/layers/ssm/config.py | 2 +- fast_llm/layers/ssm/mamba2.py | 11 +++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/fast_llm/layers/ssm/config.py b/fast_llm/layers/ssm/config.py index 32dc27c1..f8d57799 100644 --- a/fast_llm/layers/ssm/config.py +++ b/fast_llm/layers/ssm/config.py @@ -331,7 +331,7 @@ def setup_tensor_space(self, tensor_space: TensorSpace, block_type: SSMBlockType # for the norm tensor_space.add_tensor_dim( TensorDim( - SSMDimNames.composite_heads_and_head_dim_nontp, head_groups.size * group_heads.size * head_dim.size + SSMDimNames.composite_heads_and_head_dim_nontp, num_head_groups * group_heads.size * head_dim.size ) ) # state and head dim are not the same diff --git a/fast_llm/layers/ssm/mamba2.py b/fast_llm/layers/ssm/mamba2.py index bd9cf7b4..f2735092 100644 --- a/fast_llm/layers/ssm/mamba2.py +++ b/fast_llm/layers/ssm/mamba2.py @@ -408,6 +408,7 @@ def init_(meta: ParameterMeta, tensor: torch.Tensor, generator: torch.Generator) sequence_parallel=self._sequence_parallel, lr_scale=lr_scale, ) + # TODO: this norm does nto support TP. So we need a workaround! self.norm = MambaRMSNormGated( inner_dim_non_tp, group_size=self._local_inner_size, @@ -506,12 +507,14 @@ def forward(self, input_: torch.Tensor, kwargs: dict[str, typing.Any]) -> tuple[ # y: (batch, local_heads * state, sequence) -> (batch, sequence, local_heads * state) y = y.view(batch, sequence_length, -1) - # gate norm - y = self.norm(y, gate=z) - if kwargs[TransformerKwargs.sequence_first]: # TODO: Is contiguous needed? 
y = y.transpose(0, 1).contiguous() + z = z.transpose(0, 1).contiguous() + # in tp need to to gather the y and z, cause norm does not + # gate norm + y = self.norm(y, gate=z) # (batch/sequence, sequence/batch, local_heads * state) # -> (batch/local_sequence, local_sequence/batch, hidden) - return self.out_proj(y) + out = self.out_proj(y) + return out From 7a047b4364d0d8dd4e490ec4a41c27a268418d42 Mon Sep 17 00:00:00 2001 From: oleksost Date: Tue, 26 Aug 2025 11:25:57 +0000 Subject: [PATCH 18/18] clean up --- .../make_hybrid_checkpoint_with_identity.py | 41 ---- ...brid_checkpoint_with_importance_15b_mil.py | 176 ------------------ .../make_hybrid_checkpoint_with_mil.py | 104 ----------- 3 files changed, 321 deletions(-) delete mode 100644 fast_llm/models/ssm/external/make_hybrid_checkpoint_with_identity.py delete mode 100644 fast_llm/models/ssm/external/make_hybrid_checkpoint_with_importance_15b_mil.py delete mode 100644 fast_llm/models/ssm/external/make_hybrid_checkpoint_with_mil.py diff --git a/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_identity.py b/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_identity.py deleted file mode 100644 index a0616ab6..00000000 --- a/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_identity.py +++ /dev/null @@ -1,41 +0,0 @@ -import gc - -import click -import torch -from transformers import AutoConfig, AutoModelForCausalLM - -from fast_llm.models.ssm.external.apriel_15b_hybrid.configuration_ssm_hybrid_apriel15b import AprielSSMHybridConfig -from fast_llm.models.ssm.external.apriel_15b_hybrid.modeling_ssm_hybrid_apriel15b import AprielSSMHybridForCausalLM - -device = "cuda" if torch.cuda.is_available() else "cpu" - - -@click.command() -@click.option("--identity_index", type=int, required=True) -@click.option("--save_dir", type=str, required=True) -def main(identity_index: int, save_dir: str): - checkpoint = "ServiceNow-AI/Apriel-Nemotron-15b-Thinker" - config = AutoConfig.from_pretrained(checkpoint, trust_remote_code=True) - - hybrid_block_layout = ["t"] * config.num_hidden_layers - if identity_index >= 0: - hybrid_block_layout[identity_index] = "i" - - hybrdif_apriel_config = AprielSSMHybridConfig(**config.to_dict(), hybrid_block_layout=hybrid_block_layout) - hybrid_apriel_model = AprielSSMHybridForCausalLM(hybrdif_apriel_config) - hybrid_apriel_model.to(dtype=torch.bfloat16).to(device) - - apriel_model = AutoModelForCausalLM.from_pretrained(checkpoint, torch_dtype=torch.bfloat16, trust_remote_code=True) - apriel_state_dict = apriel_model.state_dict() - hybrid_apriel_model.load_state_dict(apriel_state_dict, strict=False) - - hybrid_apriel_model.save_pretrained(save_dir, save_config=True) - torch.cuda.empty_cache() - del hybrid_apriel_model - del apriel_model - del apriel_state_dict - gc.collect() - - -if __name__ == "__main__": - main() diff --git a/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_importance_15b_mil.py b/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_importance_15b_mil.py deleted file mode 100644 index dde11cfb..00000000 --- a/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_importance_15b_mil.py +++ /dev/null @@ -1,176 +0,0 @@ -import click -import torch -import transformers -from transformers import AutoConfig, AutoModelForCausalLM - -from fast_llm.models.ssm.external.apriel_15b_hybrid.configuration_ssm_hybrid_apriel15b import AprielSSMHybridConfig -from fast_llm.models.ssm.external.apriel_15b_hybrid.modeling_ssm_hybrid_apriel15b import ( - AprielSSMM2DecoderLayer, - 
AprielThinkerSSMHybridForCausalLM, -) - -device = "cuda" if torch.cuda.is_available() else "cpu" - -print("Transformers version:", transformers.__version__) - - -def convert_layers(transformer, mamba_config, hybrid_block_layout, init_with_kqvo, torch_dtype): - - for layer_idx, type in enumerate(hybrid_block_layout): - # print("Converting layer %d...", layer_idx) - # Fetch the layer module for easier access - layer_module = transformer.model.layers._modules[f"{layer_idx}"] - if type == "t": - print("Skipping transformer layer %d..." % layer_idx) - elif type == "m2": - print("Converting layer %d to Mamba2 with MIL init..." % layer_idx) - # Use MambaDecoderLayer for the remaining layers - mamba_encoder = AprielSSMM2DecoderLayer( - mamba_config, - layer_idx, - device="cpu", - dtype=torch_dtype, - ) - - mamba_encoder.mlp.load_state_dict(layer_module.mlp.state_dict()) - mamba_encoder.input_layernorm.load_state_dict(layer_module.input_layernorm.state_dict()) - mamba_encoder.post_attention_layernorm.load_state_dict(layer_module.post_attention_layernorm.state_dict()) - mamba_encoder.mixer.out_proj.load_state_dict(layer_module.self_attn.o_proj.state_dict()) - - if init_with_kqvo: - # Copy weights: [z, x, B, C, dt], x -> v, B -> k, C -> q - mamba_encoder.mixer.in_proj.weight.data[ - mamba_config.ssm_cfg["d_inner"] : mamba_config.ssm_cfg["d_inner"] + mamba_config.ssm_cfg["d_xb"], : - ].copy_(layer_module.self_attn.v_proj.weight.data) - mamba_encoder.mixer.in_proj.weight.data[ - mamba_config.ssm_cfg["d_inner"] - + mamba_config.ssm_cfg["d_xb"] : mamba_config.ssm_cfg["d_inner"] - + 2 * mamba_config.ssm_cfg["d_xb"], - :, - ].copy_(layer_module.self_attn.k_proj.weight.data) - mamba_encoder.mixer.in_proj.weight.data[ - mamba_config.ssm_cfg["d_inner"] - + 2 * mamba_config.ssm_cfg["d_xb"] : 2 * mamba_config.ssm_cfg["d_inner"] - + 2 * mamba_config.ssm_cfg["d_xb"], - :, - ].copy_(layer_module.self_attn.q_proj.weight.data) - - print("Init Mamba using Attention") - - transformer.model.layers[layer_idx] = mamba_encoder - - elif type == "m2d": - raise NotImplementedError("Discrete Mamba2 not implemented") - else: - raise ValueError(f"Invalid layer type: {type}") - - -@click.command() -@click.option("--index_to_swap", type=int, required=True) -@click.option("--checkpoint", type=str, required=True) -@click.option("--output_model_path", type=str, required=True) -@click.option("--layer_type", type=str, default="m2") -@click.option("--mil_init", type=bool, default=True) -def main( - index_to_swap: int, - checkpoint=None, - output_model_path="/mnt/checkpoints/ssm/iterative_hybrids_15b_rkl_m2/apriel_ssm_thinker_15b_hybrid", - layer_type="m2", - mil_init=True, -): - print(f"index_to_swap: {index_to_swap}, checkpoint: {checkpoint}") - - layer_importance = [ - 47, - 39, - 24, - 36, - 31, - 43, - 32, - 20, - 38, - 37, - 30, - 33, - 22, - 23, - 40, - 42, - 44, - 35, - 41, - 27, - 21, - 46, - 45, - 49, - 25, - 34, - 29, - 28, - 19, - 26, - 18, - 17, - 16, - 13, - 15, - 14, - 8, - 9, - 12, - 6, - 11, - 5, - 48, - 7, - 10, - 3, - 4, - 1, - 0, - ] - path_base = "/mnt/checkpoints/upstream/Apriel-Nemotron-15b-Thinker" - config_base = AutoConfig.from_pretrained(path_base) - hybrid_block_layout = ["t"] * config_base.num_hidden_layers - - for i in range(index_to_swap + 1): - layer_idx = int(layer_importance[i]) - print(f"Swapping layer {layer_idx} to {layer_type}") - hybrid_block_layout[layer_idx] = layer_type - - transformer = AutoModelForCausalLM.from_pretrained(path_base) - model_hybrid_prev = 
AprielThinkerSSMHybridForCausalLM.from_pretrained(checkpoint, trust_remote_code=True).to( - torch.bfloat16 - ) - config_hybrid = AprielSSMHybridConfig(**model_hybrid_prev.config.to_dict()) - config_hybrid.hybrid_block_layout = hybrid_block_layout - convert_layers(transformer, config_hybrid, hybrid_block_layout, mil_init, torch.bfloat16) - - missing, unexpected = transformer.load_state_dict( - model_hybrid_prev.state_dict(), strict=False - ) # will not load the newly innitialized layer (will stay MIL), but will overwrite previous layers - if missing: - print("Missing keys:", missing) - if unexpected: - print("Unexpected keys:", unexpected) - transformer.to(torch.bfloat16) - model_hybrid_prev = None - print(transformer) - model_hybrid = AprielThinkerSSMHybridForCausalLM(config_hybrid) - missing, unexpected = model_hybrid.load_state_dict(transformer.state_dict()) - assert len(missing) == 0, "Missing keys: " + str(missing) - assert len(unexpected) == 0, "Unexpected keys: " + str(unexpected) - - model_hybrid.save_pretrained(f"{output_model_path}") - # config_hybrid.save_pretrained(f"{output_model_path}") - - -if __name__ == "__main__": - main() - # main( - # index_to_swap=1, - # checkpoint="/mnt/checkpoints/fast_llm_exp/slam_ssm_distill/15b-ihyb1lrklm216mil-bs768-lr0.0003-lrs0-0-0-0-sl4096_ti1000_lm2/export/apriel_ssm_thinker_hybrid/1000", - # layer_type="m2", - # ) diff --git a/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_mil.py b/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_mil.py deleted file mode 100644 index d50a45fa..00000000 --- a/fast_llm/models/ssm/external/make_hybrid_checkpoint_with_mil.py +++ /dev/null @@ -1,104 +0,0 @@ -import gc - -import click -import torch -from transformers import AutoModelForCausalLM - -from fast_llm.models.ssm.external.apriel_15b_hybrid.configuration_ssm_hybrid_apriel15b import AprielSSMHybridConfig -from fast_llm.models.ssm.external.apriel_15b_hybrid.modeling_ssm_hybrid_apriel15b import ( - AprielSSMM2DecoderLayer, - AprielThinkerSSMHybridForCausalLM, -) - -device = "cuda" if torch.cuda.is_available() else "cpu" - - -def convert_layers(transformer, mamba_config, hybrid_block_layout, init_with_kqvo, torch_dtype=torch.bfloat16): - config = transformer.config - embed_dim = config.hidden_size - num_heads = config.num_attention_heads - num_heads_kv = config.num_key_value_heads - head_dim = embed_dim // num_heads - head_dim * num_heads - head_dim * num_heads_kv - - for layer_idx, type in enumerate(hybrid_block_layout): - print("Converting layer %d...", layer_idx) - # Fetch the layer module for easier access - layer_module = transformer.model.layers._modules[f"{layer_idx}"] - if type == "t": - print("Skipping transformer layer %d..." % layer_idx) - elif type == "m2": - print("Converting layer %d..." 
% layer_idx) - # Use MambaDecoderLayer for the remaining layers - mamba_encoder = AprielSSMM2DecoderLayer( - mamba_config, - layer_idx, - device="cpu", - dtype=torch_dtype, - ) - - mamba_encoder.mlp.load_state_dict(layer_module.mlp.state_dict()) - mamba_encoder.input_layernorm.load_state_dict(layer_module.input_layernorm.state_dict()) - mamba_encoder.post_attention_layernorm.load_state_dict(layer_module.post_attention_layernorm.state_dict()) - mamba_encoder.mixer.out_proj.load_state_dict(layer_module.self_attn.o_proj.state_dict()) - - if init_with_kqvo: - # Copy weights: [z, x, B, C, dt], x -> v, B -> k, C -> q - mamba_encoder.mixer.in_proj.weight.data[ - mamba_config.ssm_cfg["d_inner"] : mamba_config.ssm_cfg["d_inner"] + mamba_config.ssm_cfg["d_xb"], : - ].copy_(layer_module.self_attn.v_proj.weight.data) - mamba_encoder.mixer.in_proj.weight.data[ - mamba_config.ssm_cfg["d_inner"] - + mamba_config.ssm_cfg["d_xb"] : mamba_config.ssm_cfg["d_inner"] - + 2 * mamba_config.ssm_cfg["d_xb"], - :, - ].copy_(layer_module.self_attn.k_proj.weight.data) - mamba_encoder.mixer.in_proj.weight.data[ - mamba_config.ssm_cfg["d_inner"] - + 2 * mamba_config.ssm_cfg["d_xb"] : 2 * mamba_config.ssm_cfg["d_inner"] - + 2 * mamba_config.ssm_cfg["d_xb"], - :, - ].copy_(layer_module.self_attn.q_proj.weight.data) - - print("Init Mamba using Attention") - - transformer.model.layers[layer_idx] = mamba_encoder - - else: - raise ValueError(f"Invalid layer type: {type}") - - -@click.command() -@click.option("--m2_index", type=int, required=True) -@click.option("--hybrid_checkpoint", type=str, required=True) -@click.option("--save_dir", type=str, required=True) -def main(m2_index: int, hybrid_checkpoint: str, save_dir: str): - path_base = "/mnt/checkpoints/upstream/Apriel-Nemotron-15b-Thinker" - transformer = AutoModelForCausalLM.from_pretrained(path_base, trust_remote_code=True) - hybrid_config = AprielSSMHybridConfig.from_pretrained(hybrid_checkpoint) - - hybrid_block_layout = hybrid_config.hybrid_block_layout - hybrid_block_layout[m2_index] = "m2" - print(hybrid_block_layout) - - convert_layers(transformer, hybrid_config, hybrid_block_layout, True, torch.bfloat16) - hybrid_config.ssm_cfg["activation"] = "silu" - - # load all existing ssm layers - hybrid_model = AprielThinkerSSMHybridForCausalLM.from_pretrained(hybrid_checkpoint) - state_dict = hybrid_model.state_dict() - missing, unexpected = transformer.load_state_dict(state_dict, strict=False) - assert f"model.layers.{m2_index}.mixer.A_log" in missing - assert f"model.layers.{m2_index}.self_attn.q_proj.weight" in unexpected - print(missing) - print(unexpected) - transformer.save_pretrained(save_dir) - - hybrid_config.save_pretrained(save_dir) - - gc.collect() - - -if __name__ == "__main__": - main()
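
For reference, the MIL-style initialization used by the deleted conversion scripts (and by the notebook cells above) reduces to copying the attention value/key/query projection weights into fixed row slices of the fused Mamba in_proj weight, whose rows are laid out as [z, x, B, C, dt] so that x <- v_proj, B <- k_proj, C <- q_proj. The following is a minimal standalone sketch of that slice copy under those assumptions; the helper name mil_copy_attention_into_in_proj and the explicit d_inner/d_xb arguments are illustrative and not part of the repository.

import torch


def mil_copy_attention_into_in_proj(
    in_proj_weight: torch.Tensor,  # fused Mamba projection weight, rows laid out as [z, x, B, C, dt]
    q_weight: torch.Tensor,        # (d_inner, hidden) attention query projection
    k_weight: torch.Tensor,        # (d_xb, hidden) attention key projection
    v_weight: torch.Tensor,        # (d_xb, hidden) attention value projection
    d_inner: int,
    d_xb: int,
) -> None:
    with torch.no_grad():
        # x block (d_xb rows) <- value projection
        in_proj_weight[d_inner : d_inner + d_xb, :].copy_(v_weight)
        # B block (d_xb rows) <- key projection
        in_proj_weight[d_inner + d_xb : d_inner + 2 * d_xb, :].copy_(k_weight)
        # C block (d_inner rows) <- query projection
        in_proj_weight[d_inner + 2 * d_xb : 2 * d_inner + 2 * d_xb, :].copy_(q_weight)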